diff --git a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
index 4eb7aa51ec..cda0145dda 100644
--- a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
+++ b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -17,23 +17,15 @@ package org.polypheny.db.catalog;
 
+import com.google.common.base.Predicates;
+import com.google.common.collect.Collections2;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import java.io.File;
 import java.io.IOException;
 import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.Map.Entry;
-import java.util.Objects;
-import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Collectors;
@@ -1901,6 +1893,55 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public long relocateTable( CatalogTable sourceTable, long targetNamespaceId ) {
+        // Clone the source table, changing the ID of the parent namespace
+        CatalogTable targetTable = transferCatalogTable( sourceTable, targetNamespaceId );
+        synchronized ( this ) {
+            // Build the new immutable list for the source namespace by removing the table to transfer
+            ImmutableList<Long> reducedSourceSchemaChildren = ImmutableList
+                    .copyOf( Collections2.filter( schemaChildren.get( sourceTable.namespaceId ),
+                            Predicates.not( Predicates.equalTo( sourceTable.id ) ) ) );
+            // Build the new immutable list for the target namespace by adding the table to transfer
+            ImmutableList<Long> extendedTargetSchemaChildren = new ImmutableList.Builder<Long>()
+                    .addAll( schemaChildren.get( targetNamespaceId ) )
+                    .add( targetTable.id )
+                    .build();
+
+            // Replace the immutable lists of both the source and the target namespace
+            schemaChildren.replace( sourceTable.namespaceId, reducedSourceSchemaChildren );
+            schemaChildren.replace( targetNamespaceId, extendedTargetSchemaChildren );
+
+            // Replace the entry in the tables map with the cloned table
+            tables.replace( sourceTable.id, targetTable );
+            tableNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } );
+            tableNames.put( new Object[]{ targetTable.databaseId, targetNamespaceId, targetTable.name }, targetTable );
+
+            // Replace the entries in the column maps with cloned columns
+            for ( Long fieldId : sourceTable.fieldIds ) {
+                CatalogColumn targetCatalogColumn = transferCatalogColumn( targetNamespaceId, columns.get( fieldId ) );
+                columns.replace( fieldId, targetCatalogColumn );
+                columnNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.id, targetCatalogColumn.name } );
+                columnNames.put( new Object[]{ sourceTable.databaseId, targetNamespaceId, sourceTable.id, targetCatalogColumn.name }, targetCatalogColumn );
+            }
+
+            // When transferring between document-based namespaces, also replace the collection maps.
+            if ( getSchema( sourceTable.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) {
+                CatalogCollection targetCollection = transferCatalogCollection( collections.get( sourceTable.id ), targetNamespaceId );
+                collections.replace( sourceTable.id, targetCollection );
+                collectionNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } );
+                collectionNames.put( new Object[]{ targetTable.databaseId, targetNamespaceId, targetTable.name }, targetCollection );
+            }
+        }
+        listeners.firePropertyChange( "table", sourceTable, null );
+
+        return sourceTable.id;
+    }
+
+
     /**
      * {@inheritDoc}
      */
@@ -5481,6 +5522,56 @@ private CatalogKey getKey( long keyId ) {
     }
 
+    private static CatalogColumn transferCatalogColumn( long targetNamespaceId, CatalogColumn sourceCatalogColumn ) {
+        return new CatalogColumn(
+                sourceCatalogColumn.id,
+                sourceCatalogColumn.name,
+                sourceCatalogColumn.tableId,
+                targetNamespaceId,
+                sourceCatalogColumn.databaseId,
+                sourceCatalogColumn.position,
+                sourceCatalogColumn.type,
+                sourceCatalogColumn.collectionsType,
+                sourceCatalogColumn.length,
+                sourceCatalogColumn.scale,
+                sourceCatalogColumn.dimension,
+                sourceCatalogColumn.cardinality,
+                sourceCatalogColumn.nullable,
+                sourceCatalogColumn.collation,
+                sourceCatalogColumn.defaultValue );
+    }
+
+
+    private CatalogTable transferCatalogTable( CatalogTable sourceTable, long targetNamespaceId ) {
+        return new CatalogTable(
+                sourceTable.id,
+                sourceTable.name,
+                sourceTable.fieldIds,
+                targetNamespaceId,
+                sourceTable.databaseId,
+                sourceTable.ownerId,
+                sourceTable.entityType,
+                sourceTable.primaryKey,
+                sourceTable.dataPlacements,
+                sourceTable.modifiable,
+                sourceTable.partitionProperty,
+                sourceTable.connectedViews );
+    }
+
+
+    private CatalogCollection transferCatalogCollection( CatalogCollection sourceCollection, long targetNamespaceId ) {
+        return new CatalogCollection(
+                sourceCollection.databaseId,
+                targetNamespaceId,
+                sourceCollection.id,
+                sourceCollection.name,
+                sourceCollection.placements,
+                sourceCollection.entityType,
+                sourceCollection.physicalName );
+    }
+
+
     static class CatalogValidator {
 
         public void validate() throws GenericCatalogException {
@@ -5510,4 +5601,4 @@ public void startCheck() {
     }
 
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java
index d84c8dd294..8e0ed3df0d 100644
--- a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java
+++ b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java
@@ -236,6 +236,11 @@ public enum Kind {
      */
     MERGE,
 
+    /**
+     * TRANSFER statement
+     */
+    TRANSFER,
+
     /**
      * TABLESAMPLE operator
      */
@@ -1713,4 +1718,3 @@ private static <E extends Enum<E>> EnumSet<E> concat( EnumSet<E> set0, EnumSet<E> set1 )
 [an intervening hunk was lost in extraction; its remnant reads: ... convertTableTypeList( @NonNull final List<String> typeList ) ... return typeList; ]
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index 812615e594..c70dc34e33 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -193,6 +193,20 @@ public static DdlManager getInstance() {
      */
     public abstract void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, List<String> columnNames, List<String> refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException;
 
+    /**
+     * Merge multiple columns into one new column.
+     *
+     * @param catalogTable the table
+     * @param sourceColumnNames names of the columns to be merged
+     * @param newColumnName name of the new column to be added
+     * @param joinString the string to place between the merged values
+     * @param type the SQL data type specification of the merged column
+     * @param nullable whether the merged column should be nullable
+     * @param defaultValue the new default value of the merged column
+     * @param statement the initial query statement
+     */
+    public abstract void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException;
+
     /**
      * Adds an index to a table
      *
@@ -451,6 +465,17 @@ public static DdlManager getInstance() {
      */
     public abstract void createTable( long schemaId, String tableName, List<FieldInformation> columns, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException;
 
+    /**
+     * Transfer a table to another namespace.
+     * Currently, transfers are supported between namespaces of the same data model as well as between relational and document-based namespaces (in both directions).
+     *
+     * @param table the table to be transferred
+     * @param targetSchemaId the id of the target namespace
+     * @param statement the used statement
+     * @param primaryKeyColumnNames the primary key column names per target table
+     */
+    public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement, Map<String, List<String>> primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException, GenericCatalogException;
+
     /**
      * Create a new view
      *
@@ -751,4 +776,4 @@ public enum DefaultIndexPlacementStrategy {
     POLYPHENY, ONE_DATA_STORE, ALL_DATA_STORES
 }
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
index 1830269180..752efe8e70 100644
--- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
+++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
@@ -16,6 +16,7 @@
 package org.polypheny.db.processing;
 
+import com.google.gson.JsonObject;
 import java.util.List;
 import java.util.Map;
 import org.polypheny.db.algebra.AlgRoot;
@@ -24,10 +25,12 @@
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
 import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.UnknownColumnException;
+import org.polypheny.db.catalog.exceptions.UnknownTableException;
+import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.Transaction;
 
-
 public interface DataMigrator {
 
     void copyData(
@@ -76,16 +79,48 @@
     void copyPartitionData( List<Long> sourcePartitionIds, List<Long> targetPartitionIds );
 
+    /**
+     * Merges columns of a relational table. The values of the source columns are selected,
+     * concatenated and inserted into the target column.
+     *
+     * @param transaction Transactional scope
+     * @param store Target store to which the data should be migrated
+     * @param sourceColumns Columns to be merged
+     * @param targetColumn New column to be added
+     * @param joinString String delimiter between the values to be merged
+     */
+    void mergeColumns( Transaction transaction, CatalogAdapter store, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn, String joinString );
+
     AlgRoot buildInsertStatement( Statement statement, List<CatalogColumnPlacement> to, long partitionId );
 
-    //is used within copyData
+    // is used within copyData
     void executeQuery( List<CatalogColumn> columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy );
 
+    // is used within mergeColumns
+    void executeMergeQuery( List<CatalogColumn> primaryKeyColumns, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy );
+
     AlgRoot buildDeleteStatement( Statement statement, List<CatalogColumnPlacement> to, long partitionId );
 
     AlgRoot getSourceIterator( Statement statement, Map<Long, List<CatalogColumnPlacement>> placementDistribution );
 
     void copyGraphData( CatalogGraphDatabase graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter );
 
+    /**
+     * Migrates the data when transferring a table from a relational to a document-based namespace.
+     *
+     * @param transaction Transactional scope
+     * @param sourceTable Source table from which the data is read
+     * @param targetSchemaId ID of the target namespace
+     */
+    void copyRelationalDataToDocumentData( Transaction transaction, CatalogTable sourceTable, long targetSchemaId );
+
+    /**
+     * Migrates the data when transferring a collection from a document-based to a relational namespace.
+     *
+     * @param transaction Transactional scope
+     * @param jsonObjects List of the JSON objects of the source collection
+     * @param tables Target tables created in the {@link DdlManager}
+     */
+    void copyDocumentDataToRelationalData( Transaction transaction, List<JsonObject> jsonObjects, List<CatalogTable> tables ) throws UnknownColumnException, UnknownTableException;
+
 }
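To make the semantics of the new merge operation concrete, here is a short, hypothetical example (table name, column names and join string are invented; the token before the join string is reconstructed as WITH from the parser rules further below):

    -- Assuming a table person( id INTEGER PRIMARY KEY, firstname VARCHAR(64), lastname VARCHAR(64) ):
    ALTER TABLE person MERGE COLUMNS (firstname, lastname) AS fullname WITH ' ' VARCHAR(64) NULL;
    -- A row (1, 'Ada', 'Lovelace') afterwards carries fullname = 'Ada Lovelace';
    -- the source columns firstname and lastname are dropped from the table.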
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index c39aebee21..1961a7b565 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -352,6 +352,12 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent
     }
 
+    @Override
+    public long relocateTable( CatalogTable sourceTable, long targetNamespaceId ) {
+        throw new NotImplementedException();
+    }
+
+
     @Override
     public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map<Long, List<Long>> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) {
         throw new NotImplementedException();
@@ -1371,4 +1377,4 @@ public void updateCollectionPartitionPhysicalNames( long collectionId, int adapt
         throw new NotImplementedException();
     }
-}
+}
\ No newline at end of file
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 8b896c69c5..492de3bb3c 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -18,19 +18,29 @@
 
 import com.google.common.collect.ImmutableList;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import java.sql.ResultSetMetaData;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.IntStream;
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.map.LinkedMap;
 import org.apache.commons.lang3.StringUtils;
+import org.jetbrains.annotations.NotNull;
+import org.polypheny.db.PolyImplementation;
 import org.polypheny.db.StatisticsManager;
 import org.polypheny.db.adapter.Adapter;
 import org.polypheny.db.adapter.AdapterManager;
@@ -119,6 +129,9 @@
 import org.polypheny.db.ddl.exception.PlacementNotExistsException;
 import org.polypheny.db.ddl.exception.SchemaNotExistException;
 import org.polypheny.db.ddl.exception.UnknownIndexMethodException;
+import org.polypheny.db.languages.QueryParameters;
+import org.polypheny.db.languages.mql.MqlNode;
+import org.polypheny.db.languages.mql.MqlQueryParameters;
 import org.polypheny.db.monitoring.events.DdlEvent;
 import org.polypheny.db.monitoring.events.StatementEvent;
 import org.polypheny.db.partition.PartitionManager;
@@ -127,6 +140,7 @@
 import org.polypheny.db.partition.properties.TemperaturePartitionProperty;
 import org.polypheny.db.partition.properties.TemperaturePartitionProperty.PartitionCostIndication;
 import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation;
+import org.polypheny.db.processing.AutomaticDdlProcessor;
 import org.polypheny.db.processing.DataMigrator;
 import org.polypheny.db.routing.RoutingManager;
 import org.polypheny.db.runtime.PolyphenyDbContextException;
@@ -134,10 +148,14 @@
 import org.polypheny.db.schema.LogicalTable;
 import org.polypheny.db.schema.PolySchemaBuilder;
 import org.polypheny.db.transaction.Statement;
+import org.polypheny.db.transaction.Transaction;
 import org.polypheny.db.transaction.TransactionException;
 import org.polypheny.db.type.ArrayType;
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.view.MaterializedViewManager;
+import org.polypheny.db.webui.Crud;
+import org.polypheny.db.webui.models.DbColumn;
+import org.polypheny.db.webui.models.Result;
 
 
 @Slf4j
@@ -598,6 +616,86 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis
     }
 
+    @Override
+    public void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException {
+        if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName ) ) {
+            throw new ColumnAlreadyExistsException( newColumnName, catalogTable.name );
+        }
+
+        CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, sourceColumnNames.get( sourceColumnNames.size() - 1 ) );
+        int position = updateAdjacentPositions( catalogTable, null, afterColumn );
+
+        long columnId = catalog.addColumn(
+                newColumnName,
+                catalogTable.id,
+                position,
+                type.type,
+                type.collectionType,
+                type.precision,
+                type.scale,
+                type.dimension,
+                type.cardinality,
+                nullable,
+                Collation.getDefaultCollation()
+        );
+
+        // Add the default value
+        addDefaultValue( defaultValue, columnId );
+        CatalogColumn addedColumn = catalog.getColumn( columnId );
+
+        // Remove the quotes from the joinString
+        if ( joinString.startsWith( "'" ) ) {
+            joinString = joinString.substring( 1, joinString.length() - 1 );
+        }
+
+        // Ask the router on which stores this column shall be placed
+        List<DataStore> stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn );
+        DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
+
+        // Build the lists of catalog columns
+        List<CatalogColumn> sourceCatalogColumns = new LinkedList<>();
+        for ( String columnName : sourceColumnNames ) {
+            sourceCatalogColumns.add( catalog.getColumn( catalogTable.id, columnName ) );
+        }
+        CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName );
+
+        // Add the column on the underlying data stores, insert the default value and migrate the data
+        for ( DataStore store : stores ) {
+            catalog.addColumnPlacement(
+                    store.getAdapterId(),
+                    addedColumn.id,
+                    PlacementType.AUTOMATIC,
+                    null,
+                    null,
+                    null
+            );
+            AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn );
+            // Call the migrator
+            dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn, joinString );
+        }
+
+        // Drop the source columns once all stores have been handled, so that each placement is removed exactly once
+        for ( CatalogColumn sourceCatalogColumn : sourceCatalogColumns ) {
+            // Delete the column on the underlying data stores
+            for ( CatalogColumnPlacement dp : catalog.getColumnPlacementsByColumn( sourceCatalogColumn.id ) ) {
+                if ( catalogTable.entityType == EntityType.ENTITY ) {
+                    AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp );
+                }
+                catalog.deleteColumnPlacement( dp.adapterId, dp.columnId, true );
+            }
+
+            // Delete the column from the catalog
+            List<CatalogColumn> columns = catalog.getColumns( catalogTable.id );
+            catalog.deleteColumn( sourceCatalogColumn.id );
+            if ( sourceCatalogColumn.position != columns.size() ) {
+                // Update the positions of the subsequent columns
+                IntStream.range( sourceCatalogColumn.position, columns.size() ).forEach( i -> catalog.setColumnPosition( columns.get( i ).id, i ) );
+            }
+        }
+
+        // Reset the plan cache, implementation cache & routing cache
+        statement.getQueryProcessor().resetCaches();
+    }
+
+
     @Override
     public void addIndex( CatalogTable catalogTable, String indexMethodName, List<String> columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException {
         List<Long> columnIds = new LinkedList<>();
@@ -2243,6 +2341,223 @@ public void createTable( long schemaId, String name, List fiel
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement, Map<String, List<String>> pkColumnNamesOfTables ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException, GenericCatalogException {
+        // Check whether there is already an entity with this name
+        if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) {
+            return;
+        }
+
+        // Retrieve the catalog schema objects for later use
+        CatalogSchema sourceNamespace = catalog.getSchema( sourceTable.namespaceId );
+        CatalogSchema targetNamespace = catalog.getSchema( targetSchemaId );
+        if ( sourceNamespace.getNamespaceType() == targetNamespace.getNamespaceType() ) {
+            // If the source and target namespaces have the same data model, it is sufficient to move the table within the catalog
+            catalog.relocateTable( sourceTable, targetSchemaId );
+        } else if ( sourceNamespace.getNamespaceType() == NamespaceType.RELATIONAL && targetNamespace.getNamespaceType() == NamespaceType.DOCUMENT ) {
+            // If the source namespace is relational and the target is document-based, the DataMigrator also needs to be called
+            transferRelationalToDocument( sourceTable, targetSchemaId, statement );
+        } else if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) {
+            // If the source namespace is document-based and the target is relational, the DataMigrator also needs to be called
+            transferDocumentToRelational( sourceTable, targetSchemaId, pkColumnNamesOfTables, statement );
+        }
+    }
+
+
+    private void transferRelationalToDocument( CatalogTable sourceTable, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException {
+        // Create the new collection on the same data stores
+        List<DataStore> stores = sourceTable.dataPlacements
+                .stream()
+                .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
+                .collect( Collectors.toList() );
+        PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
+        createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement );
+
+        // Call the migrator
+        DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
+        dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId );
+
+        // Drop the source table
+        dropTable( sourceTable, statement );
+        statement.getQueryProcessor().resetCaches();
+    }
+
+
+    private void transferDocumentToRelational( CatalogTable sourceTable, long targetSchemaId, Map<String, List<String>> pkColumnNamesOfTables, Statement statement ) throws EntityAlreadyExistsException, UnknownTableException, UnknownColumnException, GenericCatalogException {
+        // Retrieve the data placements of the source collection
+        CatalogCollection sourceCollection = catalog.getCollection( sourceTable.id );
+        List<DataStore> stores = sourceTable.dataPlacements
+                .stream()
+                .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
+                .collect( Collectors.toList() );
+        PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
+
+        // Query all documents of the source collection; they are needed to create the target tables with their columns
+        String query = String.format( "db.%s.find({})", sourceTable.name );
+
+        QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( sourceTable.namespaceId ).name, NamespaceType.DOCUMENT );
+        AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) statement.getTransaction().getProcessor( QueryLanguage.MONGO_QL );
+        MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 );
+        AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters );
+        PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true );
+        Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false );
+
+        // Build the list of the JsonObjects and derive the table hierarchy; the _id field is only needed for the documents, not for the tables
+        List<JsonObject> jsonObjects = new ArrayList<>();
+        LinkedList<String> currentPosition = new LinkedList<>( Arrays.asList( sourceTable.name ) );
+        LinkedMap<LinkedList<String>, LinkedHashSet<String>> documentHierarchy = new LinkedMap<>( Map.of( currentPosition, new LinkedHashSet<>() ) );
+
+        for ( String document : result.getData()[0] ) {
+            JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject();
+            buildDocumentHierarchy( jsonObject, documentHierarchy, currentPosition, pkColumnNamesOfTables );
+            jsonObjects.add( jsonObject );
+        }
+
+        // Create the target tables
+        // Only VARCHAR(64) columns are added in the current version
+        ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 64, null, null, null, false );
+        List<CatalogTable> addedTables = new ArrayList<>();
+        for ( Entry<LinkedList<String>, LinkedHashSet<String>> hierarchyEntry : documentHierarchy.entrySet() ) {
+            LinkedList<String> tablePath = hierarchyEntry.getKey();
+            LinkedHashSet<String> tableChildren = hierarchyEntry.getValue();
+            String tableName = tablePath.getLast();
+
+            // Only add the table if it has not been added yet, e.g. when it has multiple parents
+            if ( addedTables.stream().noneMatch( table -> tableName.equals( table.name ) ) ) {
+                // If the table also has other parents, merge the tableChildren
+                List<LinkedList<String>> sameTableWithOtherParents = documentHierarchy.keySet()
+                        .stream()
+                        .filter( k -> !k.equals( hierarchyEntry.getKey() ) && k.getLast().equals( hierarchyEntry.getKey().getLast() ) )
+                        .collect( Collectors.toList() );
+                for ( LinkedList<String> sameTableWithOtherParent : sameTableWithOtherParents ) {
+                    tableChildren.addAll( documentHierarchy.get( sameTableWithOtherParent ) );
+                }
+
+                // Create the list of the PK columns for the current table
+                List<ConstraintInformation> constraintInformations =
+                        List.of( new ConstraintInformation( "primary", ConstraintType.PRIMARY, pkColumnNamesOfTables.get( tableName ) ) );
+
+                List<FieldInformation> fieldInformations = tableChildren
+                        .stream()
+                        .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, new ArrayList<>( tableChildren ).indexOf( fieldName ) + 1 ) )
+                        .collect( Collectors.toList() );
+
+                createTable( targetSchemaId, tableName, fieldInformations, constraintInformations, false, stores, placementType, statement );
+                addedTables.add( catalog.getTable( targetSchemaId, tableName ) );
+            }
+            // Add a FK if it is a child table
+            CatalogTable table = catalog.getTable( targetSchemaId, tableName );
+            if ( tablePath.size() > 1 ) {
+                CatalogTable refTable = catalog.getTable( targetSchemaId, tablePath.get( tablePath.size() - 2 ) );
+                List<String> refColumnNames = pkColumnNamesOfTables.get( refTable.name );
+                List<String> columnNames = refColumnNames
+                        .stream()
+                        .map( columnName -> "fk_" + refTable.name + "_" + columnName )
+                        .collect( Collectors.toList() );
+                addForeignKey( table, refTable, columnNames, refColumnNames, "fk_from_" + table.name + "_to_" + refTable.name, ForeignKeyOption.NONE, ForeignKeyOption.NONE );
+            }
+        }
+
+        // Call the DataMigrator
+        DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
+        dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, addedTables );
+
+        // Remove the source collection
+        dropCollection( sourceCollection, statement );
+        statement.getQueryProcessor().resetCaches();
+    }
+
+
+    private static void buildDocumentHierarchy( JsonElement jsonObject, Map<LinkedList<String>, LinkedHashSet<String>> documentHierarchy, LinkedList<String> currentPath,
+            Map<String, List<String>> pkColumnNamesOfTables ) {
+        Set<String> currentTableColumnNames = documentHierarchy.get( currentPath );
+
+        // If no PK column was given, use the _id column.
+        // If the _id column does not exist yet, add it to the columns of the table.
+        if ( !pkColumnNamesOfTables.containsKey( currentPath.getLast() ) ) {
+            pkColumnNamesOfTables.put( currentPath.getLast(), List.of( "_id" ) );
+            currentTableColumnNames.add( "_id" );
+        }
+
+        for ( String fieldName : ((JsonObject) jsonObject).keySet() ) {
+            JsonElement jsonElement = ((JsonObject) jsonObject).get( fieldName );
+            if ( jsonElement instanceof JsonObject ) {
+                LinkedList<String> childTablePath = new LinkedList<>( currentPath );
+                childTablePath.add( fieldName );
+                if ( !documentHierarchy.containsKey( childTablePath ) ) {
+                    documentHierarchy.put( childTablePath, new LinkedHashSet<>() );
+                }
+                buildDocumentHierarchy( jsonElement, documentHierarchy, childTablePath, pkColumnNamesOfTables );
+            } else if ( !currentTableColumnNames.contains( fieldName ) && !fieldName.equals( "_id" ) ) {
+                currentTableColumnNames.add( fieldName );
+            }
+        }
+
+        // If it is a child table, add the PKs of the parent table to the current table as FK columns
+        if ( currentPath.size() > 1 ) {
+            String parentTableName = currentPath.get( currentPath.size() - 2 );
+            pkColumnNamesOfTables.getOrDefault( parentTableName, List.of( "_id" ) )
+                    .stream()
+                    .map( parentPkColumnName -> "fk_" + parentTableName + "_" + parentPkColumnName )
+                    .forEach( currentTableColumnNames::add );
+        }
+    }
+
+
+    @NotNull
+    public static Result getResult( QueryLanguage language, Statement statement, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) {
+        Catalog catalog = Catalog.getInstance();
+
+        List<List<Object>> rows = result.getRows( statement, noLimit ? -1 : language == QueryLanguage.CYPHER ? RuntimeConfig.UI_NODE_AMOUNT.getInteger() : RuntimeConfig.UI_PAGE_SIZE.getInteger() );
+
+        boolean hasMoreRows = result.hasMoreRows();
+
+        CatalogTable catalogTable = null;
+
+        ArrayList<DbColumn> header = new ArrayList<>();
+        for ( AlgDataTypeField metaData : result.rowType.getFieldList() ) {
+            String columnName = metaData.getName();
+
+            DbColumn dbCol = new DbColumn(
+                    metaData.getName(),
+                    metaData.getType().getFullTypeString(),
+                    metaData.getType().isNullable() == (ResultSetMetaData.columnNullable == 1),
+                    metaData.getType().getPrecision(),
+                    null,
+                    null );
+
+            // Get the column default values
+            if ( catalogTable != null ) {
+                try {
+                    if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) {
+                        CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName );
+                        if ( catalogColumn.defaultValue != null ) {
+                            dbCol.defaultValue = catalogColumn.defaultValue.value;
+                        }
+                    }
+                } catch ( UnknownColumnException e ) {
+                    log.error( "Caught exception", e );
+                }
+            }
+            header.add( dbCol );
+        }
+
+        ArrayList<String[]> data = Crud.computeResultData( rows, header, statement.getTransaction() );
+
+        return new Result( header.toArray( new DbColumn[0] ), data.toArray( new String[0][] ) )
+                .setNamespaceType( result.getNamespaceType() )
+                .setNamespaceName( "target" )
+                .setLanguage( language )
+                .setAffectedRows( data.size() )
+                .setHasMoreRows( hasMoreRows )
+                .setXid( transaction.getXid().toString() )
+                .setGeneratedQuery( query );
+    }
+
+
     @Override
     public void createCollection( long schemaId, String name, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException {
         name = adjustNameIfNeeded( name, schemaId );
@@ -3159,4 +3474,4 @@ public void dropType() {
         throw new RuntimeException( "Not supported yet" );
     }
-}
+}
\ No newline at end of file
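The hierarchy handling in transferDocumentToRelational() and buildDocumentHierarchy() is easiest to see on a small example. Sketched under assumed names (a collection "users" with invented fields), a nested document yields one table per nesting level, with generated fk_ columns wiring each child table to its parent and VARCHAR(64) as the type of every created column:

    -- A document in collection "users":
    --   { "_id": "507f1f77bcf86cd799439011", "name": "Alice", "address": { "city": "Basel" } }
    -- would result in roughly the following tables in the target namespace:
    CREATE TABLE users (
        _id  VARCHAR(64) NOT NULL,
        name VARCHAR(64),
        PRIMARY KEY (_id)
    );
    CREATE TABLE address (
        _id          VARCHAR(64) NOT NULL,  -- generated via ObjectId when absent
        city         VARCHAR(64),
        fk_users__id VARCHAR(64),           -- "fk_" + parent table + "_" + parent PK column
        PRIMARY KEY (_id)
        -- plus a foreign key named "fk_from_address_to_users" referencing users(_id)
    );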
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
index 10fbe8baba..a177c65a93 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
@@ -17,11 +17,15 @@
 package org.polypheny.db.processing;
 
 import com.google.common.collect.ImmutableList;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -31,8 +35,10 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.avatica.MetaImpl;
 import org.apache.calcite.linq4j.Enumerable;
+import org.bson.types.ObjectId;
 import org.jetbrains.annotations.NotNull;
 import org.polypheny.db.PolyImplementation;
+import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.AlgStructuredTypeFlattener;
@@ -43,6 +49,7 @@
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues;
 import org.polypheny.db.algebra.logical.relational.LogicalValues;
+import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
@@ -52,10 +59,16 @@
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
+import org.polypheny.db.catalog.entity.CatalogForeignKey;
 import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.entity.CatalogPrimaryKey;
 import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.UnknownColumnException;
+import org.polypheny.db.catalog.exceptions.UnknownTableException;
 import org.polypheny.db.config.RuntimeConfig;
+import org.polypheny.db.languages.QueryParameters;
+import org.polypheny.db.languages.mql.MqlNode;
+import org.polypheny.db.languages.mql.MqlQueryParameters;
 import org.polypheny.db.partition.PartitionManager;
 import org.polypheny.db.partition.PartitionManagerFactory;
 import org.polypheny.db.plan.AlgOptCluster;
@@ -147,6 +160,222 @@ public void copyGraphData( CatalogGraphDatabase target, Transaction transaction,
     }
 
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTable sourceTable, long targetSchemaId ) {
+        try {
+            Catalog catalog = Catalog.getInstance();
+
+            // Collect the columns of the source table
+            List<CatalogColumn> sourceColumns = new ArrayList<>();
+            for ( String columnName : sourceTable.getColumnNames() ) {
+                sourceColumns.add( catalog.getColumn( sourceTable.id, columnName ) );
+            }
+
+            // Retrieve the placements of the source table
+            Map<Long, List<CatalogColumnPlacement>> sourceColumnPlacements = new HashMap<>();
+            sourceColumnPlacements.put(
+                    sourceTable.partitionProperty.partitionIds.get( 0 ),
+                    selectSourcePlacements( sourceTable, sourceColumns, -1 ) );
+            Map<Long, List<CatalogColumnPlacement>> subDistribution = new HashMap<>( sourceColumnPlacements );
+            subDistribution.keySet().retainAll( Arrays.asList( sourceTable.partitionProperty.partitionIds.get( 0 ) ) );
+
+            // Initialize the source statement to read all values from the source table
+            Statement sourceStatement = transaction.createStatement();
+            AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution );
+            PolyImplementation result = sourceStatement.getQueryProcessor().prepareQuery(
+                    sourceAlg,
+                    sourceAlg.alg.getCluster().getTypeFactory().builder().build(),
+                    true,
+                    false,
+                    false );
+
+            // Map the column names to the indexes of the physical placements
+            Map<String, Integer> sourceColMapping = new LinkedHashMap<>();
+            for ( CatalogColumn catalogColumn : sourceColumns ) {
+                int i = 0;
+                for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) {
+                    if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) {
+                        sourceColMapping.put( catalogColumn.name, i );
+                    }
+                    i++;
+                }
+            }
+
+            int batchSize = RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE.getInteger();
+            final Enumerable<Object> enumerable = result.enumerable( sourceStatement.getDataContext() );
+            Iterator<Object> sourceIterator = enumerable.iterator();
+            while ( sourceIterator.hasNext() ) {
+                // Collect the values of the source table for the insert query
+                List<List<Object>> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() );
+                List<Map<String, Object>> values = new ArrayList<>();
+                for ( List<Object> list : rows ) {
+                    LinkedHashMap<String, Object> currentRowValues = new LinkedHashMap<>();
+                    sourceColMapping.forEach( ( key, value ) -> currentRowValues.put( key, list.get( value ) ) );
+                    values.add( currentRowValues );
+                }
+
+                // Create the insert query for all documents in the collection
+                boolean firstRow = true;
+                StringBuilder bf = new StringBuilder();
+                bf.append( "db." + sourceTable.name + ".insertMany([" );
+                for ( Map<String, Object> row : values ) {
+                    if ( firstRow ) {
+                        bf.append( "{" );
+                        firstRow = false;
+                    } else {
+                        bf.append( ",{" );
+                    }
+                    boolean firstColumn = true;
+                    for ( Map.Entry<String, Object> entry : row.entrySet() ) {
+                        if ( entry.getValue() != null ) {
+                            if ( firstColumn ) {
+                                firstColumn = false;
+                            } else {
+                                bf.append( "," );
+                            }
+                            bf.append( "\"" + entry.getKey() + "\" : \"" + entry.getValue() + "\"" );
+                        }
+                    }
+                    bf.append( "}" );
+                }
+                bf.append( "])" );
+
+                // Insert all documents into the newly created collection
+                Statement targetStatement = transaction.createStatement();
+                String query = bf.toString();
+                AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) transaction.getProcessor( Catalog.QueryLanguage.MONGO_QL );
+                QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( targetSchemaId ).name, Catalog.NamespaceType.DOCUMENT );
+                MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 );
+                AlgRoot logicalRoot = mqlProcessor.translate( targetStatement, parsed, parameters );
+
+                // Prepare and execute the insert query
+                Iterator<?> iterator = targetStatement.getQueryProcessor()
+                        .prepareQuery( logicalRoot, true )
+                        .enumerable( targetStatement.getDataContext() )
+                        .iterator();
+                //noinspection WhileLoopReplaceableByForEach
+                while ( iterator.hasNext() ) {
+                    iterator.next();
+                }
+                targetStatement.getDataContext().resetParameterValues();
+            }
+        } catch ( Throwable t ) {
+            throw new RuntimeException( t );
+        }
+    }
+
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void copyDocumentDataToRelationalData( Transaction transaction, List<JsonObject> jsonObjects, List<CatalogTable> targetTables ) throws UnknownColumnException, UnknownTableException {
+        Catalog catalog = Catalog.getInstance();
+
+        // Collect the values from all documents of the collection
+        Map<CatalogColumn, List<Object>> columnValues = new HashMap<>();
+        for ( JsonObject jsonObject : jsonObjects ) {
+            getColumnValuesForTable( catalog, targetTables.get( 0 ), columnValues, jsonObject, Collections.emptyMap() );
+        }
+
+        for ( CatalogTable targetTable : targetTables ) {
+            Statement targetStatement = transaction.createStatement();
+            final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
+            List<CatalogColumnPlacement> targetColumnPlacements = new LinkedList<>();
+            for ( CatalogColumn targetColumn : catalog.getColumns( targetTable.id ) ) {
+                // Add the values of the column to the statement
+                targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), columnValues.get( targetColumn ) );
+
+                // Add all placements of the column to the targetColumnPlacements list
+                for ( DataStore store : RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ) ) {
+                    CatalogColumnPlacement columnPlacement = Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id );
+                    targetColumnPlacements.add( columnPlacement );
+                }
+            }
+
+            // Prepare and execute the insert query
+            AlgRoot targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) );
+            Iterator<?> iterator = targetStatement.getQueryProcessor()
+                    .prepareQuery( targetAlg, targetAlg.validatedRowType, true, false, false )
+                    .enumerable( targetStatement.getDataContext() )
+                    .iterator();
+            //noinspection WhileLoopReplaceableByForEach
+            while ( iterator.hasNext() ) {
+                iterator.next();
+            }
+            targetStatement.getDataContext().resetParameterValues();
+        }
+    }
+
+
+    private static void getColumnValuesForTable( Catalog catalog, CatalogTable table, Map<CatalogColumn, List<Object>> columnValues, JsonObject jsonObject, Map<String, String> parentPkValues ) throws UnknownColumnException, UnknownTableException {
+        Map<String, String> pkValues = new HashMap<>();
+
+        // Handle the columns that are not present in the current document
+        for ( String columnName : table.getColumnNames() ) {
+            if ( !jsonObject.keySet().contains( columnName ) ) {
+                CatalogColumn column = catalog.getColumn( table.id, columnName );
+                if ( !columnValues.containsKey( column ) ) {
+                    columnValues.put( column, new ArrayList<>() );
+                }
+                if ( catalog.getPrimaryKey( table.primaryKey ).columnIds.contains( column.id ) ) {
+                    // Generate an _id if it is not contained in the document.
+                    String generatedValue = new ObjectId().toString();
+                    columnValues.get( column ).add( generatedValue );
+                    pkValues.put( String.join( ".", table.name, column.name ), generatedValue );
+                } else if ( catalog.getForeignKeys( table.id ).stream().noneMatch( fk -> fk.getColumnNames().contains( columnName ) ) ) {
+                    // If it is not a FK column of the table either, simply add null.
+                    columnValues.get( column ).add( null );
+                }
+            }
+        }
+
+        for ( String fieldName : jsonObject.keySet() ) {
+            // Skip the _id field if the target table is not intended to contain the _id column
+            if ( fieldName.equals( "_id" ) && !table.getColumnNames().contains( "_id" ) ) {
+                continue;
+            }
+
+            JsonElement jsonElement = jsonObject.get( fieldName );
+            // If it is a nested document
+            if ( jsonElement instanceof JsonObject ) {
+                // Add the PKs from the document
+                for ( long pkColumnId : catalog.getPrimaryKey( table.primaryKey ).columnIds ) {
+                    String pkColumnName = catalog.getColumn( pkColumnId ).name;
+                    if ( jsonObject.has( pkColumnName ) ) {
+                        pkValues.put( String.join( ".", table.name, pkColumnName ), jsonObject.get( pkColumnName ).getAsString() );
+                    }
+                }
+
+                CatalogTable childTable = catalog.getTable( table.namespaceId, fieldName );
+                getColumnValuesForTable( catalog, childTable, columnValues, (JsonObject) jsonElement, pkValues );
+            } else {
+                CatalogColumn column = catalog.getColumn( table.id, fieldName );
+                if ( !columnValues.containsKey( column ) ) {
+                    columnValues.put( column, new LinkedList<>() );
+                }
+                columnValues.get( column ).add( jsonObject.get( fieldName ).getAsString() );
+            }
+        }
+
+        // Add the FK columns
+        for ( CatalogForeignKey fk : catalog.getForeignKeys( table.id ) ) {
+            int i = 0;
+            for ( String refColumnName : fk.getReferencedKeyColumnNames() ) {
+                CatalogColumn fkColumn = catalog.getColumn( table.id, fk.getColumnNames().get( i++ ) );
+                if ( !columnValues.containsKey( fkColumn ) ) {
+                    columnValues.put( fkColumn, new ArrayList<>() );
+                }
+                String refColumnValue = parentPkValues.get( String.join( ".", fk.getReferencedKeyTableName(), refColumnName ) );
+                columnValues.get( fkColumn ).add( refColumnValue );
+            }
+        }
+    }
+
+
     @NotNull
     private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGraph graph ) {
         List<AlgDataTypeField> fields = new ArrayList<>();
@@ -314,6 +543,115 @@ public void executeQuery( List<CatalogColumn> selectColumnList, AlgRoot sourceAl
     }
 
+    @Override
+    public void executeMergeQuery( List<CatalogColumn> primaryKeyColumns, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) {
+        try {
+            PolyImplementation result;
+            if ( isMaterializedView ) {
+                result = sourceStatement.getQueryProcessor().prepareQuery(
+                        sourceAlg,
+                        sourceAlg.alg.getCluster().getTypeFactory().builder().build(),
+                        false,
+                        false,
+                        doesSubstituteOrderBy );
+            } else {
+                result = sourceStatement.getQueryProcessor().prepareQuery(
+                        sourceAlg,
+                        sourceAlg.alg.getCluster().getTypeFactory().builder().build(),
+                        true,
+                        false,
+                        false );
+            }
+            final Enumerable<Object> enumerable = result.enumerable( sourceStatement.getDataContext() );
+            //noinspection unchecked
+            Iterator<Object> sourceIterator = enumerable.iterator();
+
+            // Get the mappings of the source columns from the catalog
+            Map<Long, Integer> sourceColMapping = new LinkedHashMap<>();
+            for ( CatalogColumn catalogColumn : sourceColumns ) {
+                int i = 0;
+                for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) {
+                    if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) {
+                        sourceColMapping.put( catalogColumn.id, i );
+                    }
+                    i++;
+                }
+            }
+
+            if ( isMaterializedView ) {
+                for ( CatalogColumn catalogColumn : sourceColumns ) {
+                    if ( !sourceColMapping.containsKey( catalogColumn.id ) ) {
+                        int i = sourceColMapping.values().stream().mapToInt( v -> v ).max().orElseThrow( NoSuchElementException::new );
+                        sourceColMapping.put( catalogColumn.id, i + 1 );
+                    }
+                }
+            }
+
+            int batchSize = RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE.getInteger();
+            int i = 0;
+            while ( sourceIterator.hasNext() ) {
+                List<List<Object>> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() );
+                Map<Long, List<Object>> values = new LinkedHashMap<>();
+
+                // Read the values of the source columns from all rows
+                for ( List<Object> list : rows ) {
+                    for ( Map.Entry<Long, Integer> entry : sourceColMapping.entrySet() ) {
+                        if ( !values.containsKey( entry.getKey() ) ) {
+                            values.put( entry.getKey(), new LinkedList<>() );
+                        }
+                        if ( isMaterializedView ) {
+                            if ( entry.getValue() > list.size() - 1 ) {
+                                values.get( entry.getKey() ).add( i );
+                                i++;
+                            } else {
+                                values.get( entry.getKey() ).add( list.get( entry.getValue() ) );
+                            }
+                        } else {
+                            values.get( entry.getKey() ).add( list.get( entry.getValue() ) );
+                        }
+                    }
+                }
+
+                // Concatenate the source values into a single string per row
+                final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
+                List<Object> mergedValueList = null;
+                for ( Map.Entry<Long, List<Object>> v : values.entrySet() ) {
+                    if ( !primaryKeyColumns.stream().map( c -> c.id ).collect( Collectors.toList() ).contains( v.getKey() ) ) {
+                        if ( mergedValueList == null ) {
+                            mergedValueList = v.getValue();
+                        } else {
+                            int j = 0;
+                            for ( Object value : mergedValueList ) {
+                                mergedValueList.set( j, ((String) value).concat( joinString + v.getValue().get( j++ ) ) );
+                            }
+                        }
+                    }
+                }
+                targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), mergedValueList );
+
+                // Add the PK columns to the target statement
+                for ( CatalogColumn primaryKey : primaryKeyColumns ) {
+                    AlgDataType primaryKeyAlgDataType = primaryKey.getAlgDataType( typeFactory );
+                    List<Object> primaryKeyValues = values.get( primaryKey.id );
+                    targetStatement.getDataContext().addParameterValues( primaryKey.id, primaryKeyAlgDataType, primaryKeyValues );
+                }
+
+                Iterator<?> iterator = targetStatement.getQueryProcessor()
+                        .prepareQuery( targetAlg, sourceAlg.validatedRowType, true, false, false )
+                        .enumerable( targetStatement.getDataContext() )
+                        .iterator();
+                //noinspection WhileLoopReplaceableByForEach
+                while ( iterator.hasNext() ) {
+                    iterator.next();
+                }
+                targetStatement.getDataContext().resetParameterValues();
+            }
+        } catch ( Throwable t ) {
+            throw new RuntimeException( t );
+        }
+    }
+
+
     @Override
     public AlgRoot buildDeleteStatement( Statement statement, List<CatalogColumnPlacement> to, long partitionId ) {
         List<String> qualifiedTableName = ImmutableList.of(
@@ -774,4 +1112,42 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Ca
         }
     }
 
+
+    @Override
+    public void mergeColumns( Transaction transaction, CatalogAdapter store, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn, String joinString ) {
+        CatalogTable table = Catalog.getInstance().getTable( sourceColumns.get( 0 ).tableId );
+        CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey );
+
+        List<CatalogColumn> selectColumnList = new LinkedList<>( sourceColumns );
+        List<CatalogColumn> primaryKeyList = new LinkedList<>();
+
+        // Add the primary key columns to the select column list
+        for ( long cid : primaryKey.columnIds ) {
+            CatalogColumn catalogColumn = Catalog.getInstance().getColumn( cid );
+            if ( !selectColumnList.contains( catalogColumn ) ) {
+                selectColumnList.add( catalogColumn );
+            }
+            primaryKeyList.add( catalogColumn );
+        }
+
+        // Get the placements of the source columns
+        Map<Long, List<CatalogColumnPlacement>> sourceColumnPlacements = new HashMap<>();
+        sourceColumnPlacements.put(
+                table.partitionProperty.partitionIds.get( 0 ),
+                selectSourcePlacements( table, selectColumnList, -1 ) );
+
+        // Get the placement of the newly added target column
+        CatalogColumnPlacement targetColumnPlacement = Catalog.getInstance().getColumnPlacement( store.id, targetColumn.id );
+        Map<Long, List<CatalogColumnPlacement>> subDistribution = new HashMap<>( sourceColumnPlacements );
+        subDistribution.keySet().retainAll( Arrays.asList( table.partitionProperty.partitionIds.get( 0 ) ) );
+
+        // Initialize the statements for reading and inserting
+        Statement sourceStatement = transaction.createStatement();
+        Statement targetStatement = transaction.createStatement();
+        AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution );
+        AlgRoot targetAlg = buildUpdateStatement( targetStatement, Collections.singletonList( targetColumnPlacement ), table.partitionProperty.partitionIds.get( 0 ) );
+
+        executeMergeQuery( primaryKeyList, selectColumnList, targetColumn, joinString, sourceAlg, sourceStatement, targetStatement, targetAlg, false, false );
+    }
+
 }
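For orientation, the batched insert that copyRelationalDataToDocumentData() assembles in the StringBuilder above has the following shape (hypothetical table person with columns id and name; note that the current implementation renders every non-null value as a quoted string):

    db.person.insertMany([{"id" : "1", "name" : "Ada"},{"id" : "2", "name" : "Alan"}])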
diff --git a/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java b/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
index 970fcc2883..e579f443b0 100644
--- a/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
+++ b/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
@@ -413,7 +413,7 @@ private String getPhysicalColumnName( CatalogColumnPlacement columnPlacement ) {
     public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) {
         commitAll();
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) {
-            Document field = new Document().append( partitionPlacement.physicalTableName, 1 );
+            Document field = new Document().append( getPhysicalColumnName( columnPlacement.physicalColumnName, columnPlacement.columnId ), 1 );
             Document filter = new Document().append( "$unset", field );
 
             context.getStatement().getTransaction().registerInvolvedAdapter( this );
diff --git a/sql-language/src/main/codegen/config.fmpp b/sql-language/src/main/codegen/config.fmpp
index 1acf60a9ae..989396c6f5 100644
--- a/sql-language/src/main/codegen/config.fmpp
+++ b/sql-language/src/main/codegen/config.fmpp
@@ -82,8 +82,10 @@ data: {
       "org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewRenameColumn"
       "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaOwner"
      "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaRename"
+      "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaTransferTable"
       "org.polypheny.db.sql.language.ddl.altertable.SqlAlterSourceTableAddColumn"
       "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddColumn"
+      "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableMergeColumns"
       "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddForeignKey"
       "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddIndex"
       "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddPartitions"
@@ -161,6 +163,7 @@ data: {
       "CATALOG_NAME"
       "CENTURY"
       "CONFIG"
+      "COLUMNS"
       "CHAIN"
       "CHARACTER_SET_CATALOG"
       "CHARACTER_SET_NAME"
@@ -251,6 +254,7 @@ data: {
       "JSON"
       "K"
       "KEY"
+      "KEYS"
       "KEY_MEMBER"
       "KEY_TYPE"
       "LABEL"
@@ -422,6 +426,7 @@ data: {
       "TRANSACTIONS_ACTIVE"
       "TRANSACTIONS_COMMITTED"
       "TRANSACTIONS_ROLLED_BACK"
+      "TRANSFER"
       "TRANSFORM"
       "TRANSFORMS"
       "TRIGGER_CATALOG"
@@ -523,4 +528,3 @@
 
   freemarkerLinks: {
     includes: includes/
   }
-
diff --git a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl
index dbf980e885..60ee53557d 100644
--- a/sql-language/src/main/codegen/includes/parserImpls.ftl
+++ b/sql-language/src/main/codegen/includes/parserImpls.ftl
@@ -43,11 +43,24 @@ SqlAlterSchema SqlAlterSchema(Span s) :
     final SqlIdentifier schema;
     final SqlIdentifier name;
     final SqlIdentifier owner;
+    final SqlNodeList columnList;
 }
 {
     schema = CompoundIdentifier()
     (
+        <TRANSFER> <TABLE>
+        name = CompoundIdentifier()
+        (
+            <WITH> <PRIMARY> <KEYS>
+            columnList = ParenthesizedSimpleIdentifierList()
+        |
+            { columnList = null; }
+        )
+        {
+            return new SqlAlterSchemaTransferTable(s.end(this), name, schema, columnList);
+        }
+    |
         <RENAME> <TO>
         name = CompoundIdentifier()
         {
@@ -200,6 +213,7 @@ SqlAlterTable SqlAlterTable(Span s) :
     final SqlIdentifier physicalName;
     final SqlIdentifier partitionType;
     final SqlIdentifier partitionColumn;
+    final SqlNode joinString;
     List<Integer> partitionList = new ArrayList<Integer>();
     int partitionIndex = 0;
     int numPartitionGroups = 0;
@@ -655,6 +669,32 @@ SqlAlterTable SqlAlterTable(Span s) :
         {
            return new SqlAlterTableMergePartitions(s.end(this), table);
         }
+    |
+        <MERGE> <COLUMNS>
+        columnList = ParenthesizedSimpleIdentifierList()
+        <AS>
+        name = SimpleIdentifier()
+        <WITH>
+        joinString = Literal()
+        type = DataType()
+        (
+            <NULL> { nullable = true; }
+        |
+            <NOT> <NULL> { nullable = false; }
+        |
+            { nullable = true; }
+        )
+        (
+            <DEFAULT_>
+            defaultValue = Literal()
+        |
+            defaultValue = ArrayConstructor()
+        |
+            { defaultValue = null; }
+        )
+        {
+            return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, joinString, type, nullable, defaultValue);
+        }
     )
 }
diff --git a/sql-language/src/main/codegen/templates/Parser.jj b/sql-language/src/main/codegen/templates/Parser.jj
index 23502edeee..2e97de81a6 100644
--- a/sql-language/src/main/codegen/templates/Parser.jj
+++ b/sql-language/src/main/codegen/templates/Parser.jj
@@ -6311,6 +6311,7 @@ SqlPostfixOperator PostfixRowOperator() :
 | < JSON_QUERY: "JSON_QUERY" >
 | < K: "K" >
 | < KEY: "KEY" >
+| < KEYS: "KEYS" >
 | < KEY_MEMBER: "KEY_MEMBER" >
 | < KEY_TYPE: "KEY_TYPE" >
 | < DISTANCE: "DISTANCE" >
@@ -6350,6 +6351,7 @@ SqlPostfixOperator PostfixRowOperator() :
 | < MEASURES: "MEASURES" >
 | < MEMBER: "MEMBER" >
 | < MERGE: "MERGE" >
+| < COLUMNS: "COLUMNS" >
 | < MESSAGE_LENGTH: "MESSAGE_LENGTH" >
 | < MESSAGE_OCTET_LENGTH: "MESSAGE_OCTET_LENGTH" >
 | < MESSAGE_TEXT: "MESSAGE_TEXT" >
@@ -6649,6 +6651,7 @@ SqlPostfixOperator PostfixRowOperator() :
 | < TRANSACTIONS_ACTIVE: "TRANSACTIONS_ACTIVE" >
 | < TRANSACTIONS_COMMITTED: "TRANSACTIONS_COMMITTED" >
 | < TRANSACTIONS_ROLLED_BACK: "TRANSACTIONS_ROLLED_BACK" >
+| < TRANSFER: "TRANSFER" >
 | < TRANSFORM: "TRANSFORM" >
 | < TRANSFORMS: "TRANSFORMS" >
 | < TRANSLATE: "TRANSLATE" >
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
new file mode 100644
index 0000000000..6a70c5358c
--- /dev/null
+++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2019-2022 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.sql.language.ddl.alterschema;
+
+
+import static org.polypheny.db.util.Static.RESOURCE;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import org.apache.commons.lang.StringUtils;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
+import org.polypheny.db.catalog.exceptions.GenericCatalogException;
+import org.polypheny.db.catalog.exceptions.UnknownColumnException;
+import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
+import org.polypheny.db.catalog.exceptions.UnknownTableException;
+import org.polypheny.db.ddl.DdlManager;
+import org.polypheny.db.ddl.exception.DdlOnSourceException;
+import org.polypheny.db.languages.ParserPos;
+import org.polypheny.db.languages.QueryParameters;
+import org.polypheny.db.nodes.Node;
+import org.polypheny.db.prepare.Context;
+import org.polypheny.db.sql.language.SqlIdentifier;
+import org.polypheny.db.sql.language.SqlNode;
+import org.polypheny.db.sql.language.SqlNodeList;
+import org.polypheny.db.sql.language.SqlWriter;
+import org.polypheny.db.sql.language.ddl.SqlAlterSchema;
+import org.polypheny.db.transaction.Statement;
+import org.polypheny.db.util.CoreUtil;
+import org.polypheny.db.util.ImmutableNullableList;
+
+
+/**
+ * Parse tree for {@code ALTER SCHEMA namespace TRANSFER TABLE table} statement.
+ */
+public class SqlAlterSchemaTransferTable extends SqlAlterSchema {
+
+    private final SqlIdentifier table;
+    private final SqlIdentifier targetSchema;
+    private final SqlNodeList primaryKeyColumns;
+
+
+    /**
+     * Creates a SqlAlterSchemaTransferTable.
+ */ + public SqlAlterSchemaTransferTable( ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema, SqlNodeList primaryKeyColumns ) { + super( pos ); + this.table = Objects.requireNonNull( table ); + this.targetSchema = Objects.requireNonNull( targetSchema ); + this.primaryKeyColumns = primaryKeyColumns; + } + + + @Override + public List<Node> getOperandList() { + return ImmutableNullableList.of( table, targetSchema ); + } + + + @Override + public List<SqlNode> getSqlOperandList() { + return ImmutableNullableList.of( table, targetSchema ); + } + + + @Override + public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { + writer.keyword( "ALTER" ); + writer.keyword( "SCHEMA" ); + targetSchema.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "TRANSFER" ); + table.unparse( writer, leftPrec, rightPrec ); + if ( primaryKeyColumns != null ) { + writer.keyword( "SET" ); + writer.keyword( "PRIMARY" ); + writer.keyword( "KEYS" ); + primaryKeyColumns.unparse( writer, leftPrec, rightPrec ); + } + } + + + @Override + public void execute( Context context, Statement statement, QueryParameters parameters ) { + try { + Catalog catalog = Catalog.getInstance(); + CatalogTable catalogTable = getCatalogTable( context, table ); + + long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get( 0 ) ).id; + DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement, buildPkColumnNamesOfTables() ); + + } catch ( UnknownSchemaException e ) { + throw CoreUtil.newContextException( targetSchema.getPos(), RESOURCE.schemaNotFound( targetSchema.getSimple() ) ); + } catch ( EntityAlreadyExistsException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) ); + } catch ( DdlOnSourceException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.ddlOnSourceTable() ); + } catch ( UnknownTableException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableNotFound( e.getTableName() ) ); + } catch ( UnknownColumnException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); + } catch ( GenericCatalogException e ) { + throw new RuntimeException( e ); + } + } + + + private Map<String, List<String>> buildPkColumnNamesOfTables() { + Map<String, List<String>> pkColumnNamesOfTables = new HashMap<>(); + if ( primaryKeyColumns != null ) { + for ( Node pkNode : primaryKeyColumns.getList() ) { + String tableName = StringUtils.substringBefore( pkNode.toString(), "." ); + String columnName = StringUtils.substringAfter( pkNode.toString(), "." ); + if ( !pkColumnNamesOfTables.containsKey( tableName ) ) { + pkColumnNamesOfTables.put( tableName, new ArrayList<>() ); + } + pkColumnNamesOfTables.get( tableName ).add( columnName ); + } + } + return pkColumnNamesOfTables; + } + +} diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java new file mode 100644 index 0000000000..d1baa9e612 --- /dev/null +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java @@ -0,0 +1,148 @@ +/* + * Copyright 2019-2022 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.sql.language.ddl.altertable; + + +import static org.polypheny.db.util.Static.RESOURCE; + +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.catalog.Catalog.EntityType; +import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.ddl.DdlManager; +import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.exception.ColumnNotExistsException; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.languages.QueryParameters; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.sql.language.SqlDataTypeSpec; +import org.polypheny.db.sql.language.SqlIdentifier; +import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; +import org.polypheny.db.sql.language.SqlWriter; +import org.polypheny.db.sql.language.ddl.SqlAlterTable; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.util.CoreUtil; +import org.polypheny.db.util.ImmutableNullableList; + + +/** + * Parse tree for {@code ALTER TABLE name MERGE COLUMNS (columns) INTO name WITH 'separator'} statement. + */ +@Slf4j +public class SqlAlterTableMergeColumns extends SqlAlterTable { + + private final SqlIdentifier table; + private final SqlNodeList columnsToMerge; + private final SqlIdentifier targetColumnName; + private final SqlNode joinString; + private final SqlDataTypeSpec type; + private final boolean nullable; + private final SqlNode defaultValue; + + + public SqlAlterTableMergeColumns( + ParserPos pos, + SqlIdentifier table, + SqlNodeList columnsToMerge, + SqlIdentifier targetColumnName, + SqlNode joinString, + SqlDataTypeSpec type, + boolean nullable, + SqlNode defaultValue ) { + super( pos ); + this.table = Objects.requireNonNull( table ); + this.columnsToMerge = columnsToMerge; + this.targetColumnName = targetColumnName; + this.joinString = joinString; + this.type = type; + this.nullable = nullable; + this.defaultValue = defaultValue; + } + + + @Override + public List<Node> getOperandList() { + return ImmutableNullableList.of( table, columnsToMerge ); + } + + + @Override + public List<SqlNode> getSqlOperandList() { + return ImmutableNullableList.of( table, columnsToMerge ); + } + + + @Override + public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { + writer.keyword( "ALTER" ); + writer.keyword( "TABLE" ); + table.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "MERGE" ); + writer.keyword( "COLUMNS" ); + columnsToMerge.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "INTO" ); + targetColumnName.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "WITH" ); + joinString.unparse( writer, leftPrec, rightPrec ); + type.unparse( writer, leftPrec, rightPrec ); + if ( !nullable ) { + writer.keyword( "NOT" ); + writer.keyword( "NULL" ); + } + if ( defaultValue != null ) { + writer.keyword( "DEFAULT" ); + defaultValue.unparse( writer, leftPrec, rightPrec ); + } + } + + + @Override + public void execute( Context context, Statement statement, QueryParameters parameters ) { + CatalogTable catalogTable = getCatalogTable( context, table ); + + if ( catalogTable.entityType != EntityType.ENTITY ) {
throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); + } + + // Make sure that all adapters are of type store (and not source) + for ( int storeId : catalogTable.dataPlacements ) { + getDataStoreInstance( storeId ); + } + + String defaultValue = this.defaultValue == null ? null : this.defaultValue.toString(); + String joinString = this.joinString == null ? "" : this.joinString.toString(); + + try { + DdlManager.getInstance().mergeColumns( + catalogTable, + columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList() ), + targetColumnName.getSimple(), + joinString, + ColumnTypeInformation.fromDataTypeSpec( type ), + nullable, + defaultValue, + statement ); + } catch ( UnknownColumnException e ) { + throw CoreUtil.newContextException( columnsToMerge.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); + } catch ( ColumnAlreadyExistsException e ) { + throw CoreUtil.newContextException( targetColumnName.getPos(), RESOURCE.columnExists( targetColumnName.getSimple() ) ); + } catch ( ColumnNotExistsException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); + } + } + +} + diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 4c0dd16c74..f6096513b5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -208,7 +208,7 @@ import org.polypheny.db.webui.models.TableConstraint; import org.polypheny.db.webui.models.Uml; import org.polypheny.db.webui.models.UnderlyingTables; -import org.polypheny.db.webui.models.requests.BatchUpdateRequest; +import org.polypheny.db.webui.models.requests.*; import org.polypheny.db.webui.models.requests.BatchUpdateRequest.Update; import org.polypheny.db.webui.models.requests.ClassifyAllData; import org.polypheny.db.webui.models.requests.ColumnRequest; @@ -217,13 +217,10 @@ import org.polypheny.db.webui.models.requests.ExploreData; import org.polypheny.db.webui.models.requests.ExploreTables; import org.polypheny.db.webui.models.requests.HubRequest; +import org.polypheny.db.webui.models.requests.MergeColumnsRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest; -import org.polypheny.db.webui.models.requests.QueryExplorationRequest; -import org.polypheny.db.webui.models.requests.QueryRequest; -import org.polypheny.db.webui.models.requests.RelAlgRequest; -import org.polypheny.db.webui.models.requests.SchemaTreeRequest; -import org.polypheny.db.webui.models.requests.UIRequest; +import org.polypheny.db.webui.models.requests.TransferTableRequest; @Slf4j @@ -623,6 +620,43 @@ void createTable( final Context ctx ) { ctx.json( result ); } + /** + * Transfer a table to another schema + */ + void transferTable( final Context ctx ) { + TransferTableRequest request = ctx.bodyAsClass( TransferTableRequest.class ); + Transaction transaction = getTransaction(); + StringBuilder query = new StringBuilder(); + String targetSchemaId = String.format( "\"%s\"", request.targetSchema ); + String tableId = String.format( "\"%s\".\"%s\"", request.sourceSchema, request.table ); + query + .append( "ALTER SCHEMA " ) + .append( targetSchemaId ) + .append( " TRANSFER " ) + .append( tableId ); + if ( request.primaryKeyNames != null && !request.primaryKeyNames.isBlank() ) { + query + .append( " SET PRIMARY KEYS " )
.append( request.primaryKeyNames ); + } + Result result; + + try { + int affectedRows = executeSqlUpdate( transaction, query.toString() ); + result = new Result( affectedRows ).setGeneratedQuery( query.toString() ); + transaction.commit(); + } catch ( QueryExecutionException | TransactionException e ) { + log.error( "Caught exception while transferring a table", e ); + result = new Result( e ).setGeneratedQuery( query.toString() ); + try { + transaction.rollback(); + } catch ( TransactionException ex ) { + log.error( "Could not rollback ALTER SCHEMA TRANSFER statement: {}", ex.getMessage(), ex ); + } + } + ctx.json( result ); + } + /** * Initialize a multipart request, so that the values can be fetched with request.raw().getPart( name ) @@ -1774,6 +1808,84 @@ void dropColumn( final Context ctx ) { } + /** + * Merge multiple columns of a table + */ + void mergeColumns( final Context ctx ) { + MergeColumnsRequest request = ctx.bodyAsClass( MergeColumnsRequest.class ); + Transaction transaction = getTransaction(); + + String[] t = request.tableId.split( "\\." ); + String tableId = String.format( "\"%s\".\"%s\"", t[0], t[1] ); + + boolean nullable = Arrays.stream( request.sourceColumns ).allMatch( c -> c.nullable ); + Integer precision = Arrays.stream( request.sourceColumns ).mapToInt( c -> c.precision ).sum(); + DbColumn newColumn = new DbColumn( request.targetColumnName, "varchar", nullable, precision, null, null ); + newColumn.collectionsType = ""; + + String as = ""; + String dataType = newColumn.dataType; + if ( newColumn.as != null ) { + // for data sources + as = "AS \"" + newColumn.as + "\""; + dataType = ""; + } + + String listOfColumnsToMerge = + Arrays.stream( request.sourceColumns ) + .map( s -> "\"" + s.name + "\"" ) + .collect( Collectors.joining( ", " ) ); + String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) INTO \"%s\" WITH '%s' %s %s", + tableId, listOfColumnsToMerge, newColumn.name, request.joinString, as, dataType ); + + // precision, scale, etc. are only set for columns that do not come from a data source + if ( newColumn.as == null ) { + if ( newColumn.precision != null ) { + query = query + "(" + newColumn.precision; + if ( newColumn.scale != null ) { + query = query + "," + newColumn.scale; + } + query = query + ")"; + } + if ( !newColumn.collectionsType.equals( "" ) ) { + query = query + " " + newColumn.collectionsType; + int dimension = newColumn.dimension == null ? -1 : newColumn.dimension; + int cardinality = newColumn.cardinality == null ?
-1 : newColumn.cardinality; + query = query + String.format( "(%d,%d)", dimension, cardinality ); + } + if ( !newColumn.nullable ) { + query = query + " NOT NULL"; + } + } + + String defaultValue = Arrays + .stream( request.sourceColumns ) + .map( c -> c.defaultValue ) + .filter( s -> s != null && !s.isEmpty() ) + .collect( Collectors.joining( request.joinString ) ); + + if ( !defaultValue.isEmpty() ) { + query = query + " DEFAULT " + String.format( "'%s'", defaultValue ); + } + + Result result; + try { + int affectedRows = executeSqlUpdate( transaction, query ); + transaction.commit(); + result = new Result( affectedRows ).setGeneratedQuery( query ); + } catch ( TransactionException | QueryExecutionException e ) { + log.error( "Caught exception while merging columns", e ); + result = new Result( e ); + try { + transaction.rollback(); + } catch ( TransactionException ex ) { + log.error( "Could not rollback", ex ); + } + } + ctx.json( result ); + } + + /** * Get artificially generated index/foreign key/constraint names for placeholders in the UI */ @@ -4184,4 +4296,4 @@ public static class QueryExecutionException extends Exception { } -} +} \ No newline at end of file diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index daf1436407..6e99ffb1c0 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -241,6 +241,8 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/dropColumn", crud::dropColumn ); + webuiServer.post( "/mergeColumns", crud::mergeColumns ); + webuiServer.post( "/getTables", crud::getTables ); webuiServer.post( "/renameTable", crud::renameTable ); @@ -249,6 +251,8 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/createTable", crud::createTable ); + webuiServer.post( "/transferTable", crud::transferTable ); + webuiServer.post( "/createCollection", crud.languageCrud::createCollection ); webuiServer.get( "/getGeneratedNames", crud::getGeneratedNames ); @@ -408,4 +412,4 @@ private static void enableCORS( Javalin webuiServer ) { } -} +} \ No newline at end of file diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 9461674ca3..3bf8d860ba 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -16,6 +16,7 @@ package org.polypheny.db.webui.crud; +import com.google.gson.JsonObject; import io.javalin.http.Context; import java.sql.ResultSetMetaData; import java.util.ArrayList; diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/TransferTableRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/TransferTableRequest.java new file mode 100644 index 0000000000..2affae415f --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/TransferTableRequest.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019-2021 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.models.requests; + + +public class TransferTableRequest { + + public String table; + public String sourceSchema; + public String targetSchema; + public String primaryKeyNames; + +} \ No newline at end of file diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java new file mode 100644 index 0000000000..2bf4aaf41c --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019-2021 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.models.requests; + + +import org.polypheny.db.webui.models.DbColumn; + + +public class MergeColumnsRequest extends UIRequest { + + public DbColumn[] sourceColumns; + public String targetColumnName; + public String joinString; + // for data sources + public String tableType; + +}
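For reference, example invocations of the two DDL statements this patch introduces, inferred from the grammar in parserImpls.ftl and the query strings built in Crud.java (a sketch only; the schema, table, and column names are hypothetical):

    ALTER SCHEMA "target" TRANSFER "source"."orders" SET PRIMARY KEYS ("orders"."id")

    ALTER TABLE "public"."customer" MERGE COLUMNS ("first_name", "last_name") INTO "full_name" WITH ' ' VARCHAR(510) NOT NULL

The first statement moves the table "orders" from the namespace "source" into "target", optionally declaring the primary key columns to use after the move; the second concatenates the two source columns into a new column "full_name", joined with the given separator literal, and drops the originals.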