Change response for tables.add and tables.import #3807

Merged (6 commits) on Sep 9, 2024
27 changes: 16 additions & 11 deletions db/sql/00_msar.sql
@@ -2838,8 +2838,8 @@ DROP FUNCTION IF EXISTS msar.add_mathesar_table(oid, text, jsonb, jsonb, text);

CREATE OR REPLACE FUNCTION
msar.add_mathesar_table(sch_id oid, tab_name text, col_defs jsonb, con_defs jsonb, comment_ text)
-RETURNS oid AS $$/*
-Add a table, with a default id column, returning the OID of the created table.
+RETURNS jsonb AS $$/*
+Add a table, with a default id column, returning the OID & name of the created table.

Args:
sch_id: The OID of the schema where the table will be created.
@@ -2886,7 +2886,10 @@ BEGIN
PERFORM __msar.add_table(fq_table_name, column_defs, constraint_defs);
created_table_id := fq_table_name::regclass::oid;
PERFORM msar.comment_on_table(created_table_id, comment_);
-RETURN created_table_id;
+RETURN jsonb_build_object(
+'oid', created_table_id::bigint,
+'name', relname
+) FROM pg_catalog.pg_class WHERE oid = created_table_id;
END;
$$ LANGUAGE plpgsql;

@@ -2904,17 +2907,18 @@ msar.prepare_table_for_import(
comment_ text
) RETURNS jsonb AS $$/*
Add a table, with a default id column, returning a JSON object containing
-a properly formatted SQL statement to carry out `COPY FROM` and also contains table_oid of the created table.
+a properly formatted SQL statement to carry out `COPY FROM`, table_oid & table_name of the created table.

Each returned JSON object will have the form:
{
"copy_sql": <str>,
-"table_oid": <int>
+"table_oid": <int>,
+"table_name": <str>
}

Args:
sch_id: The OID of the schema where the table will be created.
-tab_name: The unquoted name for the new table.
+tab_name (optional): The unquoted name for the new table.
col_defs: The columns for the new table, in order.
header: Whether or not the file contains a header line with the names of each column in the file.
delimiter: The character that separates columns within each row (line) of the file.
@@ -2932,7 +2936,7 @@ DECLARE
copy_sql text;
BEGIN
-- Create string table
-rel_id := msar.add_mathesar_table(sch_id, tab_name, col_defs, NULL, comment_);
+rel_id := msar.add_mathesar_table(sch_id, tab_name, col_defs, NULL, comment_) ->> 'oid';
-- Get unquoted schema and table name for the created table
SELECT nspname, relname INTO sch_name, rel_name
FROM pg_catalog.pg_class AS pgc
@@ -2956,8 +2960,9 @@ BEGIN
copy_sql := format('COPY %I.%I (%s) FROM STDIN CSV %s', sch_name, rel_name, col_names_sql, options_sql);
RETURN jsonb_build_object(
'copy_sql', copy_sql,
-'table_oid', rel_id
-);
+'table_oid', rel_id::bigint,
+'table_name', relname
+) FROM pg_catalog.pg_class WHERE oid = rel_id;
END;
$$ LANGUAGE plpgsql;

@@ -3660,7 +3665,7 @@ The elements of the mapping_columns array must have the form
DECLARE
added_table_id oid;
BEGIN
-added_table_id := msar.add_mathesar_table(sch_id, tab_name, NULL, NULL, NULL);
+added_table_id := msar.add_mathesar_table(sch_id, tab_name, NULL, NULL, NULL) ->> 'oid';
PERFORM msar.add_foreign_key_column(column_name, added_table_id, referent_table_oid)
FROM jsonb_to_recordset(mapping_columns) AS x(column_name text, referent_table_oid oid);
RETURN added_table_id;
@@ -3708,7 +3713,7 @@ BEGIN
extracted_col_defs,
extracted_con_defs,
format('Extracted from %s', __msar.get_qualified_relation_name(tab_id))
-);
+) ->> 'oid';
-- Create a new fkey column and foreign key linking the original table to the extracted one.
fkey_attnum := msar.add_foreign_key_column(fkey_name, tab_id, extracted_table_id);
-- Insert the data from the original table's columns into the extracted columns, and add
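
As context for reviewers, here is a minimal sketch of what the changed `msar.add_mathesar_table` now returns, assuming a psycopg 3 connection to a database where the `msar` schema is installed. The connection string, schema OID, and table name are illustrative placeholders, not values from this PR.

```python
# Illustrative only: exercising the new jsonb return value of
# msar.add_mathesar_table (previously a bare oid).
import psycopg

# Connection parameters are placeholders, not Mathesar configuration.
with psycopg.connect("dbname=mathesar") as conn:
    result = conn.execute(
        "SELECT msar.add_mathesar_table(%s::oid, %s, null, null, null)",
        (2200, "example_table"),  # 2200 is conventionally the OID of the public schema
    ).fetchone()[0]
    # psycopg decodes the jsonb result into a dict containing both keys.
    print(result)  # e.g. {'oid': 1964474, 'name': 'example_table'}
```
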
7 changes: 4 additions & 3 deletions db/tables/operations/create.py
@@ -30,7 +30,7 @@ def create_mathesar_table(engine, table_name, schema_oid, columns=[], constraint
json.dumps(columns),
json.dumps(constraints),
comment
-).fetchone()[0]
+).fetchone()[0]["oid"]


def create_table_on_database(
@@ -52,7 +52,7 @@ def create_table_on_database(
comment: The comment for the new table. (optional)

Returns:
-Returns the OID of the created table.
+Returns the OID and name of the created table.
"""
return exec_msar_func(
conn,
@@ -122,7 +122,8 @@ def prepare_table_for_import(
).fetchone()[0]
return (
import_info['copy_sql'],
-import_info['table_oid']
+import_info['table_oid'],
+import_info['table_name']
)


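
A hedged sketch of how a caller adapts to the new return value of `create_table_on_database`; the wrapper function is hypothetical, and the positional argument order follows the test mock further down rather than anything shown directly in this hunk.

```python
# Hypothetical caller: create_table_on_database now returns a mapping
# ({"oid": ..., "name": ...}) instead of a bare OID.
from db.tables.operations.create import create_table_on_database

def add_table_and_report(conn, schema_oid):
    table_info = create_table_on_database(
        "orders",    # table_name
        schema_oid,  # OID of the target schema
        conn,        # psycopg connection
        [],          # column definitions (argument order mirrors the test mock)
        [],          # constraint definitions
        None,        # comment
    )
    # Code that previously did `table_oid = create_table_on_database(...)`
    # must now unpack the mapping.
    return table_info["oid"], table_info["name"]
```
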
4 changes: 2 additions & 2 deletions db/tables/operations/import_.py
@@ -25,7 +25,7 @@ def import_csv(data_file_id, table_name, schema_oid, conn, comment=None):
with open(file_path, 'rb') as csv_file:
csv_reader = get_sv_reader(csv_file, header, dialect)
column_names = process_column_names(csv_reader.fieldnames)
-copy_sql, table_oid = prepare_table_for_import(
+copy_sql, table_oid, table_name = prepare_table_for_import(
table_name,
schema_oid,
column_names,
@@ -44,7 +44,7 @@ def import_csv(data_file_id, table_name, schema_oid, conn, comment=None):
conversion_encoding,
conn
)
-return table_oid
+return {"oid": table_oid, "name": table_name}


def insert_csv_records(
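
Similarly, a sketch (not part of this PR) of how a caller consumes the new `import_csv` return value; the wrapper function and its identifiers are illustrative.

```python
# Illustrative only: import_csv now returns {"oid": ..., "name": ...}
# rather than a bare table OID.
from db.tables.operations.import_ import import_csv

def import_and_summarize(conn, data_file_id, schema_oid):
    result = import_csv(data_file_id, "imported_table", schema_oid, conn)
    # Returning the name matters because the table name is now optional on
    # import and may be generated or adjusted server-side.
    return f"Created table {result['name']} (oid {result['oid']})"
```
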
1 change: 1 addition & 0 deletions docs/docs/api/rpc.md
@@ -127,6 +127,7 @@ To use an RPC function:
- get_import_preview
- list_joinable
- TableInfo
+- AddedTableInfo
- SettableTableInfo
- JoinableTableRecord
- JoinableTableInfo
24 changes: 18 additions & 6 deletions mathesar/rpc/tables/base.py
@@ -51,6 +51,18 @@ class TableInfo(TypedDict):
current_role_owns: bool


+class AddedTableInfo(TypedDict):
+"""
+Information about a newly created table.
+
+Attributes:
+oid: The `oid` of the table in the schema.
+name: The name of the table.
+"""
+oid: int
+name: str
+
+
class SettableTableInfo(TypedDict):
"""
Information about a table, restricted to settable fields.
@@ -191,7 +203,7 @@ def add(
constraint_data_list: list[CreatableConstraintInfo] = [],
comment: str = None,
**kwargs
-) -> int:
+) -> AddedTableInfo:
"""
Add a table with a default id column.

@@ -204,7 +216,7 @@ def add(
comment: The comment for the new table.

Returns:
-The `oid` of the created table.
+The `oid` & `name` of the created table.
"""
user = kwargs.get(REQUEST_KEY).user
with connect(database_id, user) as conn:
@@ -264,24 +276,24 @@ def patch(
def import_(
*,
data_file_id: int,
-table_name: str,
schema_oid: int,
database_id: int,
+table_name: str = None,
comment: str = None,
**kwargs
-) -> int:
+) -> AddedTableInfo:
"""
Import a CSV/TSV into a table.

Args:
data_file_id: The Django id of the DataFile containing desired CSV/TSV.
-table_name: Name of the table to be imported.
schema_oid: Identity of the schema in the user's database.
database_id: The Django id of the database containing the table.
+table_name: Name of the table to be imported.
comment: The comment for the new table.

Returns:
-The `oid` of the created table.
+The `oid` and `name` of the created table.
"""
user = kwargs.get(REQUEST_KEY).user
with connect(database_id, user) as conn:
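
To show the client-visible effect of the `AddedTableInfo` change, here is a sketch of a JSON-RPC call to `tables.add` and the response shape it now returns. The endpoint URL, session handling, and helper function are assumptions made for the example, not part of this PR.

```python
# Hypothetical JSON-RPC client illustrating the AddedTableInfo response
# now returned by tables.add (and tables.import_). URL and auth are assumed.
import requests

def rpc_call(session, method, params, url="http://localhost:8000/api/rpc/v0/"):
    payload = {"jsonrpc": "2.0", "id": 0, "method": method, "params": params}
    response = session.post(url, json=payload)
    response.raise_for_status()
    return response.json()["result"]

session = requests.Session()  # assumed to already carry a logged-in session cookie
added = rpc_call(
    session,
    "tables.add",
    {"table_name": "newtable", "schema_oid": 2200, "database_id": 11},
)
# Both methods now return an object with the table's oid and name rather
# than a bare integer OID.
assert set(added) == {"oid", "name"}
print(added["oid"], added["name"])
```
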
12 changes: 6 additions & 6 deletions mathesar/tests/rpc/tables/test_t_base.py
@@ -148,11 +148,11 @@ def mock_connect(_database_id, user):
def mock_table_add(table_name, _schema_oid, conn, column_data_list, constraint_data_list, comment):
if _schema_oid != schema_oid:
raise AssertionError('incorrect parameters passed')
-return 1964474
+return {"oid": 1964474, "name": "newtable"}
monkeypatch.setattr(tables.base, 'connect', mock_connect)
monkeypatch.setattr(tables.base, 'create_table_on_database', mock_table_add)
-actual_table_oid = tables.add(table_name='newtable', schema_oid=2200, database_id=11, request=request)
-assert actual_table_oid == 1964474
+actual_table_info = tables.add(table_name='newtable', schema_oid=2200, database_id=11, request=request)
+assert actual_table_info == {"oid": 1964474, "name": "newtable"}


def test_tables_patch(rf, monkeypatch):
@@ -215,17 +215,17 @@ def mock_connect(_database_id, user):
def mock_table_import(_data_file_id, table_name, _schema_oid, conn, comment):
if _schema_oid != schema_oid and _data_file_id != data_file_id:
raise AssertionError('incorrect parameters passed')
-return 1964474
+return {"oid": 1964474, "name": "imported_table"}
monkeypatch.setattr(tables.base, 'connect', mock_connect)
monkeypatch.setattr(tables.base, 'import_csv', mock_table_import)
-imported_table_oid = tables.import_(
+imported_table_info = tables.import_(
data_file_id=10,
table_name='imported_table',
schema_oid=2200,
database_id=11,
request=request
)
-assert imported_table_oid == 1964474
+assert imported_table_info == {"oid": 1964474, "name": "imported_table"}


def test_tables_preview(rf, monkeypatch):