Update table data via CSV import (#10313)
* Add identifierFields select for import

* Update rows on import (Internal DB)

* Only allow internal DB to upsert import CSV

* Clear identifierFields when turning off update

* Passing table instead of tableId

* Pass table

* Pass tableType
melohagan authored May 25, 2023
1 parent 504a08f · commit d77568c
Showing 9 changed files with 76 additions and 13 deletions.
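
Taken together, the commit lets a CSV/JSON import update existing rows instead of only appending: the importer can pick one or more identifier fields (internal DB tables only), and any imported row whose identifier values match an existing row overwrites it. A minimal sketch of the request body the builder ends up sending to the import endpoint; the field names and values below are illustrative, not taken from the commit:

```js
// Hypothetical payload for POST /api/tables/:tableId/import after this change.
// With identifierFields set, matching rows are updated (upsert);
// without it, every row is inserted as before.
const body = {
  rows: [
    { email: "jo@example.com", name: "Jo", city: "Leeds" },
    { email: "sam@example.com", name: "Sam", city: "York" },
  ],
  // Column names used as the match key against existing rows (internal DB only)
  identifierFields: ["email"],
}
```
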
@@ -32,6 +32,7 @@
   <Grid
     {API}
     tableId={id}
+    tableType={$tables.selected?.type}
     allowAddRows={!isUsersTable}
     allowDeleteRows={!isUsersTable}
     schemaOverrides={isUsersTable ? userSchemaOverrides : null}

@@ -3,6 +3,7 @@
   import ImportModal from "../modals/ImportModal.svelte"
   export let tableId
+  export let tableType
   export let disabled
   let modal
@@ -12,5 +13,5 @@
     Import
   </ActionButton>
   <Modal bind:this={modal}>
-    <ImportModal {tableId} on:importrows />
+    <ImportModal {tableId} {tableType} on:importrows />
   </Modal>

@@ -4,11 +4,12 @@
   export let disabled = false
-  const { rows, tableId } = getContext("grid")
+  const { rows, tableId, tableType } = getContext("grid")
 </script>

 <ImportButton
   {disabled}
   tableId={$tableId}
+  {tableType}
   on:importrows={rows.actions.refreshData}
 />

@@ -13,15 +13,18 @@
   const dispatch = createEventDispatcher()
   export let tableId
+  export let tableType
   let rows = []
   let allValid = false
   let displayColumn = null
+  let identifierFields = []
   async function importData() {
     try {
       await API.importTableData({
         tableId,
         rows,
+        identifierFields,
       })
       notifications.success("Rows successfully imported")
     } catch (error) {
@@ -45,6 +48,13 @@
   </Body>
   <Layout gap="XS" noPadding>
     <Label grey extraSmall>CSV or JSON file to import</Label>
-    <TableDataImport {tableId} bind:rows bind:allValid bind:displayColumn />
+    <TableDataImport
+      {tableId}
+      {tableType}
+      bind:rows
+      bind:allValid
+      bind:displayColumn
+      bind:identifierFields
+    />
   </Layout>
 </ModalContent>

@@ -1,5 +1,5 @@
 <script>
-  import { Select } from "@budibase/bbui"
+  import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
@@ -9,14 +9,17 @@
   let fileType = null
   let loading = false
+  let updateExistingRows = false
   let validation = {}
   let validateHash = ""
   let schema = null
   let invalidColumns = []
   export let tableId = null
+  export let tableType
   export let rows = []
   export let allValid = false
+  export let identifierFields = []
   const typeOptions = [
     {
@@ -159,6 +162,22 @@
     </div>
   {/each}
 </div>
+{#if tableType === "internal"}
+  <br />
+  <Toggle
+    bind:value={updateExistingRows}
+    on:change={() => (identifierFields = [])}
+    thin
+    text="Update existing rows"
+  />
+  {#if updateExistingRows}
+    <Multiselect
+      label="Identifier field(s)"
+      options={Object.keys(validation)}
+      bind:value={identifierFields}
+    />
+  {/if}
+{/if}
 {#if invalidColumns.length > 0}
   <p class="spectrum-FieldLabel spectrum-FieldLabel--sizeM">
     The following columns are present in the data you wish to import, but do

6 changes: 4 additions & 2 deletions packages/frontend-core/src/api/tables.js
@@ -62,13 +62,15 @@ export const buildTableEndpoints = API => ({
   /**
    * Imports data into an existing table
    * @param tableId the table ID to import to
-   * @param data the data import object
+   * @param rows the data import object
+   * @param identifierFields column names to be used as keys for overwriting existing rows
    */
-  importTableData: async ({ tableId, rows }) => {
+  importTableData: async ({ tableId, rows, identifierFields }) => {
     return await API.post({
       url: `/api/tables/${tableId}/import`,
       body: {
         rows,
+        identifierFields,
       },
     })
   },
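
As a usage sketch (not code from the commit), a caller in the builder might use the updated wrapper like this; the table ID and row values are placeholders:

```js
// Illustrative only: importing rows and upserting on the "email" column.
// `API` is the client built by buildTableEndpoints, as in the file above.
await API.importTableData({
  tableId: "ta_1234567890", // hypothetical table ID
  rows: [{ email: "jo@example.com", name: "Jo" }],
  identifierFields: ["email"], // omit or pass [] for plain insert behaviour
})
```
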
2 changes: 2 additions & 0 deletions packages/frontend-core/src/components/grid/layout/Grid.svelte
@@ -33,6 +33,7 @@
   export let API = null
   export let tableId = null
+  export let tableType = null
   export let schemaOverrides = null
   export let allowAddRows = true
   export let allowAddColumns = true
@@ -62,6 +63,7 @@
     rand,
     config,
     tableId: tableIdStore,
+    tableType,
     schemaOverrides: schemaOverridesStore,
   }
   context = { ...context, ...createEventManagers() }

8 changes: 2 additions & 6 deletions packages/server/src/api/controllers/table/internal.ts
@@ -186,11 +186,7 @@ export async function destroy(ctx: any) {
 export async function bulkImport(ctx: any) {
   const db = context.getAppDB()
   const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
-  await handleDataImport(ctx.user, table, rows)
-
-  // Ensure auto id and other table updates are persisted
-  await db.put(table)
-
+  const { rows, identifierFields } = ctx.request.body
+  await handleDataImport(ctx.user, table, rows, identifierFields)
   return table
 }
33 changes: 32 additions & 1 deletion packages/server/src/api/controllers/table/utils.ts
@@ -149,7 +149,12 @@ export function importToRows(
   return finalData
 }

-export async function handleDataImport(user: any, table: any, rows: any) {
+export async function handleDataImport(
+  user: any,
+  table: any,
+  rows: any,
+  identifierFields: Array<string> = []
+) {
   const schema: unknown = table.schema

   if (!rows || !isRows(rows) || !isSchema(schema)) {
@@ -161,6 +166,32 @@ export async function handleDataImport(user: any, table: any, rows: any) {

   let finalData: any = importToRows(data, table, user)

+  //Set IDs of finalData to match existing row if an update is expected
+  if (identifierFields.length > 0) {
+    const allDocs = await db.allDocs(
+      getRowParams(table._id, null, {
+        include_docs: true,
+      })
+    )
+    allDocs.rows
+      .map(existingRow => existingRow.doc)
+      .forEach((doc: any) => {
+        finalData.forEach((finalItem: any) => {
+          let match = true
+          for (const field of identifierFields) {
+            if (finalItem[field] !== doc[field]) {
+              match = false
+              break
+            }
+          }
+          if (match) {
+            finalItem._id = doc._id
+            finalItem._rev = doc._rev
+          }
+        })
+      })
+  }
+
   await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
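
To make the matching behaviour concrete, here is a small standalone sketch (not code from the commit, sample data assumed) of the same idea: any incoming row whose identifier fields all equal those of an existing document inherits that document's `_id`/`_rev`, so the later `bulkDocs` call updates it rather than creating a duplicate.

```js
// Standalone illustration of the upsert matching above, with made-up data.
const existingDocs = [
  { _id: "ro_1", _rev: "1-a", email: "jo@example.com", name: "Jo" },
]
const incoming = [
  { email: "jo@example.com", name: "Joanna" }, // matches -> update
  { email: "sam@example.com", name: "Sam" },   // no match -> new row
]
const identifierFields = ["email"]

for (const row of incoming) {
  const match = existingDocs.find(doc =>
    identifierFields.every(field => doc[field] === row[field])
  )
  if (match) {
    // Carry over the existing document's identity so bulkDocs overwrites it
    row._id = match._id
    row._rev = match._rev
  }
}
// incoming[0] now carries ro_1's _id/_rev and would overwrite it on bulkDocs;
// incoming[1] matched nothing in this sketch and would be created as new.
```

Rows that match no existing document are left as they were and are created as new rows, and existing rows that no imported row matches are untouched.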
