Workaround of SQLBoiler issue/328 (#11)
tiendc authored Dec 7, 2024
1 parent 954aeb1 commit ba37990
Showing 3 changed files with 259 additions and 8 deletions.
4 changes: 3 additions & 1 deletion README.md
@@ -12,7 +12,9 @@ Additional functionalities:
- modelSlice.`InsertIgnoreAll`
- modelSlice.`InsertIgnoreAllByPage`: If you need ACID, call this function within a transaction.
- modelSlice.`UpsertAll`
- modelSlice.`UpsertAllOnConflictColumns`: MySQL only. Workaround for [issues/328](https://github.com/volatiletech/sqlboiler/issues/328).
- modelSlice.`UpsertOnConflictColumns`: MySQL only. Workaround for [issues/328](https://github.com/volatiletech/sqlboiler/issues/328).
- modelSlice.`UpsertAllByPage`: If you need ACID, call this function within a transaction.
- modelSlice.`UpdateAllByPage`: If you need ACID, call this function within a transaction.
- modelSlice.`DeleteAll`
- modelSlice.`DeleteAllByPage`: If you need ACID, call this function within a transaction.
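For orientation, here is a minimal usage sketch of the bulk functions listed above, including the two new workaround functions. The `models` package, the `UserRole` model, and its column names are hypothetical stand-ins for whatever SQLBoiler generates in a real project; only the function names and signatures come from the templates in this commit.

```go
package example

import (
	"context"
	"database/sql"
	"log"

	"github.com/volatiletech/sqlboiler/v4/boil"

	"yourproject/models" // hypothetical generated package
)

func upsertRoles(ctx context.Context, db *sql.DB) {
	// Hypothetical model with an AUTO_INCREMENT `id` PK and a composite
	// unique key on (`user_id`, `role_id`).
	rows := models.UserRoleSlice{
		{UserID: 1, RoleID: 10, Note: "admin"},
		{UserID: 1, RoleID: 20, Note: "editor"},
	}

	// Per the template notes, keep the AUTO_INCREMENT column out of both column sets.
	updateCols := boil.Blacklist("id")
	insertCols := boil.Whitelist("user_id", "role_id", "note")

	// Plain bulk upsert: conflict detection relies on single unique columns.
	if _, err := rows.UpsertAll(ctx, db, updateCols, insertCols); err != nil {
		log.Fatal(err)
	}

	// Workaround for issues/328: name the composite unique key columns explicitly.
	if _, err := rows.UpsertAllOnConflictColumns(ctx, db,
		[]string{"user_id", "role_id"}, updateCols, insertCols); err != nil {
		log.Fatal(err)
	}

	// Single-row variant of the same workaround.
	row := &models.UserRole{UserID: 2, RoleID: 10, Note: "viewer"}
	if err := row.UpsertOnConflictColumns(ctx, db,
		[]string{"user_id", "role_id"}, updateCols, insertCols); err != nil {
		log.Fatal(err)
	}
}
```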
47 changes: 40 additions & 7 deletions templates/boilv4/mysql/111_bulk_upsert.go.tpl
@@ -4,20 +4,53 @@
{{- $alias := .Aliases.Table .Table.Name -}}
{{- $schemaTable := .Table.Name | .SchemaTable}}

// UpsertAll inserts or updates all rows.
// Currently it doesn't support "NoContext" and "NoRowsAffected".
// IMPORTANT: this will calculate the widest columns from all items in the slice; be careful if you rely on default column values.
// IMPORTANT: any AUTO_INCREMENT column, including the PK, should be excluded from `updateColumns` and `insertColumns`.
func (o {{$alias.UpSingular}}Slice) UpsertAll(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) (int64, error) {
return o.upsertAllOnConflictColumns(ctx, exec, nil, updateColumns, insertColumns)
}

// UpsertAllOnConflictColumns upserts multiple rows, accepting custom conflict columns so callers can bypass
// the single-column conflict check (see bug https://github.com/volatiletech/sqlboiler/issues/328).
// SQLBoiler checks for a conflict on a single column only, which is not correct since a MySQL PK or UNIQUE index
// can span multiple columns.
// This function allows passing multiple conflict columns, but it cannot verify that they are correct,
// so use it at your own risk.
func (o {{$alias.UpSingular}}Slice) UpsertAllOnConflictColumns(ctx context.Context, exec boil.ContextExecutor, conflictColumns []string, updateColumns, insertColumns boil.Columns) (int64, error) {
return o.upsertAllOnConflictColumns(ctx, exec, conflictColumns, updateColumns, insertColumns)
}

func (o {{$alias.UpSingular}}Slice) upsertAllOnConflictColumns(ctx context.Context, exec boil.ContextExecutor, conflictColumns []string, updateColumns, insertColumns boil.Columns) (int64, error) {
if len(o) == 0 {
return 0, nil
}

checkNZUniques := len(conflictColumns) == 0
if len(conflictColumns) > 0 {
mapConflictColumns := make(map[string]struct{}, len(conflictColumns))
for _, col := range conflictColumns {
for _, existCol := range {{$alias.DownSingular}}AllColumns {
if col == existCol {
mapConflictColumns[col] = struct{}{}
break
}
}
}
if len(mapConflictColumns) <= 1 {
return 0, errors.New("custom conflict columns must be 2 columns or more")
}
}

// Calculate the widest columns from all rows that need to be upserted
insertCols := make(map[string]struct{}, 10)
for _, row := range o {
if checkNZUniques {
nzUniques := queries.NonZeroDefaultSet(mySQL{{$alias.UpSingular}}UniqueColumns, row)
if len(nzUniques) == 0 {
return 0, errors.New("cannot upsert with a table that cannot conflict on a unique column")
}
}
insert, _ := insertColumns.InsertColumnSet(
{{$alias.DownSingular}}AllColumns,
@@ -28,7 +61,7 @@ func (o {{$alias.UpSingular}}Slice) UpsertAll(ctx context.Context, exec boil.Con
for _, col := range insert {
insertCols[col] = struct{}{}
}
if len(insertCols) == len({{$alias.DownSingular}}AllColumns) || (insertColumns.IsWhitelist() && len(insertCols) == len(insertColumns.Cols)) {
break
}
}
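The "widest columns" warning above is easiest to see in isolation. The sketch below is not template code, just an illustration of the union the loop computes; the column names are made up.

```go
package main

import "fmt"

func main() {
	// Columns each row of the slice explicitly sets (non-default values).
	rowCols := [][]string{
		{"user_id", "role_id", "note"}, // row 1 sets note
		{"user_id", "role_id"},         // row 2 expects the DB default for note
	}

	// UpsertAll unions these sets into a single INSERT column list for the whole batch.
	union := make(map[string]struct{})
	for _, cols := range rowCols {
		for _, c := range cols {
			union[c] = struct{}{}
		}
	}

	// The union includes note, so row 2 sends its zero value for note instead of
	// letting MySQL apply the column default - hence the warning about default values.
	fmt.Println(len(union)) // 3
}
```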
216 changes: 216 additions & 0 deletions templates/boilv4/mysql/114_upsert_on_conflict_columns.go.tpl
@@ -0,0 +1,216 @@
{{- if or (not .Table.IsView) .Table.ViewCapabilities.CanUpsert -}}
{{- $alias := .Aliases.Table .Table.Name}}
{{- $schemaTable := .Table.Name | .SchemaTable}}

// UpsertOnConflictColumns attempts an insert using an executor, and does an update or ignore on conflict with the specified columns.
// This is copied from the built-in Upsert() function, with an extra parameter for the conflict columns.
// See bug https://github.com/volatiletech/sqlboiler/issues/328.
// SQLBoiler checks for a conflict on a single column only, which is not correct since a MySQL PK or UNIQUE index
// can span multiple columns.
// This function allows passing multiple conflict columns, but it cannot verify that they are correct,
// so use it at your own risk.
// IMPORTANT: any AUTO_INCREMENT column, including the PK, should be excluded from `updateColumns` and `insertColumns`.
func (o *{{$alias.UpSingular}}) UpsertOnConflictColumns({{if .NoContext}}exec boil.Executor{{else}}ctx context.Context, exec boil.ContextExecutor{{end}}, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {
if o == nil {
return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
}

{{- template "timestamp_upsert_helper" . }}

{{if not .NoHooks -}}
if err := o.doBeforeUpsertHooks({{if not .NoContext}}ctx, {{end -}} exec); err != nil {
return err
}
{{- end}}

nzDefaults := queries.NonZeroDefaultSet({{$alias.DownSingular}}ColumnsWithDefault, o)

nzUniques := make([]string, 0, len(conflictColumns))
for _, col := range conflictColumns {
for _, existCol := range {{$alias.DownSingular}}AllColumns {
if col == existCol {
nzUniques = append(nzUniques, col)
break
}
}
}
if len(nzUniques) <= 1 {
return errors.New("custom conflict columns must be 2 columns or more")
}

// Build cache key in-line uglily - mysql vs psql problems
buf := strmangle.GetBuffer()
buf.WriteString(strconv.Itoa(updateColumns.Kind))
for _, c := range updateColumns.Cols {
buf.WriteString(c)
}
buf.WriteByte('.')
buf.WriteString(strconv.Itoa(insertColumns.Kind))
for _, c := range insertColumns.Cols {
buf.WriteString(c)
}
buf.WriteByte('.')
for _, c := range nzDefaults {
buf.WriteString(c)
}
buf.WriteByte('.')
for _, c := range nzUniques {
buf.WriteString(c)
}
key := buf.String()
strmangle.PutBuffer(buf)

{{$alias.DownSingular}}UpsertCacheMut.RLock()
cache, cached := {{$alias.DownSingular}}UpsertCache[key]
{{$alias.DownSingular}}UpsertCacheMut.RUnlock()

var err error

if !cached {
insert, _ := insertColumns.InsertColumnSet(
{{$alias.DownSingular}}AllColumns,
{{$alias.DownSingular}}ColumnsWithDefault,
{{$alias.DownSingular}}ColumnsWithoutDefault,
nzDefaults,
)

update := updateColumns.UpdateColumnSet(
{{$alias.DownSingular}}AllColumns,
{{$alias.DownSingular}}PrimaryKeyColumns,
)
{{if filterColumnsByAuto true .Table.Columns }}
insert = strmangle.SetComplement(insert, {{$alias.DownSingular}}GeneratedColumns)
update = strmangle.SetComplement(update, {{$alias.DownSingular}}GeneratedColumns)
{{- end }}

if !updateColumns.IsNone() && len(update) == 0 {
return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
}

ret := strmangle.SetComplement({{$alias.DownSingular}}AllColumns, strmangle.SetIntersect(insert, update))

cache.query = buildUpsertQueryMySQL(dialect, "{{$schemaTable}}", update, insert)
cache.retQuery = fmt.Sprintf(
"SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE %s",
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
strmangle.WhereClause("{{.LQ}}", "{{.RQ}}", 0, nzUniques),
)

cache.valueMapping, err = queries.BindMapping({{$alias.DownSingular}}Type, {{$alias.DownSingular}}Mapping, insert)
if err != nil {
return err
}
if len(ret) != 0 {
cache.retMapping, err = queries.BindMapping({{$alias.DownSingular}}Type, {{$alias.DownSingular}}Mapping, ret)
if err != nil {
return err
}
}
}

value := reflect.Indirect(reflect.ValueOf(o))
vals := queries.ValuesFromMapping(value, cache.valueMapping)
var returns []interface{}
if len(cache.retMapping) != 0 {
returns = queries.PtrsFromMapping(value, cache.retMapping)
}

{{if .NoContext -}}
if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, cache.query)
fmt.Fprintln(boil.DebugWriter, vals)
}
{{else -}}
if boil.IsDebug(ctx) {
writer := boil.DebugWriterFrom(ctx)
fmt.Fprintln(writer, cache.query)
fmt.Fprintln(writer, vals)
}
{{end -}}

{{$canLastInsertID := .Table.CanLastInsertID -}}
{{if $canLastInsertID -}}
{{if .NoContext -}}
result, err := exec.Exec(cache.query, vals...)
{{else -}}
result, err := exec.ExecContext(ctx, cache.query, vals...)
{{end -}}
{{else -}}
{{if .NoContext -}}
_, err = exec.Exec(cache.query, vals...)
{{else -}}
_, err = exec.ExecContext(ctx, cache.query, vals...)
{{end -}}
{{- end}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
}

{{if $canLastInsertID -}}
var lastID int64
{{- end}}
var uniqueMap []uint64
var nzUniqueCols []interface{}

if len(cache.retMapping) == 0 {
goto CacheNoHooks
}

{{if $canLastInsertID -}}
lastID, err = result.LastInsertId()
if err != nil {
return ErrSyncFail
}

{{$colName := index .Table.PKey.Columns 0 -}}
{{- $col := .Table.GetColumn $colName -}}
{{- $colTitled := $alias.Column $colName}}
o.{{$colTitled}} = {{$col.Type}}(lastID)
if lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == {{$alias.DownSingular}}Mapping["{{$colName}}"] {
goto CacheNoHooks
}
{{- end}}

uniqueMap, err = queries.BindMapping({{$alias.DownSingular}}Type, {{$alias.DownSingular}}Mapping, nzUniques)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to retrieve unique values for {{.Table.Name}}")
}
nzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)

{{if .NoContext -}}
if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, cache.retQuery)
fmt.Fprintln(boil.DebugWriter, nzUniqueCols...)
}
{{else -}}
if boil.IsDebug(ctx) {
writer := boil.DebugWriterFrom(ctx)
fmt.Fprintln(writer, cache.retQuery)
fmt.Fprintln(writer, nzUniqueCols...)
}
{{end -}}

{{if .NoContext -}}
err = exec.QueryRow(cache.retQuery, nzUniqueCols...).Scan(returns...)
{{else -}}
err = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)
{{end -}}
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
}

CacheNoHooks:
if !cached {
{{$alias.DownSingular}}UpsertCacheMut.Lock()
{{$alias.DownSingular}}UpsertCache[key] = cache
{{$alias.DownSingular}}UpsertCacheMut.Unlock()
}

{{if not .NoHooks -}}
return o.doAfterUpsertHooks({{if not .NoContext}}ctx, {{end -}} exec)
{{- else -}}
return nil
{{- end}}
}

{{end}}
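To make the cached statements concrete, here is an assumed shape of the two queries for a hypothetical `user_roles` table with a composite unique key on (`user_id`, `role_id`). This is reconstructed from `buildUpsertQueryMySQL` and the `retQuery` format string above, not captured from the generator, so treat the table and column names as illustrative.

```go
package example

// Assumed shape of cache.query for the hypothetical table: a MySQL upsert whose
// duplicate handling is triggered by whichever unique key the inserted values hit.
const upsertQuery = "INSERT INTO `user_roles` (`user_id`,`role_id`,`note`) " +
	"VALUES (?,?,?) ON DUPLICATE KEY UPDATE `note` = VALUES(`note`)"

// Assumed shape of cache.retQuery: defaulted columns are read back using the
// caller-supplied conflict columns in the WHERE clause, rather than the single
// unique column the stock Upsert would have inferred (the issues/328 problem).
const retQuery = "SELECT `created_at`,`updated_at` FROM `user_roles` " +
	"WHERE `user_id`=? AND `role_id`=?"
```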
