//
// DISCLAIMER
//
// Copyright 2025 ArangoDB GmbH, Cologne, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Copyright holder is ArangoDB GmbH, Cologne, Germany
//

package arangodb

import (
	"context"

	"github.com/arangodb/go-driver/v2/connection"
)

const (
	QueryFromPrefix  = "fromPrefix"
	QueryToPrefix    = "toPrefix"
	QueryComplete    = "complete"
	QueryOnDuplicate = "onDuplicate"
)

// CollectionDocumentImport imports documents in bulk into a collection.
type CollectionDocumentImport interface {

	// ImportDocuments imports one or more documents into the collection.
	// The document data is passed as a raw string in the documents argument; its format must match
	// the given documentsType (JSON lines, a JSON array, tabular data, or auto-detected).
	// Import statistics are returned.
	// To wait until all documents have been synced to disk, prepare a context with `WithWaitForSync`.
	// To return details about documents that could not be imported, prepare a context with `WithImportDetails`.
	ImportDocuments(ctx context.Context, documents string, documentsType CollectionDocumentImportDocumentType) (CollectionDocumentImportResponse, error)

	// ImportDocumentsWithOptions acts like ImportDocuments, but additionally applies the given import options.
	ImportDocumentsWithOptions(ctx context.Context, documents string, documentsType CollectionDocumentImportDocumentType, options *CollectionDocumentImportOptions) (CollectionDocumentImportResponse, error)
}
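
// Example usage (a minimal sketch; it assumes a collection value `col` that satisfies
// CollectionDocumentImport, and the document data shown is purely illustrative):
//
//	payload := `{"_key":"john","name":"John Smith","age":35}` + "\n" +
//		`{"_key":"katie","name":"Katie Foster","age":28}`
//	resp, err := col.ImportDocuments(ctx, payload, ImportDocumentTypeDocuments)
//	if err != nil {
//		// handle the import error
//	}
//	// resp.Created reports how many documents were stored.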

type CollectionDocumentImportResponse struct {
	CollectionDocumentImportStatistics `json:",inline"`
}

// CollectionDocumentImportRequest holds the query parameters for the /import endpoint.
type CollectionDocumentImportRequest struct {
	CollectionDocumentImportOptions `json:",inline"`
	Collection                      *string                               `json:"collection,inline"`
	Type                            *CollectionDocumentImportDocumentType `json:"type,inline"`
}

// CollectionDocumentImportOptions holds the optional settings that control the document import process.
type CollectionDocumentImportOptions struct {
	// FromPrefix is an optional prefix for the values in _from attributes. If specified, the value is automatically
	// prepended to each _from input value. This allows specifying just the keys for _from.
	FromPrefix *string `json:"fromPrefix,omitempty"`
	// ToPrefix is an optional prefix for the values in _to attributes. If specified, the value is automatically
	// prepended to each _to input value. This allows specifying just the keys for _to.
	ToPrefix *string `json:"toPrefix,omitempty"`
	// Overwrite is a flag that, if set, causes all data in the collection to be removed prior to the import.
	// Note that any existing index definitions are preserved.
	Overwrite *bool `json:"overwrite,omitempty"`
	// OnDuplicate controls what action is carried out in case of a unique key constraint violation.
	// Possible values are:
	// - ImportOnDuplicateError
	// - ImportOnDuplicateUpdate
	// - ImportOnDuplicateReplace
	// - ImportOnDuplicateIgnore
	OnDuplicate *CollectionDocumentImportOnDuplicate `json:"onDuplicate,omitempty"`
	// Complete is a flag that, if set, makes the whole import fail if any error occurs.
	// Otherwise the import continues even if some documents cannot be imported.
	Complete *bool `json:"complete,omitempty"`

	// WithWaitForSync waits until the import operation has been synced to disk.
	WithWaitForSync *bool
}
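
// Example of building import options (a minimal sketch; the field values shown here are
// illustrative assumptions, not defaults):
//
//	complete := true
//	waitForSync := true
//	fromPrefix := "users/"
//	opts := CollectionDocumentImportOptions{
//		FromPrefix:      &fromPrefix,
//		Complete:        &complete,
//		WithWaitForSync: &waitForSync,
//	}
//	// Pass &opts to ImportDocumentsWithOptions; nil fields are simply omitted
//	// from the query string by modifyRequest.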

type CollectionDocumentImportDocumentType string

const (
	// ImportDocumentTypeDocuments
	//   Each line is expected to be one JSON object.
	//   Example:
	//     {"_key":"john","name":"John Smith","age":35}
	//     {"_key":"katie","name":"Katie Foster","age":28}
	ImportDocumentTypeDocuments CollectionDocumentImportDocumentType = CollectionDocumentImportDocumentType("documents")

	// ImportDocumentTypeArray
	//   The request body is expected to be a JSON array of objects.
	//   Example:
	//     [
	//       {"_key":"john","name":"John Smith","age":35},
	//       {"_key":"katie","name":"Katie Foster","age":28}
	//     ]
	ImportDocumentTypeArray CollectionDocumentImportDocumentType = CollectionDocumentImportDocumentType("array")

	// ImportDocumentTypeAuto
	//   Automatically determines the type: either documents (ImportDocumentTypeDocuments) or array (ImportDocumentTypeArray).
	ImportDocumentTypeAuto CollectionDocumentImportDocumentType = CollectionDocumentImportDocumentType("auto")

	// ImportDocumentTypeTabular
	//   The first line is an array of strings that defines the attribute keys. The subsequent lines are arrays with the attribute values.
	//   The keys and values are matched by the order of the array elements.
	//   Example:
	//     ["_key","name","age"]
	//     ["john","John Smith",35]
	//     ["katie","Katie Foster",28]
	ImportDocumentTypeTabular CollectionDocumentImportDocumentType = CollectionDocumentImportDocumentType("")
)

type CollectionDocumentImportOnDuplicate string

const (
	// ImportOnDuplicateError will not import the current document because of the unique key constraint violation.
	// This is the default setting.
	ImportOnDuplicateError CollectionDocumentImportOnDuplicate = CollectionDocumentImportOnDuplicate("error")
	// ImportOnDuplicateUpdate will update an existing document in the database with the data specified in the request.
	// Attributes of the existing document that are not present in the request will be preserved.
	ImportOnDuplicateUpdate CollectionDocumentImportOnDuplicate = CollectionDocumentImportOnDuplicate("update")
	// ImportOnDuplicateReplace will replace an existing document in the database with the data specified in the request.
	ImportOnDuplicateReplace CollectionDocumentImportOnDuplicate = CollectionDocumentImportOnDuplicate("replace")
	// ImportOnDuplicateIgnore will not update an existing document and simply ignore the error caused by a unique key constraint violation.
	ImportOnDuplicateIgnore CollectionDocumentImportOnDuplicate = CollectionDocumentImportOnDuplicate("ignore")
)
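
// Example of resolving key conflicts during an import (a minimal sketch; `col` is an assumed
// collection value and `payload` is illustrative document data):
//
//	onDup := ImportOnDuplicateUpdate
//	opts := CollectionDocumentImportOptions{OnDuplicate: &onDup}
//	resp, err := col.ImportDocumentsWithOptions(ctx, payload, ImportDocumentTypeDocuments, &opts)
//	if err == nil {
//		// With "update", conflicting documents are counted in resp.Updated instead of resp.Errors.
//	}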

// CollectionDocumentImportStatistics holds the statistics of an import action.
type CollectionDocumentImportStatistics struct {
	// Created holds the number of documents imported.
	Created int64 `json:"created,omitempty"`
	// Errors holds the number of documents that were not imported due to an error.
	Errors int64 `json:"errors,omitempty"`
	// Empty holds the number of empty lines found in the input (only greater than zero for the types documents or auto).
	Empty int64 `json:"empty,omitempty"`
	// Updated holds the number of updated/replaced documents (in case onDuplicate was set to either update or replace).
	Updated int64 `json:"updated,omitempty"`
	// Ignored holds the number of failed but ignored insert operations (in case onDuplicate was set to ignore).
	Ignored int64 `json:"ignored,omitempty"`
	// Details contains more detailed information about which documents could not be inserted.
	// It is only filled if the details query parameter is set to true.
	Details []string `json:"details,omitempty"`
}

func (c *CollectionDocumentImportOptions) modifyRequest(r connection.Request) error {
	if c == nil {
		return nil
	}

	if c.FromPrefix != nil {
		r.AddQuery(QueryFromPrefix, *c.FromPrefix)
	}

	if c.ToPrefix != nil {
		r.AddQuery(QueryToPrefix, *c.ToPrefix)
	}

	if c.Overwrite != nil {
		r.AddQuery(QueryOverwrite, boolToString(*c.Overwrite))
	}

	if c.OnDuplicate != nil {
		r.AddQuery(QueryOnDuplicate, string(*c.OnDuplicate))
	}

	if c.Complete != nil {
		r.AddQuery(QueryComplete, boolToString(*c.Complete))
	}

	if c.WithWaitForSync != nil {
		r.AddQuery(QueryWaitForSync, boolToString(*c.WithWaitForSync))
	}

	return nil
}

func (c *CollectionDocumentImportRequest) modifyRequest(r connection.Request) error {
	if c == nil {
		return nil
	}

	if c.Collection != nil {
		r.AddQuery(QueryCollection, *c.Collection)
	}

	if c.Type != nil && string(*c.Type) != "" {
		r.AddQuery(QueryType, string(*c.Type))
	}

	r.AddHeader(connection.ContentType, "text/plain")
	r.AddHeader("Accept", "text/plain")

	// Propagate the query parameters derived from the embedded import options.
	return c.CollectionDocumentImportOptions.modifyRequest(r)
}