// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.discoveryengine.v1;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/discoveryengine/v1/completion.proto";
import "google/cloud/discoveryengine/v1/document.proto";
import "google/cloud/discoveryengine/v1/user_event.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
import "google/type/date.proto";

option csharp_namespace = "Google.Cloud.DiscoveryEngine.V1";
option go_package = "cloud.google.com/go/discoveryengine/apiv1/discoveryenginepb;discoveryenginepb";
option java_multiple_files = true;
option java_outer_classname = "ImportConfigProto";
option java_package = "com.google.cloud.discoveryengine.v1";
option objc_class_prefix = "DISCOVERYENGINE";
option php_namespace = "Google\\Cloud\\DiscoveryEngine\\V1";
option ruby_package = "Google::Cloud::DiscoveryEngine::V1";

// Cloud Storage location for input content.
message GcsSource {
  // Required. Cloud Storage URIs to input files. Each URI can be up to
  // 2,000 characters long. URIs can match the full object path (for example,
  // `gs://bucket/directory/object.json`) or a pattern matching one or more
  // files, such as `gs://bucket/directory/*.json`.
  //
  // A request can contain at most 100 files (or 100,000 files if `data_schema`
  // is `content`). Each file can be up to 2 GB (or 100 MB if `data_schema` is
  // `content`).
  repeated string input_uris = 1 [(google.api.field_behavior) = REQUIRED];

  // The schema to use when parsing the data from the source.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One JSON
  //   [Document][google.cloud.discoveryengine.v1.Document] per line. Each
  //   document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1.Document.id].
  // * `content`: Unstructured data (e.g. PDF, HTML). Each file matched by
  //   `input_uris` becomes a document, with the ID set to the first 128
  //   bits of SHA256(URI) encoded as a hex string.
  // * `custom`: One custom data JSON per row in arbitrary format that conforms
  //   to the defined [Schema][google.cloud.discoveryengine.v1.Schema] of the
  //   data store. This can only be used by the GENERIC Data Store vertical.
  // * `csv`: A CSV file with header conforming to the defined
  //   [Schema][google.cloud.discoveryengine.v1.Schema] of the
  //   data store. Each entry after the header is imported as a Document.
  //   This can only be used by the GENERIC Data Store vertical.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One JSON
  //   [UserEvent][google.cloud.discoveryengine.v1.UserEvent] per line.
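  //
  // As an illustrative sketch only (the `title` value is a placeholder and the
  // data fields must conform to the data store's
  // [Schema][google.cloud.discoveryengine.v1.Schema]), a `document` import
  // file could contain one JSON document per line, such as:
  //
  //     {"id": "doc-1", "struct_data": {"title": "Getting started"}}
  //     {"id": "doc-2", "json_data": "{\"title\": \"FAQ\"}"}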
  string data_schema = 2;
}

// The BigQuery source to import data from.
message BigQuerySource {
  // BigQuery table partition info. Leave this empty if the BigQuery table
  // is not partitioned.
  oneof partition {
    // BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
    google.type.Date partition_date = 5;
  }

  // The project ID (can be the project number or ID) that the BigQuery source
  // is in, with a length limit of 128 characters. If not specified, inherits
  // the project ID from the parent request.
  string project_id = 1;

  // Required. The BigQuery data set to copy the data from with a length limit
  // of 1,024 characters.
  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The BigQuery table to copy the data from with a length limit of
  // 1,024 characters.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if you want the BigQuery
  // export to go to a specific Cloud Storage directory.
  string gcs_staging_dir = 4;

  // The schema to use when parsing the data from the source.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One
  //   [UserEvent][google.cloud.discoveryengine.v1.UserEvent] per row.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One
  //   [Document][google.cloud.discoveryengine.v1.Document] per row. Each
  //   document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1.Document.id] and one of
  //   [Document.json_data][google.cloud.discoveryengine.v1.Document.json_data]
  //   or
  //   [Document.struct_data][google.cloud.discoveryengine.v1.Document.struct_data].
  // * `custom`: One custom data per row in arbitrary format that conforms to
  //   the defined [Schema][google.cloud.discoveryengine.v1.Schema] of the data
  //   store. This can only be used by the GENERIC Data Store vertical.
  string data_schema = 6;
}

// The Spanner source for importing data.
message SpannerSource {
  // The project ID that the Spanner source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the source Spanner table.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The database ID of the source Spanner table.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The table name of the Spanner database that needs to be imported.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Whether to apply data boost on Spanner export. Enabling this option will
  // incur additional cost. More information can be found
  // [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
  bool enable_data_boost = 5;
}

// The Bigtable Options object that contains information to support
// the import.
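//
// As an illustrative sketch only (the row key field, family, and field names
// are hypothetical), a configuration that stores the row key as `doc_id` and
// maps the `stats` column family to a numeric document field could look like
// the following in text format:
//
//     key_field_name: "doc_id"
//     families {
//       key: "stats"
//       value { field_name: "stats" type: NUMBER encoding: BINARY }
//     }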
message BigtableOptions {
  // The column family of the Bigtable.
  message BigtableColumnFamily {
    // The field name to use for this column family in the document. The
    // name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
    // it is parsed from the family name with best effort. However, due to
    // different naming patterns, field name collisions could happen, where
    // parsing behavior is undefined.
    string field_name = 1;

    // The encoding mode of the values when the type is not `STRING`.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using the `HBase Bytes.toBytes`
    //   family of functions. This can be overridden for a specific column
    //   by listing that column in `columns` and specifying an encoding for it.
    Encoding encoding = 2;

    // The type of values in this column family.
    // The values are expected to be encoded using the `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 3;

    // The list of objects that contains column level information for each
    // column. If a column is not present in this list, it will be ignored.
    repeated BigtableColumn columns = 4;
  }

  // The column of the Bigtable.
  message BigtableColumn {
    // Required. Qualifier of the column. If it cannot be decoded as UTF-8,
    // use a base-64 encoded string instead.
    bytes qualifier = 1 [(google.api.field_behavior) = REQUIRED];

    // The field name to use for this column in the document. The name has to
    // match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
    // If not set, it is parsed from the qualifier bytes with best effort.
    // However, due to different naming patterns, field name collisions could
    // happen, where parsing behavior is undefined.
    string field_name = 2;

    // The encoding mode of the values when the type is not `STRING`.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using the `HBase Bytes.toBytes`
    //   family of functions. This overrides the encoding specified at the
    //   column family level for this column.
    Encoding encoding = 3;

    // The type of values in this column.
    // The values are expected to be encoded using the `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 4;
  }

  // The type of values in a Bigtable column or column family.
  // The values are expected to be encoded using the
  // [HBase
  // Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
  // function when the encoding value is set to `BINARY`.
  enum Type {
    // The type is unspecified.
    TYPE_UNSPECIFIED = 0;

    // String type.
    STRING = 1;

    // Numerical type.
    NUMBER = 2;

    // Integer type.
    INTEGER = 3;

    // Variable length integer type.
    VAR_INTEGER = 4;

    // BigDecimal type.
    BIG_NUMERIC = 5;

    // Boolean type.
    BOOLEAN = 6;

    // JSON type.
    JSON = 7;
  }

  // The encoding mode of a Bigtable column or column family.
  enum Encoding {
    // The encoding is unspecified.
    ENCODING_UNSPECIFIED = 0;

    // Text encoding.
    TEXT = 1;

    // Binary encoding.
    BINARY = 2;
  }

  // The field name used for saving the row key value in the document. The name
  // has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
  string key_field_name = 1;

  // The mapping from family names to an object that contains column family
  // level information for the given column family. If a family is not present
  // in this map, it will be ignored.
  map<string, BigtableColumnFamily> families = 2;
}

// The Cloud Bigtable source for importing data.
message BigtableSource {
  // The project ID that the Bigtable source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the Cloud Bigtable that needs to be imported.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The table ID of the Cloud Bigtable that needs to be imported.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. Bigtable options that contain information needed when parsing
  // data into typed structures. For example, column type annotations.
  BigtableOptions bigtable_options = 4 [(google.api.field_behavior) = REQUIRED];
}

// The Cloud FhirStore source to import data from.
message FhirStoreSource {
  // Required. The full resource name of the FHIR store to import data from, in
  // the format of
  // `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
  string fhir_store = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "healthcare.googleapis.com/FhirStore"
    }
  ];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if you want the FhirStore
  // export to go to a specific Cloud Storage directory.
  string gcs_staging_dir = 2;
}

// The Cloud SQL source to import data from.
message CloudSqlSource {
  // The project ID that the Cloud SQL source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Cloud SQL instance to copy the data from with a length limit
  // of 256 characters.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL table to copy the data from with a length limit of
  // 256 characters.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if you want the Cloud SQL
  // export to go to a specific Cloud Storage directory.
  //
  // Ensure that the Cloud SQL service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 5;

  // Option for serverless export. Enabling this option will incur additional
  // cost. More information can be found
  // [here](https://cloud.google.com/sql/pricing#serverless).
  bool offload = 6;
}

// The Firestore source to import data from.
message FirestoreSource {
  // The project ID that the Firestore source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Firestore database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Firestore collection to copy the data from with a length
  // limit of 1,500 characters.
  string collection_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if you want the Firestore
  // export to go to a specific Cloud Storage directory.
  //
  // Ensure that the Firestore service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 4;
}

// Configuration of destination for Import related errors.
message ImportErrorConfig {
  // Required. Errors destination.
  oneof destination {
    // Cloud Storage prefix for import errors. This must be an empty,
    // existing Cloud Storage directory. Import errors are written to
    // sharded files in this directory, one per line, as a JSON-encoded
    // `google.rpc.Status` message.
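    //
    // For illustration only (the message text is a made-up example, not a
    // guaranteed format), a line in an error file might look like:
    //
    //     {"code": 3, "message": "Document doc-1 is missing a required field."}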
    string gcs_prefix = 1;
  }
}

// Request message for the ImportUserEvents method.
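//
// As an illustrative sketch only (the resource names, event type, and user ID
// are placeholders; the [UserEvent][google.cloud.discoveryengine.v1.UserEvent]
// fields shown are `event_type` and `user_pseudo_id`), an inline import
// request could look like the following in text format:
//
//     parent: "projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store"
//     inline_source {
//       user_events {
//         event_type: "search"
//         user_pseudo_id: "user-123"
//       }
//     }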
message ImportUserEventsRequest {
  // The inline source for the input config for the ImportUserEvents method.
  message InlineSource {
    // Required. A list of user events to import. Recommended max of 10k items.
    repeated UserEvent user_events = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Required. The desired input source of the user event data.
  oneof source {
    // The Inline source for the input content for UserEvents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;
  }

  // Required. Parent DataStore resource name, of the form
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];

  // The desired location of errors incurred during the Import. Cannot be set
  // for inline user event imports.
  ImportErrorConfig error_config = 5;
}

// Response of the ImportUserEventsRequest. If the long running
// operation was successful, then this message is returned by the
// google.longrunning.Operations.response field.
message ImportUserEventsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors if this field was set in
  // the request.
  ImportErrorConfig error_config = 2;

  // Count of user events imported with complete existing Documents.
  int64 joined_events_count = 3;

  // Count of user events imported, but with Document information not found
  // in the existing Branch.
  int64 unjoined_events_count = 4;
}

// Metadata related to the progress of the Import operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportUserEventsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;
}

// Metadata related to the progress of the ImportDocuments operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportDocumentsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;

  // Total count of entries that were processed.
  int64 total_count = 5;
}

// Request message for the ImportDocuments method.
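//
// As an illustrative sketch only (the resource names and bucket path are
// placeholders), a request that imports JSON documents from Cloud Storage
// could look like the following in text format:
//
//     parent: "projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store/branches/default_branch"
//     gcs_source {
//       input_uris: "gs://my-bucket/documents/*.json"
//       data_schema: "document"
//     }
//     reconciliation_mode: INCREMENTAL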
message ImportDocumentsRequest {
  // The inline source for the input config for the ImportDocuments method.
  message InlineSource {
    // Required. A list of documents to update/create. Each document must have a
    // valid [Document.id][google.cloud.discoveryengine.v1.Document.id].
    // Recommended max of 100 items.
    repeated Document documents = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Indicates how imported documents are reconciled with the existing documents
  // created or imported before.
  enum ReconciliationMode {
    // Defaults to `INCREMENTAL`.
    RECONCILIATION_MODE_UNSPECIFIED = 0;

    // Inserts new documents or updates existing documents.
    INCREMENTAL = 1;

    // Calculates diff and replaces the entire document dataset. Existing
    // documents may be deleted if they are not present in the source location.
    FULL = 2;
  }

  // Required. The source of the input.
  oneof source {
    // The Inline source for the input content for documents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;

    // FhirStore input source.
    FhirStoreSource fhir_store_source = 10;

    // Spanner input source.
    SpannerSource spanner_source = 11;

    // Cloud SQL input source.
    CloudSqlSource cloud_sql_source = 12;

    // Firestore input source.
    FirestoreSource firestore_source = 13;

    // Cloud Bigtable input source.
    BigtableSource bigtable_source = 15;
  }

  // Required. The parent branch resource name, such as
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`.
  // Requires create/update permission.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/Branch"
    }
  ];

  // The desired location of errors incurred during the Import.
  ImportErrorConfig error_config = 5;

  // The mode of reconciliation between existing documents and the documents to
  // be imported. Defaults to
  // [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL].
  ReconciliationMode reconciliation_mode = 6;

  // Indicates which fields in the provided imported documents to update. If
  // not set, the default is to update all fields.
  google.protobuf.FieldMask update_mask = 7;

  // Whether to automatically generate IDs for the documents if absent.
  //
  // If set to `true`,
  // [Document.id][google.cloud.discoveryengine.v1.Document.id]s are
  // automatically generated based on the hash of the payload, where IDs may
  // not be consistent during multiple imports. In that case,
  // [ReconciliationMode.FULL][google.cloud.discoveryengine.v1.ImportDocumentsRequest.ReconciliationMode.FULL]
  // is highly recommended to avoid duplicate content. If unset or set to
  // `false`, [Document.id][google.cloud.discoveryengine.v1.Document.id]s have
  // to be specified using
  // [id_field][google.cloud.discoveryengine.v1.ImportDocumentsRequest.id_field];
  // otherwise, documents without IDs fail to be imported.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1.BigtableSource].
  bool auto_generate_ids = 8;

  // The field indicates the ID field or column to be used as unique IDs of
  // the documents.
  //
  // For [GcsSource][google.cloud.discoveryengine.v1.GcsSource] it is the key of
  // the JSON field. For instance, `my_id` for JSON `{"my_id": "some_uuid"}`.
  // For others, it may be the column name of the table where the unique IDs are
  // stored.
  //
  // The values of the JSON field or the table column are used as the
  // [Document.id][google.cloud.discoveryengine.v1.Document.id]s. The JSON field
  // or the table column must be of string type, and the values must be valid
  // strings that conform to [RFC-1034](https://tools.ietf.org/html/rfc1034)
  // with 1-63 characters. Otherwise, documents without valid IDs fail to be
  // imported.
  //
  // Only set this field when
  // [auto_generate_ids][google.cloud.discoveryengine.v1.ImportDocumentsRequest.auto_generate_ids]
  // is unset or set as `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
  //
  // If it is unset, a default value `_id` is used when importing from the
  // allowed data sources.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1.BigtableSource].
  string id_field = 9;
}

// Response of the
// [ImportDocumentsRequest][google.cloud.discoveryengine.v1.ImportDocumentsRequest].
// If the long running operation is done, then this message is returned by the
// google.longrunning.Operations.response field if the operation was successful.
message ImportDocumentsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors in the request if set.
  ImportErrorConfig error_config = 2;
}

// Request message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesRequest {
  // The inline source for SuggestionDenyListEntry.
  message InlineSource {
    // Required. A list of all denylist entries to import. Max of 1000 items.
    repeated SuggestionDenyListEntry entries = 1
        [(google.api.field_behavior) = REQUIRED];
  }

  // The source of the updated SuggestionDenyList.
  oneof source {
    // The Inline source for the input content for suggestion deny list entries.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    //
    // Only one file can be specified, containing all entries to import.
    // Supported values of `gcs_source.data_schema` for autocomplete suggestion
    // deny list entry imports:
    //
    // * `suggestion_deny_list` (default): One JSON [SuggestionDenyListEntry]
    //   per line.
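    //
    // For illustration only, using the
    // [SuggestionDenyListEntry][google.cloud.discoveryengine.v1.SuggestionDenyListEntry]
    // fields `block_phrase` and `match_operator` (the phrase shown is a
    // placeholder), a line in such a file might look like:
    //
    //     {"block_phrase": "example bad phrase", "match_operator": "CONTAINS"}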
    GcsSource gcs_source = 3;
  }

  // Required. The parent data store resource name for which to import denylist
  // entries. Follows the pattern
  // `projects/*/locations/*/collections/*/dataStores/*`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];
}

// Response message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Count of deny list entries successfully imported.
  int64 imported_entries_count = 2;

  // Count of deny list entries that failed to be imported.
  int64 failed_entries_count = 3;
}

// Metadata related to the progress of the ImportSuggestionDenyListEntries
// operation. This is returned by the google.longrunning.Operation.metadata
// field.
message ImportSuggestionDenyListEntriesMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;
}
