// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.discoveryengine.v1beta;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/discoveryengine/v1beta/completion.proto";
import "google/cloud/discoveryengine/v1beta/document.proto";
import "google/cloud/discoveryengine/v1beta/user_event.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
import "google/type/date.proto";

option csharp_namespace = "Google.Cloud.DiscoveryEngine.V1Beta";
option go_package = "cloud.google.com/go/discoveryengine/apiv1beta/discoveryenginepb;discoveryenginepb";
option java_multiple_files = true;
option java_outer_classname = "ImportConfigProto";
option java_package = "com.google.cloud.discoveryengine.v1beta";
option objc_class_prefix = "DISCOVERYENGINE";
option php_namespace = "Google\\Cloud\\DiscoveryEngine\\V1beta";
option ruby_package = "Google::Cloud::DiscoveryEngine::V1beta";

// Cloud Storage location for input content.
message GcsSource {
  // Required. Cloud Storage URIs to input files. Each URI can be up to
  // 2,000 characters long. URIs can match the full object path (for example,
  // `gs://bucket/directory/object.json`) or a pattern matching one or more
  // files, such as `gs://bucket/directory/*.json`.
  //
  // A request can contain at most 100 files (or 100,000 files if `data_schema`
  // is `content`). Each file can be up to 2 GB (or 100 MB if `data_schema` is
  // `content`).
  repeated string input_uris = 1 [(google.api.field_behavior) = REQUIRED];

  // The schema to use when parsing the data from the source.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One JSON
  //   [Document][google.cloud.discoveryengine.v1beta.Document] per line. Each
  //   document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1beta.Document.id].
  // * `content`: Unstructured data (e.g. PDF, HTML). Each file matched by
  //   `input_uris` becomes a document, with the ID set to the first 128
  //   bits of SHA256(URI) encoded as a hex string.
  // * `custom`: One custom data JSON per row in arbitrary format that conforms
  //   to the defined [Schema][google.cloud.discoveryengine.v1beta.Schema] of
  //   the data store. This can only be used by the GENERIC Data Store vertical.
  // * `csv`: A CSV file with header conforming to the defined
  //   [Schema][google.cloud.discoveryengine.v1beta.Schema] of the
  //   data store. Each entry after the header is imported as a Document.
  //   This can only be used by the GENERIC Data Store vertical.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One JSON
  //   [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] per line.
  string data_schema = 2;
}
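
// A minimal sketch (protobuf text format, kept in a comment so the schema is
// unaffected) of how a GcsSource might be populated for an unstructured
// import; the bucket and object names are hypothetical:
//
//   gcs_source {
//     input_uris: "gs://example-bucket/docs/*.pdf"
//     data_schema: "content"
//   }
//
// With `data_schema: "content"`, each matched file becomes one Document whose
// ID is the first 128 bits of SHA256(URI), as described above.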

// The BigQuery source to import data from.
message BigQuerySource {
  // BigQuery table partition info. Leave this empty if the BigQuery table
  // is not partitioned.
  oneof partition {
    // BigQuery time partitioned table's _PARTITIONDATE in YYYY-MM-DD format.
    google.type.Date partition_date = 5;
  }

  // The project ID (either the project number or the project ID) that the
  // BigQuery source is in, with a length limit of 128 characters. If not
  // specified, inherits the project ID from the parent request.
  string project_id = 1;

  // Required. The BigQuery data set to copy the data from with a length limit
  // of 1,024 characters.
  string dataset_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The BigQuery table to copy the data from with a length limit of
  // 1,024 characters.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // BigQuery export to a specific Cloud Storage directory.
  string gcs_staging_dir = 4;

  // The schema to use when parsing the data from the source.
  //
  // Supported values for user event imports:
  //
  // * `user_event` (default): One
  //   [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] per row.
  //
  // Supported values for document imports:
  //
  // * `document` (default): One
  //   [Document][google.cloud.discoveryengine.v1beta.Document] per row. Each
  //   document must have a valid
  //   [Document.id][google.cloud.discoveryengine.v1beta.Document.id] and one of
  //   [Document.json_data][google.cloud.discoveryengine.v1beta.Document.json_data]
  //   or
  //   [Document.struct_data][google.cloud.discoveryengine.v1beta.Document.struct_data].
  // * `custom`: One custom data per row in arbitrary format that conforms to
  //   the defined [Schema][google.cloud.discoveryengine.v1beta.Schema] of the
  //   data store. This can only be used by the GENERIC Data Store vertical.
  string data_schema = 6;
}
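
// A minimal sketch of a BigQuerySource in text format; the project, dataset,
// and table IDs are hypothetical, and `partition_date` is only needed when
// the source table is time partitioned:
//
//   bigquery_source {
//     project_id: "example-project"
//     dataset_id: "example_dataset"
//     table_id: "documents"
//     data_schema: "document"
//     partition_date { year: 2024 month: 1 day: 31 }
//   }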

// The Spanner source to import data from.
message SpannerSource {
  // The project ID that the Spanner source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the source Spanner table.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The database ID of the source Spanner table.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The table name of the Spanner database that needs to be imported.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Whether to apply data boost on Spanner export. Enabling this option will
  // incur additional cost. More info can be found
  // [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
  bool enable_data_boost = 5;
}

// The Bigtable Options object that contains information to support
// the import.
message BigtableOptions {
  // The column family of the Bigtable.
  message BigtableColumnFamily {
    // The field name to use for this column family in the document. The
    // name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
    // it is parsed from the family name with best effort. However, due to
    // different naming patterns, field name collisions could happen, where
    // parsing behavior is undefined.
    string field_name = 1;

    // The encoding mode of the values when the type is not `STRING`.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using the `HBase Bytes.toBytes`
    //   family of functions. This can be overridden for a specific column
    //   by listing that column in `columns` and specifying an encoding for it.
    Encoding encoding = 2;

    // The type of values in this column family.
    // The values are expected to be encoded using the `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 3;

    // The list of objects that contain column-level information for each
    // column. If a column is not present in this list, it is ignored.
    repeated BigtableColumn columns = 4;
  }

  // The column of the Bigtable.
  message BigtableColumn {
    // Required. Qualifier of the column. If it cannot be decoded with UTF-8,
    // use a base-64 encoded string instead.
    bytes qualifier = 1 [(google.api.field_behavior) = REQUIRED];

    // The field name to use for this column in the document. The name has to
    // match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
    // If not set, it is parsed from the qualifier bytes with best effort.
    // However, due to different naming patterns, field name collisions could
    // happen, where parsing behavior is undefined.
    string field_name = 2;

    // The encoding mode of the values when the type is not `STRING`.
    // Acceptable encoding values are:
    //
    // * `TEXT`: indicates values are alphanumeric text strings.
    // * `BINARY`: indicates values are encoded using the `HBase Bytes.toBytes`
    //   family of functions. This can be overridden for a specific column
    //   by listing that column in `columns` and specifying an encoding for it.
    Encoding encoding = 3;

    // The type of values in this column.
    // The values are expected to be encoded using the `HBase Bytes.toBytes`
    // function when the encoding value is set to `BINARY`.
    Type type = 4;
  }

  // The type of values in a Bigtable column or column family.
  // The values are expected to be encoded using the
  // [HBase
  // Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
  // function when the encoding value is set to `BINARY`.
  enum Type {
    // The type is unspecified.
    TYPE_UNSPECIFIED = 0;

    // String type.
    STRING = 1;

    // Numerical type.
    NUMBER = 2;

    // Integer type.
    INTEGER = 3;

    // Variable length integer type.
    VAR_INTEGER = 4;

    // BigDecimal type.
    BIG_NUMERIC = 5;

    // Boolean type.
    BOOLEAN = 6;

    // JSON type.
    JSON = 7;
  }

  // The encoding mode of a Bigtable column or column family.
  enum Encoding {
    // The encoding is unspecified.
    ENCODING_UNSPECIFIED = 0;

    // Text encoding.
    TEXT = 1;

    // Binary encoding.
    BINARY = 2;
  }

  // The field name used for saving the row key value in the document. The name
  // has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
  string key_field_name = 1;

  // The mapping from family names to an object that contains column-family
  // level information for the given column family. If a family is not present
  // in this map, it is ignored.
  map<string, BigtableColumnFamily> families = 2;
}

// The Cloud Bigtable source for importing data.
message BigtableSource {
  // The project ID that the Bigtable source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The instance ID of the Cloud Bigtable that needs to be imported.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The table ID of the Cloud Bigtable that needs to be imported.
  string table_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. Bigtable options that contain information needed when parsing
  // data into typed structures. For example, column type annotations.
  BigtableOptions bigtable_options = 4 [(google.api.field_behavior) = REQUIRED];
}
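
// A minimal sketch of a BigtableSource with BigtableOptions in text format;
// the instance, table, family, and column names are hypothetical. It maps the
// row key to a `doc_id` field and decodes one BINARY-encoded integer column:
//
//   bigtable_source {
//     project_id: "example-project"
//     instance_id: "example-instance"
//     table_id: "documents"
//     bigtable_options {
//       key_field_name: "doc_id"
//       families {
//         key: "cf1"
//         value {
//           field_name: "cf1"
//           encoding: TEXT
//           type: STRING
//           columns {
//             qualifier: "price"
//             field_name: "price"
//             encoding: BINARY
//             type: INTEGER
//           }
//         }
//       }
//     }
//   }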

// The Cloud FhirStore source to import data from.
message FhirStoreSource {
  // Required. The full resource name of the FHIR store to import data from, in
  // the format of
  // `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
  string fhir_store = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "healthcare.googleapis.com/FhirStore"
    }
  ];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // FhirStore export to a specific Cloud Storage directory.
  string gcs_staging_dir = 2;
}

// The Cloud SQL source to import data from.
message CloudSqlSource {
  // The project ID that the Cloud SQL source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Cloud SQL instance to copy the data from with a length limit
  // of 256 characters.
  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The Cloud SQL table to copy the data from with a length limit of
  // 256 characters.
  string table_id = 4 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // Cloud SQL export to a specific Cloud Storage directory.
  //
  // Ensure that the Cloud SQL service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 5;

  // Option for serverless export. Enabling this option will incur additional
  // cost. More info can be found
  // [here](https://cloud.google.com/sql/pricing#serverless).
  bool offload = 6;
}

// The Firestore source to import data from.
message FirestoreSource {
  // The project ID that the Firestore source is in with a length limit of 128
  // characters. If not specified, inherits the project ID from the parent
  // request.
  string project_id = 1;

  // Required. The Firestore database to copy the data from with a length limit
  // of 256 characters.
  string database_id = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The Firestore collection to copy the data from with a length
  // limit of 1,500 characters.
  string collection_id = 3 [(google.api.field_behavior) = REQUIRED];

  // Intermediate Cloud Storage directory used for the import with a length
  // limit of 2,000 characters. Can be specified if one wants to have the
  // Firestore export to a specific Cloud Storage directory.
  //
  // Ensure that the Firestore service account has the necessary Cloud
  // Storage Admin permissions to access the specified Cloud Storage directory.
  string gcs_staging_dir = 4;
}

// Configuration of the destination for Import-related errors.
message ImportErrorConfig {
  // Required. Errors destination.
  oneof destination {
    // Cloud Storage prefix for import errors. This must be an empty,
    // existing Cloud Storage directory. Import errors are written to
    // sharded files in this directory, one per line, as a JSON-encoded
    // `google.rpc.Status` message.
    string gcs_prefix = 1;
  }
}
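
// A minimal sketch of an ImportErrorConfig in text format; the bucket name is
// hypothetical, and the prefix must point at an empty, existing directory:
//
//   error_config {
//     gcs_prefix: "gs://example-bucket/import_errors/"
//   }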

// Request message for the ImportUserEvents method.
message ImportUserEventsRequest {
  // The inline source for the input config for the ImportUserEvents method.
  message InlineSource {
    // Required. A list of user events to import. Recommended max of 10k items.
    repeated UserEvent user_events = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Required. The desired input source of the user event data.
  oneof source {
    // The inline source for the input content for UserEvents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;
  }

  // Required. Parent DataStore resource name, of the form
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];

  // The desired location of errors incurred during the Import. Cannot be set
  // for inline user event imports.
  ImportErrorConfig error_config = 5;
}
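
// A minimal sketch of an ImportUserEventsRequest in text format, importing
// user events from Cloud Storage; the project, location, collection, data
// store, and bucket names are all hypothetical:
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store"
//   gcs_source {
//     input_uris: "gs://example-bucket/user_events/*.json"
//     data_schema: "user_event"
//   }
//   error_config {
//     gcs_prefix: "gs://example-bucket/import_errors/"
//   }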

// Response of the ImportUserEventsRequest. If the long running operation
// was successful, this message is returned by the
// google.longrunning.Operations.response field.
message ImportUserEventsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors if this field was set in
  // the request.
  ImportErrorConfig error_config = 2;

  // Count of user events imported with complete existing Documents.
  int64 joined_events_count = 3;

  // Count of user events imported, but with Document information not found
  // in the existing Branch.
  int64 unjoined_events_count = 4;
}

// Metadata related to the progress of the Import operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportUserEventsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;
}

// Metadata related to the progress of the ImportDocuments operation. This is
// returned by the google.longrunning.Operation.metadata field.
message ImportDocumentsMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;

  // Count of entries that were processed successfully.
  int64 success_count = 3;

  // Count of entries that encountered errors while processing.
  int64 failure_count = 4;

  // Total count of entries that were processed.
  int64 total_count = 5;
}

// Request message for Import methods.
message ImportDocumentsRequest {
  // The inline source for the input config for the ImportDocuments method.
  message InlineSource {
    // Required. A list of documents to update/create. Each document must have a
    // valid [Document.id][google.cloud.discoveryengine.v1beta.Document.id].
    // Recommended max of 100 items.
    repeated Document documents = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Indicates how imported documents are reconciled with the existing documents
  // created or imported before.
  enum ReconciliationMode {
    // Defaults to `INCREMENTAL`.
    RECONCILIATION_MODE_UNSPECIFIED = 0;

    // Inserts new documents or updates existing documents.
    INCREMENTAL = 1;

    // Calculates diff and replaces the entire document dataset. Existing
    // documents may be deleted if they are not present in the source location.
    FULL = 2;
  }

  // Required. The source of the input.
  oneof source {
    // The inline source for the input content for documents.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    GcsSource gcs_source = 3;

    // BigQuery input source.
    BigQuerySource bigquery_source = 4;

    // FhirStore input source.
    FhirStoreSource fhir_store_source = 10;

    // Spanner input source.
    SpannerSource spanner_source = 11;

    // Cloud SQL input source.
    CloudSqlSource cloud_sql_source = 12;

    // Firestore input source.
    FirestoreSource firestore_source = 13;

    // Cloud Bigtable input source.
    BigtableSource bigtable_source = 15;
  }

  // Required. The parent branch resource name, such as
  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}`.
  // Requires create/update permission.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/Branch"
    }
  ];

  // The desired location of errors incurred during the Import.
  ImportErrorConfig error_config = 5;

  // The mode of reconciliation between existing documents and the documents to
  // be imported. Defaults to
  // [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL].
  ReconciliationMode reconciliation_mode = 6;

  // Indicates which fields in the provided imported documents to update. If
  // not set, the default is to update all fields.
  google.protobuf.FieldMask update_mask = 7;

  // Whether to automatically generate IDs for the documents if absent.
  //
  // If set to `true`,
  // [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s are
  // automatically generated based on the hash of the payload, so IDs may not
  // be consistent across multiple imports. In that case,
  // [ReconciliationMode.FULL][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.ReconciliationMode.FULL]
  // is highly recommended to avoid duplicate content. If unset or set to
  // `false`, [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s
  // have to be specified using
  // [id_field][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.id_field];
  // otherwise, documents without IDs fail to be imported.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1beta.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1beta.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1beta.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1beta.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1beta.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1beta.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1beta.BigtableSource].
  bool auto_generate_ids = 8;

  // The field that indicates the ID field or column to be used as the unique
  // IDs of the documents.
  //
  // For [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource] it is the
  // key of the JSON field. For instance, `my_id` for JSON `{"my_id":
  // "some_uuid"}`. For others, it may be the column name of the table where the
  // unique IDs are stored.
  //
  // The values of the JSON field or the table column are used as the
  // [Document.id][google.cloud.discoveryengine.v1beta.Document.id]s. The JSON
  // field or the table column must be of string type, and the values must be
  // valid strings that conform to
  // [RFC-1034](https://tools.ietf.org/html/rfc1034) with 1-63 characters.
  // Otherwise, documents without valid IDs fail to be imported.
  //
  // Only set this field when
  // [auto_generate_ids][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest.auto_generate_ids]
  // is unset or set to `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
  //
  // If it is unset, a default value `_id` is used when importing from the
  // allowed data sources.
  //
  // Supported data sources:
  //
  // * [GcsSource][google.cloud.discoveryengine.v1beta.GcsSource].
  //   [GcsSource.data_schema][google.cloud.discoveryengine.v1beta.GcsSource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [BigQuerySource][google.cloud.discoveryengine.v1beta.BigQuerySource].
  //   [BigQuerySource.data_schema][google.cloud.discoveryengine.v1beta.BigQuerySource.data_schema]
  //   must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
  // * [SpannerSource][google.cloud.discoveryengine.v1beta.SpannerSource].
  // * [CloudSqlSource][google.cloud.discoveryengine.v1beta.CloudSqlSource].
  // * [FirestoreSource][google.cloud.discoveryengine.v1beta.FirestoreSource].
  // * [BigtableSource][google.cloud.discoveryengine.v1beta.BigtableSource].
  string id_field = 9;
}
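
// A minimal sketch of an ImportDocumentsRequest in text format for a CSV
// import with auto-generated IDs; the resource names and bucket are
// hypothetical. Because `auto_generate_ids` is set, FULL reconciliation is
// used, as recommended above:
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store/branches/default_branch"
//   gcs_source {
//     input_uris: "gs://example-bucket/catalog/*.csv"
//     data_schema: "csv"
//   }
//   reconciliation_mode: FULL
//   auto_generate_ids: true
//   error_config {
//     gcs_prefix: "gs://example-bucket/import_errors/"
//   }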

// Response of the
// [ImportDocumentsRequest][google.cloud.discoveryengine.v1beta.ImportDocumentsRequest].
// If the long running operation is done and was successful, this message is
// returned by the google.longrunning.Operations.response field.
message ImportDocumentsResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Echoes the destination for the complete errors in the request if set.
  ImportErrorConfig error_config = 2;
}

// Request message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesRequest {
  // The inline source for SuggestionDenyListEntry.
  message InlineSource {
    // Required. A list of all denylist entries to import. Max of 1000 items.
    repeated SuggestionDenyListEntry entries = 1
        [(google.api.field_behavior) = REQUIRED];
  }

  // The source of the updated SuggestionDenyList.
  oneof source {
    // The inline source for the input content for suggestion deny list entries.
    InlineSource inline_source = 2;

    // Cloud Storage location for the input content.
    //
    // Only one file can be specified; it must contain all entries to import.
    // Supported values of `gcs_source.data_schema` for autocomplete suggestion
    // deny list entry imports:
    //
    // * `suggestion_deny_list` (default): One JSON [SuggestionDenyListEntry]
    //   per line.
    GcsSource gcs_source = 3;
  }

  // Required. The parent data store resource name for which to import denylist
  // entries. Follows the pattern
  // `projects/*/locations/*/collections/*/dataStores/*`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "discoveryengine.googleapis.com/DataStore"
    }
  ];
}
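
// A minimal sketch of an ImportSuggestionDenyListEntriesRequest in text
// format, reading a single denylist file from Cloud Storage; the resource
// names and bucket are hypothetical:
//
//   parent: "projects/example-project/locations/global/collections/default_collection/dataStores/example-data-store"
//   gcs_source {
//     input_uris: "gs://example-bucket/deny_list/entries.json"
//     data_schema: "suggestion_deny_list"
//   }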

// Response message for
// [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1beta.CompletionService.ImportSuggestionDenyListEntries]
// method.
message ImportSuggestionDenyListEntriesResponse {
  // A sample of errors encountered while processing the request.
  repeated google.rpc.Status error_samples = 1;

  // Count of deny list entries successfully imported.
  int64 imported_entries_count = 2;

  // Count of deny list entries that failed to be imported.
  int64 failed_entries_count = 3;
}

// Metadata related to the progress of the ImportSuggestionDenyListEntries
// operation. This is returned by the google.longrunning.Operation.metadata
// field.
message ImportSuggestionDenyListEntriesMetadata {
  // Operation create time.
  google.protobuf.Timestamp create_time = 1;

  // Operation last update time. If the operation is done, this is also the
  // finish time.
  google.protobuf.Timestamp update_time = 2;
}