xref: /aosp_15_r20/external/googleapis/google/cloud/bigquery/datatransfer/v1/transfer.proto (revision d5c09012810ac0c9f33fe448fb6da8260d444cc9)
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.bigquery.datatransfer.v1;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
import "google/rpc/status.proto";

option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
option go_package = "cloud.google.com/go/bigquery/datatransfer/apiv1/datatransferpb;datatransferpb";
option java_multiple_files = true;
option java_outer_classname = "TransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option objc_class_prefix = "GCBDT";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
option ruby_package = "Google::Cloud::Bigquery::DataTransfer::V1";
// DEPRECATED. Represents data transfer type.
enum TransferType {
  option deprecated = true;

  // Invalid or Unknown transfer type placeholder.
  TRANSFER_TYPE_UNSPECIFIED = 0;

  // Batch data transfer.
  BATCH = 1;

  // Streaming data transfer. Streaming data source currently doesn't
  // support multiple transfer configs per project.
  STREAMING = 2;
}
49
// Represents data transfer run state.
// NOTE(review): enum value 1 is skipped — presumably a removed historical
// state; do not reuse it. Numbering continues at 2.
enum TransferState {
  // State placeholder (0).
  TRANSFER_STATE_UNSPECIFIED = 0;

  // Data transfer is scheduled and is waiting to be picked up by
  // data transfer backend (2).
  PENDING = 2;

  // Data transfer is in progress (3).
  RUNNING = 3;

  // Data transfer completed successfully (4).
  SUCCEEDED = 4;

  // Data transfer failed (5).
  FAILED = 5;

  // Data transfer is cancelled (6).
  CANCELLED = 6;
}
71
// Represents preferences for sending email notifications for transfer run
// events.
message EmailPreferences {
  // If true, email notifications will be sent on transfer run failures.
  bool enable_failure_email = 1;
}
78
// Options customizing the data transfer schedule.
message ScheduleOptions {
  // If true, automatic scheduling of data transfer runs for this configuration
  // will be disabled. The runs can be started on ad-hoc basis using
  // StartManualTransferRuns API. When automatic scheduling is disabled, the
  // TransferConfig.schedule field will be ignored.
  bool disable_auto_scheduling = 3;

  // Specifies time to start scheduling transfer runs. The first run will be
  // scheduled at or after the start time according to a recurrence pattern
  // defined in the schedule string. The start time can be changed at any
  // moment. The time when a data transfer can be triggered manually is not
  // limited by this option.
  google.protobuf.Timestamp start_time = 1;

  // Defines time to stop scheduling transfer runs. A transfer run cannot be
  // scheduled at or after the end time. The end time can be changed at any
  // moment. The time when a data transfer can be triggered manually is not
  // limited by this option.
  google.protobuf.Timestamp end_time = 2;
}
100
// Information about a user.
message UserInfo {
  // E-mail address of the user. Explicit presence (`optional`) distinguishes
  // "not populated" from an empty string.
  optional string email = 1;
}
106
// Represents a data transfer configuration. A transfer configuration
// contains all metadata needed to perform a data transfer. For example,
// `destination_dataset_id` specifies where data should be stored.
// When a new transfer configuration is created, the specified
// `destination_dataset_id` is created when needed and shared with the
// appropriate data source service account.
message TransferConfig {
  option (google.api.resource) = {
    type: "bigquerydatatransfer.googleapis.com/TransferConfig"
    pattern: "projects/{project}/transferConfigs/{transfer_config}"
    pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}"
  };

  // Identifier. The resource name of the transfer config.
  // Transfer config names have the form either
  // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}` or
  // `projects/{project_id}/transferConfigs/{config_id}`,
  // where `config_id` is usually a UUID, even though it is not
  // guaranteed or required. The name is ignored when creating a transfer
  // config.
  string name = 1 [(google.api.field_behavior) = IDENTIFIER];

  // The destination of the transfer config.
  oneof destination {
    // The BigQuery target dataset id.
    string destination_dataset_id = 2;
  }

  // User specified display name for the data transfer.
  string display_name = 3;

  // Data source ID. This cannot be changed once data transfer is created. The
  // full list of available data source IDs can be returned through an API call:
  // https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list
  string data_source_id = 5;

  // Parameters specific to each data source. For more information see the
  // bq tab in the 'Setting up a data transfer' section for each data source.
  // For example the parameters for Cloud Storage transfers are listed here:
  // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
  google.protobuf.Struct params = 9;

  // Data transfer schedule.
  // If the data source does not support a custom schedule, this should be
  // empty. If it is empty, the default value for the data source will be used.
  // The specified times are in UTC.
  // Examples of valid format:
  // `1st,3rd monday of month 15:30`,
  // `every wed,fri of jan,jun 13:15`, and
  // `first sunday of quarter 00:00`.
  // See more explanation about the format here:
  // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
  //
  // NOTE: The minimum interval time between recurring transfers depends on the
  // data source; refer to the documentation for your data source.
  string schedule = 7;

  // Options customizing the data transfer schedule.
  ScheduleOptions schedule_options = 24;

  // The number of days to look back to automatically refresh the data.
  // For example, if `data_refresh_window_days = 10`, then every day
  // BigQuery reingests data for [today-10, today-1], rather than ingesting data
  // for just [today-1].
  // Only valid if the data source supports the feature. Set the value to 0
  // to use the default value.
  int32 data_refresh_window_days = 12;

  // Is this config disabled. When set to true, no runs will be scheduled for
  // this transfer config.
  bool disabled = 13;

  // Output only. Data transfer modification time. Ignored by server on input.
  google.protobuf.Timestamp update_time = 4
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Next time when data transfer will run.
  google.protobuf.Timestamp next_run_time = 8
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. State of the most recently updated transfer run.
  TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Deprecated. Unique ID of the user on whose behalf transfer is done.
  // See `owner_info` for the supported way to identify the owning user.
  int64 user_id = 11;

  // Output only. Region in which BigQuery dataset is located.
  string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Pub/Sub topic where notifications will be sent after transfer runs
  // associated with this transfer config finish.
  //
  // The format for specifying a pubsub topic is:
  // `projects/{project_id}/topics/{topic_id}`
  string notification_pubsub_topic = 15;

  // Email notifications will be sent according to these preferences
  // to the email address of the user who owns this transfer config.
  EmailPreferences email_preferences = 18;

  // Output only. Information about the user whose credentials are used to
  // transfer data. Populated only for `transferConfigs.get` requests. In case
  // the user information is not available, this field will not be populated.
  optional UserInfo owner_info = 27 [(google.api.field_behavior) = OUTPUT_ONLY];

  // The encryption configuration part. Currently, it is only used for the
  // optional KMS key name. The BigQuery service account of your project must be
  // granted permissions to use the key. Read methods will return the key name
  // applied in effect. Write methods will apply the key if it is present, or
  // otherwise try to apply project default keys if it is absent.
  EncryptionConfiguration encryption_configuration = 28;
}
219
// Represents the encryption configuration for a transfer.
message EncryptionConfiguration {
  // The name of the KMS key used for encrypting BigQuery data.
  // NOTE(review): the StringValue wrapper (rather than a plain string) is
  // presumably used so an unset key can be distinguished from an empty one;
  // kept for wire compatibility.
  google.protobuf.StringValue kms_key_name = 1;
}
225
// Represents a data transfer run.
message TransferRun {
  option (google.api.resource) = {
    type: "bigquerydatatransfer.googleapis.com/Run"
    pattern: "projects/{project}/transferConfigs/{transfer_config}/runs/{run}"
    pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}"
  };

  // Identifier. The resource name of the transfer run.
  // Transfer run names have the form
  // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
  // The name is ignored when creating a transfer run.
  string name = 1 [(google.api.field_behavior) = IDENTIFIER];

  // Minimum time after which a transfer run can be started.
  google.protobuf.Timestamp schedule_time = 3;

  // For batch transfer runs, specifies the date and time that the data should
  // be ingested.
  google.protobuf.Timestamp run_time = 10;

  // Status of the transfer run.
  google.rpc.Status error_status = 21;

  // Output only. Time when transfer run was started.
  // Parameter ignored by server for input requests.
  google.protobuf.Timestamp start_time = 4
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Time when transfer run ended.
  // Parameter ignored by server for input requests.
  google.protobuf.Timestamp end_time = 5
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Last time the data transfer run state was updated.
  google.protobuf.Timestamp update_time = 6
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Parameters specific to each data source. For more information
  // see the bq tab in the 'Setting up a data transfer' section for each data
  // source. For example the parameters for Cloud Storage transfers are listed
  // here:
  // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
  google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Data transfer destination.
  oneof destination {
    // Output only. The BigQuery target dataset id.
    string destination_dataset_id = 2
        [(google.api.field_behavior) = OUTPUT_ONLY];
  }

  // Output only. Data source id.
  string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Data transfer run state. Ignored for input requests.
  TransferState state = 8;

  // Deprecated. Unique ID of the user on whose behalf transfer is done.
  int64 user_id = 11;

  // Output only. Describes the schedule of this transfer run if it was
  // created as part of a regular schedule. For batch transfer runs that are
  // scheduled manually, this is empty.
  // NOTE: the system might choose to delay the schedule depending on the
  // current load, so `schedule_time` doesn't always match this.
  string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Pub/Sub topic where a notification will be sent after this
  // transfer run finishes.
  //
  // The format for specifying a pubsub topic is:
  // `projects/{project_id}/topics/{topic_id}`
  string notification_pubsub_topic = 23
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. Email notifications will be sent according to these
  // preferences to the email address of the user who owns the transfer config
  // this run was derived from.
  EmailPreferences email_preferences = 25
      [(google.api.field_behavior) = OUTPUT_ONLY];
}
308
// Represents a user facing message for a particular data transfer run.
message TransferMessage {
  // Represents data transfer user facing message severity.
  enum MessageSeverity {
    // No severity specified.
    MESSAGE_SEVERITY_UNSPECIFIED = 0;

    // Informational message.
    INFO = 1;

    // Warning message.
    WARNING = 2;

    // Error message.
    ERROR = 3;
  }

  // Time when message was logged.
  google.protobuf.Timestamp message_time = 1;

  // Message severity.
  MessageSeverity severity = 2;

  // Message text.
  string message_text = 3;
}
335