xref: /aosp_15_r20/external/googleapis/google/cloud/bigquery/logging/v1/audit_data.proto (revision d5c09012810ac0c9f33fe448fb6da8260d444cc9)
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
14
15syntax = "proto3";
16
17package google.cloud.bigquery.logging.v1;
18
19import "google/iam/v1/iam_policy.proto";
20import "google/iam/v1/policy.proto";
21import "google/protobuf/duration.proto";
22import "google/protobuf/timestamp.proto";
23import "google/rpc/status.proto";
24
25option csharp_namespace = "Google.Cloud.BigQuery.Logging.V1";
26option go_package = "cloud.google.com/go/bigquery/logging/apiv1/loggingpb;loggingpb";
27option java_multiple_files = true;
28option java_outer_classname = "AuditDataProto";
29option java_package = "com.google.cloud.bigquery.logging.v1";
30
// BigQuery request and response messages for audit log.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message AuditData {
  // Request data for each BigQuery method.
  // At most one request field is populated per audit record (proto3 `oneof`).
  oneof request {
    // Table insert request.
    TableInsertRequest table_insert_request = 1;

    // Table update request.
    TableUpdateRequest table_update_request = 16;

    // Dataset list request.
    DatasetListRequest dataset_list_request = 2;

    // Dataset insert request.
    DatasetInsertRequest dataset_insert_request = 3;

    // Dataset update request.
    DatasetUpdateRequest dataset_update_request = 4;

    // Job insert request.
    JobInsertRequest job_insert_request = 5;

    // Job query request.
    JobQueryRequest job_query_request = 6;

    // Job get query results request.
    JobGetQueryResultsRequest job_get_query_results_request = 7;

    // Table data-list request.
    TableDataListRequest table_data_list_request = 8;

    // Iam policy request.
    google.iam.v1.SetIamPolicyRequest set_iam_policy_request = 20;
  }

  // Response data for each BigQuery method.
  // At most one response field is populated per audit record (proto3 `oneof`).
  oneof response {
    // Table insert response.
    TableInsertResponse table_insert_response = 9;

    // Table update response.
    TableUpdateResponse table_update_response = 10;

    // Dataset insert response.
    DatasetInsertResponse dataset_insert_response = 11;

    // Dataset update response.
    DatasetUpdateResponse dataset_update_response = 12;

    // Job insert response.
    JobInsertResponse job_insert_response = 18;

    // Job query response.
    JobQueryResponse job_query_response = 13;

    // Job get query results response.
    JobGetQueryResultsResponse job_get_query_results_response = 14;

    // Deprecated: Job query-done response. Use this information for usage
    // analysis.
    // The field option makes the deprecation visible in generated code,
    // consistent with `JobStatistics.reservation_usage` in this file.
    JobQueryDoneResponse job_query_done_response = 15 [deprecated = true];

    // Iam Policy.
    google.iam.v1.Policy policy_response = 21;
  }

  // A job completion event.
  JobCompletedEvent job_completed_event = 17;

  // Information about the table access events.
  repeated TableDataReadEvent table_data_read_events = 19;
}
106
// Table insert request.
message TableInsertRequest {
  // The new table to be inserted (see the `Table` message in this file).
  Table resource = 1;
}
112
// Table update request.
message TableUpdateRequest {
  // The table to be updated, with the requested new state.
  Table resource = 1;
}
118
// Table insert response.
message TableInsertResponse {
  // Final state of the inserted table, as recorded after the insert.
  Table resource = 1;
}
124
// Table update response.
message TableUpdateResponse {
  // Final state of the updated table, as recorded after the update.
  Table resource = 1;
}
130
// Dataset list request.
message DatasetListRequest {
  // Whether to list all datasets, including hidden ones.
  // When false, hidden datasets are excluded from the listing.
  bool list_all = 1;
}
136
// Dataset insert request.
message DatasetInsertRequest {
  // The dataset to be inserted (see the `Dataset` message in this file).
  Dataset resource = 1;
}
142
// Dataset insert response.
message DatasetInsertResponse {
  // Final state of the inserted dataset, as recorded after the insert.
  Dataset resource = 1;
}
148
// Dataset update request.
message DatasetUpdateRequest {
  // The dataset to be updated, with the requested new state.
  Dataset resource = 1;
}
154
// Dataset update response.
message DatasetUpdateResponse {
  // Final state of the updated dataset, as recorded after the update.
  Dataset resource = 1;
}
160
// Job insert request.
message JobInsertRequest {
  // The job to be inserted (see the `Job` message in this file).
  Job resource = 1;
}
166
// Job insert response.
message JobInsertResponse {
  // State of the job as recorded in response to the insert.
  Job resource = 1;
}
172
// Job query request.
message JobQueryRequest {
  // The query to run.
  string query = 1;

  // The maximum number of results to return.
  uint32 max_results = 2;

  // The default dataset for tables that do not have a dataset specified.
  DatasetName default_dataset = 3;

  // Project that the query should be charged to.
  string project_id = 4;

  // If true, don't actually run the job. Just check that it would run.
  // Mirrors `JobConfiguration.dry_run`.
  bool dry_run = 5;
}
190
// Job query response.
message JobQueryResponse {
  // The total number of rows in the full query result set
  // (which may be larger than the page returned by the request).
  uint64 total_results = 1;

  // Information about the queried job.
  Job job = 2;
}
199
// Job getQueryResults request.
message JobGetQueryResultsRequest {
  // Maximum number of results to return.
  uint32 max_results = 1;

  // Zero-based row number at which to start reading results.
  uint64 start_row = 2;
}
208
// Job getQueryResults response.
message JobGetQueryResultsResponse {
  // Total number of results in the query results
  // (not just the page bounded by the request's `max_results`).
  uint64 total_results = 1;

  // The job that was created to run the query.
  // It completed if `job.status.state` is `DONE`.
  // It failed if `job.status.errorResult` is also present.
  Job job = 2;
}
219
// Job getQueryDone response.
// Carried by the deprecated `AuditData.job_query_done_response` field.
message JobQueryDoneResponse {
  // The job and status information.
  // The job completed if `job.status.state` is `DONE`.
  Job job = 1;
}
226
// Query job completed event.
message JobCompletedEvent {
  // Name of the event.
  string event_name = 1;

  // Information about the completed job.
  Job job = 2;
}
235
// Table data read event. Only present for tables, not views, and is only
// included in the log record for the project that owns the table.
message TableDataReadEvent {
  // Name of the accessed table.
  TableName table_name = 1;

  // A list of referenced fields. This information is not included by default.
  // To enable this in the logs, please contact BigQuery support or open a bug
  // in the BigQuery issue tracker.
  repeated string referenced_fields = 2;
}
247
// Table data-list request.
message TableDataListRequest {
  // Zero-based starting row offset.
  uint64 start_row = 1;

  // Maximum number of results to return.
  uint32 max_results = 2;
}
256
// Describes a BigQuery table.
// See the [Table](/bigquery/docs/reference/v2/tables) API resource
// for more details on individual fields.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message Table {
  // Field number 3 is unused; per the note above it presumably held the
  // deprecated `schema` field. Reserving it prevents the number and name
  // from being reused with different semantics.
  reserved 3;
  reserved "schema";

  // The name of the table.
  TableName table_name = 1;

  // User-provided metadata for the table.
  TableInfo info = 2;

  // A JSON representation of the table's schema.
  string schema_json = 8;

  // If present, this is a virtual table defined by a SQL query.
  TableViewDefinition view = 4;

  // The expiration date for the table, after which the table
  // is deleted and the storage reclaimed.
  // If not present, the table persists indefinitely.
  google.protobuf.Timestamp expire_time = 5;

  // The time the table was created.
  google.protobuf.Timestamp create_time = 6;

  // The time the table was last truncated
  // by an operation with a `writeDisposition` of `WRITE_TRUNCATE`.
  google.protobuf.Timestamp truncate_time = 7;

  // The time the table was last modified.
  google.protobuf.Timestamp update_time = 9;

  // The table encryption information. Set when non-default encryption is used.
  EncryptionInfo encryption = 10;
}
294
// User-provided metadata for a table.
message TableInfo {
  // A short name for the table, such as `"Analytics Data - Jan 2011"`.
  string friendly_name = 1;

  // A long description, perhaps several paragraphs,
  // describing the table contents in detail.
  string description = 2;

  // Labels provided for the table.
  map<string, string> labels = 3;
}
307
// Describes a virtual table defined by a SQL query.
message TableViewDefinition {
  // SQL query text defining the view.
  string query = 1;
}
313
// BigQuery dataset information.
// See the [Dataset](/bigquery/docs/reference/v2/datasets) API resource
// for more details on individual fields.
// NOTE(review): field numbers 3 and 7 are unused in this version of the
// message; their original use is not documented here.
message Dataset {
  // The name of the dataset.
  DatasetName dataset_name = 1;

  // User-provided metadata for the dataset.
  DatasetInfo info = 2;

  // The time the dataset was created.
  google.protobuf.Timestamp create_time = 4;

  // The time the dataset was last modified.
  google.protobuf.Timestamp update_time = 5;

  // The access control list for the dataset.
  BigQueryAcl acl = 6;

  // If this field is present, each table that does not specify an
  // expiration time is assigned an expiration time by adding this
  // duration to the table's `createTime`.  If this field is empty,
  // there is no default table expiration time.
  google.protobuf.Duration default_table_expire_duration = 8;
}
339
// User-provided metadata for a dataset.
message DatasetInfo {
  // A short name for the dataset, such as `"Analytics Data 2011"`.
  string friendly_name = 1;

  // A long description, perhaps several paragraphs,
  // describing the dataset contents in detail.
  string description = 2;

  // Labels provided for the dataset.
  map<string, string> labels = 3;
}
352
// An access control list.
message BigQueryAcl {
  // Access control entry. Each entry grants one `role` to exactly one
  // grantee, identified by one of the grantee fields below.
  message Entry {
    // The granted role, which can be `READER`, `WRITER`, or `OWNER`.
    string role = 1;

    // Grants access to a group identified by an email address.
    string group_email = 2;

    // Grants access to a user identified by an email address.
    string user_email = 3;

    // Grants access to all members of a domain.
    string domain = 4;

    // Grants access to special groups. Valid groups are `PROJECT_OWNERS`,
    // `PROJECT_READERS`, `PROJECT_WRITERS` and `ALL_AUTHENTICATED_USERS`.
    string special_group = 5;

    // Grants access to a BigQuery View.
    TableName view_name = 6;
  }

  // Access control entry list.
  repeated Entry entries = 1;
}
380
// Describes a job.
message Job {
  // Job name (fully qualified: project, job ID, and location).
  JobName job_name = 1;

  // Job configuration.
  JobConfiguration job_configuration = 2;

  // Job status.
  JobStatus job_status = 3;

  // Job statistics.
  JobStatistics job_statistics = 4;
}
395
// Job configuration information.
// See the [Jobs](/bigquery/docs/reference/v2/jobs) API resource
// for more details on individual fields.
message JobConfiguration {
  // Describes a query job, which executes a SQL-like query.
  message Query {
    // The SQL query to run.
    string query = 1;

    // The table where results are written.
    TableName destination_table = 2;

    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 3;

    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 4;

    // If a table name is specified without a dataset in a query,
    // this dataset will be added to table name.
    DatasetName default_dataset = 5;

    // Describes data sources outside BigQuery, if needed.
    repeated TableDefinition table_definitions = 6;

    // Describes the priority given to the query:
    // `QUERY_INTERACTIVE` or `QUERY_BATCH`.
    string query_priority = 7;

    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 8;

    // Type of the statement (e.g. SELECT, INSERT, CREATE_TABLE, CREATE_MODEL..)
    string statement_type = 9;
  }

  // Describes a load job, which loads data from an external source via
  // the  import pipeline.
  message Load {
    // URIs for the data to be imported. Only Google Cloud Storage URIs are
    // supported.
    repeated string source_uris = 1;

    // The table schema in JSON format representation of a TableSchema.
    string schema_json = 6;

    // The table where the imported data is written.
    TableName destination_table = 3;

    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 4;

    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 5;

    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 7;
  }

  // Describes an extract job, which exports data to an external source
  // via the  export pipeline.
  message Extract {
    // Google Cloud Storage URIs where extracted data should be written.
    repeated string destination_uris = 1;

    // The source table.
    TableName source_table = 2;
  }

  // Describes a copy job, which copies an existing table to another table.
  message TableCopy {
    // Source tables.
    repeated TableName source_tables = 1;

    // Destination table.
    TableName destination_table = 2;

    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 3;

    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 4;

    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 5;
  }

  // Job configuration information.
  // Exactly one of these is set, identifying the job type (proto3 `oneof`).
  oneof configuration {
    // Query job information.
    Query query = 5;

    // Load job information.
    Load load = 6;

    // Extract job information.
    Extract extract = 7;

    // TableCopy job information.
    TableCopy table_copy = 8;
  }

  // If true, don't actually run the job. Just check that it would run.
  bool dry_run = 9;

  // Labels provided for the job.
  map<string, string> labels = 3;
}
513
// Describes an external data source used in a query.
message TableDefinition {
  // Name of the table, as used in queries that reference this source.
  string name = 1;

  // Google Cloud Storage URIs for the data to be imported.
  repeated string source_uris = 2;
}
522
// Running state of a job.
message JobStatus {
  // State of a job: `PENDING`, `RUNNING`, or `DONE`.
  string state = 1;

  // If the job did not complete successfully, this field describes why.
  google.rpc.Status error = 2;

  // Errors encountered during the running of the job. These do not
  // necessarily mean that the job has completed or was unsuccessful
  // (contrast with `error` above, which indicates overall failure).
  repeated google.rpc.Status additional_errors = 3;
}
535
// Job statistics that may change after a job starts.
message JobStatistics {
  // Job resource usage breakdown by reservation.
  message ReservationResourceUsage {
    // Reservation name or "unreserved" for on-demand resources usage.
    string name = 1;

    // Total slot milliseconds used by the reservation for a particular job.
    int64 slot_ms = 2;
  }

  // Time when the job was created.
  google.protobuf.Timestamp create_time = 1;

  // Time when the job started.
  google.protobuf.Timestamp start_time = 2;

  // Time when the job ended.
  google.protobuf.Timestamp end_time = 3;

  // Total bytes processed for a job.
  int64 total_processed_bytes = 4;

  // Processed bytes, adjusted by the job's CPU usage.
  int64 total_billed_bytes = 5;

  // The tier assigned by CPU-based billing.
  int32 billing_tier = 7;

  // The total number of slot-ms consumed by the query job.
  int64 total_slot_ms = 8;

  // Reservation usage. This field reported misleading information and will
  // no longer be populated. Aggregate usage of all jobs submitted to a
  // reservation should provide a more reliable indicator of reservation
  // imbalance. Use `reservation` below instead.
  repeated ReservationResourceUsage reservation_usage = 14 [deprecated = true];

  // Reservation name or "unreserved" for on-demand resource usage.
  string reservation = 16;

  // The first N tables accessed by the query job. Older queries that
  // reference a large number of tables may not have all of their
  // tables in this list. You can use the total_tables_processed count to
  // know how many total tables were read in the query. For new queries,
  // there is currently no limit.
  repeated TableName referenced_tables = 9;

  // Total number of unique tables referenced in the query.
  int32 total_tables_processed = 10;

  // The first N views accessed by the query job. Older queries that
  // reference a large number of views may not have all of their
  // views in this list. You can use the total_tables_processed count to
  // know how many total tables were read in the query. For new queries,
  // there is currently no limit.
  repeated TableName referenced_views = 11;

  // Total number of unique views referenced in the query.
  int32 total_views_processed = 12;

  // Number of output rows produced by the query job.
  int64 query_output_row_count = 15;

  // Total bytes loaded for an import job.
  int64 total_load_output_bytes = 13;
}
603
// The fully-qualified name for a dataset.
message DatasetName {
  // The project ID that contains the dataset.
  string project_id = 1;

  // The dataset ID within the project.
  string dataset_id = 2;
}
612
// The fully-qualified name for a table.
message TableName {
  // The project ID that contains the dataset.
  string project_id = 1;

  // The dataset ID within the project.
  string dataset_id = 2;

  // The table ID of the table within the dataset.
  string table_id = 3;
}
624
// The fully-qualified name for a job.
message JobName {
  // The project ID that owns the job.
  string project_id = 1;

  // The job ID within the project.
  string job_id = 2;

  // The job location.
  string location = 3;
}
636
// Describes encryption properties for a table or a job.
message EncryptionInfo {
  // Unique identifier of the Cloud KMS key used for encryption.
  string kms_key_name = 1;
}
642