// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.bigquery.logging.v1;

import "google/iam/v1/iam_policy.proto";
import "google/iam/v1/policy.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";

option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1;logging";
option java_multiple_files = true;
option java_outer_classname = "AuditDataProto";
option java_package = "com.google.cloud.bigquery.logging.v1";

// BigQuery request and response messages for audit log.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message AuditData {
  // Request data for each BigQuery method.
  oneof request {
    // Table insert request.
    TableInsertRequest table_insert_request = 1;
    // Table update request.
    TableUpdateRequest table_update_request = 16;
    // Dataset list request.
    DatasetListRequest dataset_list_request = 2;
    // Dataset insert request.
    DatasetInsertRequest dataset_insert_request = 3;
    // Dataset update request.
    DatasetUpdateRequest dataset_update_request = 4;
    // Job insert request.
    JobInsertRequest job_insert_request = 5;
    // Job query request.
    JobQueryRequest job_query_request = 6;
    // Job get query results request.
    JobGetQueryResultsRequest job_get_query_results_request = 7;
    // Table data-list request.
    TableDataListRequest table_data_list_request = 8;
    // IAM policy request.
    google.iam.v1.SetIamPolicyRequest set_iam_policy_request = 20;
  }

  // Response data for each BigQuery method.
  oneof response {
    // Table insert response.
    TableInsertResponse table_insert_response = 9;
    // Table update response.
    TableUpdateResponse table_update_response = 10;
    // Dataset insert response.
    DatasetInsertResponse dataset_insert_response = 11;
    // Dataset update response.
    DatasetUpdateResponse dataset_update_response = 12;
    // Job insert response.
    JobInsertResponse job_insert_response = 18;
    // Job query response.
    JobQueryResponse job_query_response = 13;
    // Job get query results response.
    JobGetQueryResultsResponse job_get_query_results_response = 14;
    // Deprecated: Job query-done response. Use this information for usage
    // analysis.
    JobQueryDoneResponse job_query_done_response = 15;
    // IAM policy.
    google.iam.v1.Policy policy_response = 21;
  }

  // A job completion event.
  JobCompletedEvent job_completed_event = 17;
  // Information about the table access events.
  repeated TableDataReadEvent table_data_read_events = 19;
}
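
// Illustrative example (not part of the schema): a hypothetical `AuditData`
// payload for a completed query job, sketched in protobuf text format.
// All identifiers below ("my-project", "job_abc123") are invented.
//
//   job_completed_event {
//     event_name: "query_job_completed"
//     job {
//       job_name { project_id: "my-project" job_id: "job_abc123" location: "US" }
//       job_status { state: "DONE" }
//     }
//   }
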
// Table insert request.
message TableInsertRequest {
  // The new table.
  Table resource = 1;
}

// Table update request.
message TableUpdateRequest {
  // The table to be updated.
  Table resource = 1;
}

// Table insert response.
message TableInsertResponse {
  // Final state of the inserted table.
  Table resource = 1;
}

// Table update response.
message TableUpdateResponse {
  // Final state of the updated table.
  Table resource = 1;
}

// Dataset list request.
message DatasetListRequest {
  // Whether to list all datasets, including hidden ones.
  bool list_all = 1;
}

// Dataset insert request.
message DatasetInsertRequest {
  // The dataset to be inserted.
  Dataset resource = 1;
}

// Dataset insert response.
message DatasetInsertResponse {
  // Final state of the inserted dataset.
  Dataset resource = 1;
}

// Dataset update request.
message DatasetUpdateRequest {
  // The dataset to be updated.
  Dataset resource = 1;
}

// Dataset update response.
message DatasetUpdateResponse {
  // Final state of the updated dataset.
  Dataset resource = 1;
}

// Job insert request.
message JobInsertRequest {
  // The job to be inserted.
  Job resource = 1;
}

// Job insert response.
message JobInsertResponse {
  // Final state of the inserted job.
  Job resource = 1;
}

// Job query request.
message JobQueryRequest {
  // The query.
  string query = 1;
  // The maximum number of results.
  uint32 max_results = 2;
  // The default dataset for tables that do not have a dataset specified.
  DatasetName default_dataset = 3;
  // Project that the query should be charged to.
  string project_id = 4;
  // If true, don't actually run the job. Just check that it would run.
  bool dry_run = 5;
}

// Job query response.
message JobQueryResponse {
  // The total number of rows in the full query result set.
  uint64 total_results = 1;
  // Information about the queried job.
  Job job = 2;
}

// Job getQueryResults request.
message JobGetQueryResultsRequest {
  // Maximum number of results to return.
  uint32 max_results = 1;
  // Zero-based row number at which to start.
  uint64 start_row = 2;
}

// Job getQueryResults response.
message JobGetQueryResultsResponse {
  // Total number of results in the query results.
  uint64 total_results = 1;
  // The job that was created to run the query.
  // It completed if `job.status.state` is `DONE`.
  // It failed if `job.status.errorResult` is also present.
  Job job = 2;
}

// Job getQueryDone response.
message JobQueryDoneResponse {
  // The job and status information.
  // The job completed if `job.status.state` is `DONE`.
  Job job = 1;
}

// Query job completed event.
message JobCompletedEvent {
  // Name of the event.
  string event_name = 1;
  // Job information.
  Job job = 2;
}

// Table data read event. Only present for tables, not views, and is only
// included in the log record for the project that owns the table.
message TableDataReadEvent {
  // Name of the accessed table.
  TableName table_name = 1;
  // A list of referenced fields. This information is not included by default.
  // To enable this in the logs, please contact BigQuery support or open a bug
  // in the BigQuery issue tracker.
  repeated string referenced_fields = 2;
}
// Table data-list request.
message TableDataListRequest {
  // Starting row offset.
  uint64 start_row = 1;
  // Maximum number of results to return.
  uint32 max_results = 2;
}

// Describes a BigQuery table.
// See the [Table](/bigquery/docs/reference/v2/tables) API resource
// for more details on individual fields.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message Table {
  // The name of the table.
  TableName table_name = 1;
  // User-provided metadata for the table.
  TableInfo info = 2;
  // A JSON representation of the table's schema.
  string schema_json = 8;
  // If present, this is a virtual table defined by a SQL query.
  TableViewDefinition view = 4;
  // The expiration date for the table, after which the table
  // is deleted and the storage reclaimed.
  // If not present, the table persists indefinitely.
  google.protobuf.Timestamp expire_time = 5;
  // The time the table was created.
  google.protobuf.Timestamp create_time = 6;
  // The time the table was last truncated
  // by an operation with a `writeDisposition` of `WRITE_TRUNCATE`.
  google.protobuf.Timestamp truncate_time = 7;
  // The time the table was last modified.
  google.protobuf.Timestamp update_time = 9;
  // The table encryption information. Set when non-default encryption is used.
  EncryptionInfo encryption = 10;
}
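
// Illustrative example (not part of the schema): a hypothetical `Table`
// entry in protobuf text format. The project, dataset, and table IDs and
// the schema contents are invented.
//
//   table_name { project_id: "my-project" dataset_id: "analytics" table_id: "events" }
//   info { friendly_name: "Analytics Events" }
//   schema_json: "{\"fields\":[{\"name\":\"event_id\",\"type\":\"STRING\"}]}"
//   create_time { seconds: 1609459200 }
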
// User-provided metadata for a table.
message TableInfo {
  // A short name for the table, such as `"Analytics Data - Jan 2011"`.
  string friendly_name = 1;
  // A long description, perhaps several paragraphs,
  // describing the table contents in detail.
  string description = 2;
  // Labels provided for the table.
  map<string, string> labels = 3;
}

// Describes a virtual table defined by a SQL query.
message TableViewDefinition {
  // SQL query defining the view.
  string query = 1;
}

// BigQuery dataset information.
// See the [Dataset](/bigquery/docs/reference/v2/datasets) API resource
// for more details on individual fields.
message Dataset {
  // The name of the dataset.
  DatasetName dataset_name = 1;
  // User-provided metadata for the dataset.
  DatasetInfo info = 2;
  // The time the dataset was created.
  google.protobuf.Timestamp create_time = 4;
  // The time the dataset was last modified.
  google.protobuf.Timestamp update_time = 5;
  // The access control list for the dataset.
  BigQueryAcl acl = 6;
  // If this field is present, each table that does not specify an
  // expiration time is assigned an expiration time by adding this
  // duration to the table's `createTime`. If this field is empty,
  // there is no default table expiration time.
  google.protobuf.Duration default_table_expire_duration = 8;
}
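
// Illustrative example (not part of the schema): a hypothetical `Dataset`
// entry in protobuf text format. A table created in this dataset without an
// explicit expiration would expire 7 days (604800 seconds) after its
// `createTime`. All identifiers are invented.
//
//   dataset_name { project_id: "my-project" dataset_id: "analytics" }
//   info { friendly_name: "Analytics" }
//   default_table_expire_duration { seconds: 604800 }
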
// User-provided metadata for a dataset.
message DatasetInfo {
  // A short name for the dataset, such as `"Analytics Data 2011"`.
  string friendly_name = 1;
  // A long description, perhaps several paragraphs,
  // describing the dataset contents in detail.
  string description = 2;
  // Labels provided for the dataset.
  map<string, string> labels = 3;
}

// An access control list.
message BigQueryAcl {
  // Access control entry.
  message Entry {
    // The granted role, which can be `READER`, `WRITER`, or `OWNER`.
    string role = 1;
    // Grants access to a group identified by an email address.
    string group_email = 2;
    // Grants access to a user identified by an email address.
    string user_email = 3;
    // Grants access to all members of a domain.
    string domain = 4;
    // Grants access to special groups. Valid groups are `PROJECT_OWNERS`,
    // `PROJECT_READERS`, `PROJECT_WRITERS`, and `ALL_AUTHENTICATED_USERS`.
    string special_group = 5;
    // Grants access to a BigQuery view.
    TableName view_name = 6;
  }

  // Access control entry list.
  repeated Entry entries = 1;
}
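
// Illustrative example (not part of the schema): a hypothetical `BigQueryAcl`
// in protobuf text format, granting the owner role to one user and read
// access to all project readers. The email address is invented.
//
//   entries { role: "OWNER" user_email: "analyst@example.com" }
//   entries { role: "READER" special_group: "PROJECT_READERS" }
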
// Describes a job.
message Job {
  // Job name.
  JobName job_name = 1;
  // Job configuration.
  JobConfiguration job_configuration = 2;
  // Job status.
  JobStatus job_status = 3;
  // Job statistics.
  JobStatistics job_statistics = 4;
}

// Job configuration information.
// See the [Jobs](/bigquery/docs/reference/v2/jobs) API resource
// for more details on individual fields.
message JobConfiguration {
  // Describes a query job, which executes a SQL-like query.
  message Query {
    // The SQL query to run.
    string query = 1;
    // The table where results are written.
    TableName destination_table = 2;
    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 3;
    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 4;
    // If a table name is specified without a dataset in a query,
    // this dataset is added to the table name.
    DatasetName default_dataset = 5;
    // Describes data sources outside BigQuery, if needed.
    repeated TableDefinition table_definitions = 6;
    // Describes the priority given to the query:
    // `QUERY_INTERACTIVE` or `QUERY_BATCH`.
    string query_priority = 7;
    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 8;
    // Type of the statement (e.g. SELECT, INSERT, CREATE_TABLE, CREATE_MODEL, ...).
    string statement_type = 9;
  }

  // Describes a load job, which loads data from an external source via
  // the import pipeline.
  message Load {
    // URIs for the data to be imported. Only Google Cloud Storage URIs are
    // supported.
    repeated string source_uris = 1;
    // The table schema, as a JSON representation of a TableSchema.
    string schema_json = 6;
    // The table where the imported data is written.
    TableName destination_table = 3;
    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 4;
    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 5;
    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 7;
  }

  // Describes an extract job, which exports data to an external source
  // via the export pipeline.
  message Extract {
    // Google Cloud Storage URIs where extracted data should be written.
    repeated string destination_uris = 1;
    // The source table.
    TableName source_table = 2;
  }

  // Describes a copy job, which copies an existing table to another table.
  message TableCopy {
    // Source tables.
    repeated TableName source_tables = 1;
    // Destination table.
    TableName destination_table = 2;
    // Describes when a job is allowed to create a table:
    // `CREATE_IF_NEEDED`, `CREATE_NEVER`.
    string create_disposition = 3;
    // Describes how writes affect existing tables:
    // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
    string write_disposition = 4;
    // Result table encryption information. Set when non-default encryption is
    // used.
    EncryptionInfo destination_table_encryption = 5;
  }

  // Job configuration information.
  oneof configuration {
    // Query job information.
    Query query = 5;
    // Load job information.
    Load load = 6;
    // Extract job information.
    Extract extract = 7;
    // TableCopy job information.
    TableCopy table_copy = 8;
  }

  // If true, don't actually run the job. Just check that it would run.
  bool dry_run = 9;
  // Labels provided for the job.
  map<string, string> labels = 3;
}
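
// Illustrative example (not part of the schema): a hypothetical
// `JobConfiguration` for an interactive query that appends to an existing
// table, in protobuf text format. All names and the query text are invented.
//
//   query {
//     query: "SELECT user_id, COUNT(*) FROM analytics.events GROUP BY user_id"
//     destination_table { project_id: "my-project" dataset_id: "analytics" table_id: "daily_counts" }
//     create_disposition: "CREATE_IF_NEEDED"
//     write_disposition: "WRITE_APPEND"
//     query_priority: "QUERY_INTERACTIVE"
//     statement_type: "SELECT"
//   }
//   dry_run: false
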
// Describes an external data source used in a query.
message TableDefinition {
  // Name of the table, used in queries.
  string name = 1;
  // Google Cloud Storage URIs for the data to be imported.
  repeated string source_uris = 2;
}

// Running state of a job.
message JobStatus {
  // State of a job: `PENDING`, `RUNNING`, or `DONE`.
  string state = 1;
  // If the job did not complete successfully, this field describes why.
  google.rpc.Status error = 2;
  // Errors encountered during the running of the job. These errors do not
  // necessarily mean that the job has completed or was unsuccessful.
  repeated google.rpc.Status additional_errors = 3;
}
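
// Illustrative example (not part of the schema): a hypothetical `JobStatus`
// for a job that finished with an error, in protobuf text format.
// `google.rpc.Status` carries a numeric code and a human-readable message;
// the values shown here are invented.
//
//   state: "DONE"
//   error { code: 3 message: "Query error: table analytics.missing was not found" }
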
// Job statistics that may change after a job starts.
message JobStatistics {
  // Job resource usage breakdown by reservation.
  message ReservationResourceUsage {
    // Reservation name or "unreserved" for on-demand resource usage.
    string name = 1;
    // Total slot milliseconds used by the reservation for a particular job.
    int64 slot_ms = 2;
  }

  // Time when the job was created.
  google.protobuf.Timestamp create_time = 1;
  // Time when the job started.
  google.protobuf.Timestamp start_time = 2;
  // Time when the job ended.
  google.protobuf.Timestamp end_time = 3;
  // Total bytes processed for a job.
  int64 total_processed_bytes = 4;
  // Processed bytes, adjusted by the job's CPU usage.
  int64 total_billed_bytes = 5;
  // The tier assigned by CPU-based billing.
  int32 billing_tier = 7;
  // The total number of slot-ms consumed by the query job.
  int64 total_slot_ms = 8;
  // Reservation usage.
  repeated ReservationResourceUsage reservation_usage = 14;
  // The first N tables accessed by the query job. Older queries that
  // reference a large number of tables may not have all of their
  // tables in this list. You can use the total_tables_processed count to
  // know how many total tables were read in the query. For new queries,
  // there is currently no limit.
  repeated TableName referenced_tables = 9;
  // Total number of unique tables referenced in the query.
  int32 total_tables_processed = 10;
  // The first N views accessed by the query job. Older queries that
  // reference a large number of views may not have all of their
  // views in this list. You can use the total_views_processed count to
  // know how many total views were read in the query. For new queries,
  // there is currently no limit.
  repeated TableName referenced_views = 11;
  // Total number of unique views referenced in the query.
  int32 total_views_processed = 12;
  // Number of output rows produced by the query job.
  int64 query_output_row_count = 15;
  // Total bytes loaded for an import job.
  int64 total_load_output_bytes = 13;
}
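
// Illustrative example (not part of the schema): hypothetical `JobStatistics`
// for a query, in protobuf text format. The numbers are invented and assume
// that `total_slot_ms` aggregates the per-reservation `slot_ms` values; as a
// rough intuition, a job averaging 50 slots for 10 seconds consumes about
// 500000 slot-ms.
//
//   total_processed_bytes: 1073741824
//   total_slot_ms: 500000
//   reservation_usage { name: "unreserved" slot_ms: 500000 }
//   total_tables_processed: 2
//   query_output_row_count: 1000
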
// The fully-qualified name for a dataset.
message DatasetName {
  // The project ID.
  string project_id = 1;
  // The dataset ID within the project.
  string dataset_id = 2;
}

// The fully-qualified name for a table.
message TableName {
  // The project ID.
  string project_id = 1;
  // The dataset ID within the project.
  string dataset_id = 2;
  // The table ID of the table within the dataset.
  string table_id = 3;
}
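
// Illustrative example (not part of the schema): the table referenced in SQL
// as `my-project.analytics.events` would appear as the hypothetical
// `TableName` below, in protobuf text format.
//
//   project_id: "my-project"
//   dataset_id: "analytics"
//   table_id: "events"
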
// The fully-qualified name for a job.
message JobName {
  // The project ID.
  string project_id = 1;
  // The job ID within the project.
  string job_id = 2;
  // The job location.
  string location = 3;
}

// Describes encryption properties for a table or a job.
message EncryptionInfo {
  // Unique identifier for the Cloud KMS key.
  string kms_key_name = 1;
}
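
// Illustrative example (not part of the schema): Cloud KMS key names follow
// the resource format shown below; the project, location, key ring, and key
// IDs here are invented.
//
//   kms_key_name: "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"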