// transfer.proto
  1. // Copyright 2022 Google LLC
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. syntax = "proto3";
  15. package google.cloud.bigquery.datatransfer.v1;
  16. import "google/api/field_behavior.proto";
  17. import "google/api/resource.proto";
  18. import "google/protobuf/struct.proto";
  19. import "google/protobuf/timestamp.proto";
  20. import "google/rpc/status.proto";
  21. option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
  22. option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
  23. option java_multiple_files = true;
  24. option java_outer_classname = "TransferProto";
  25. option java_package = "com.google.cloud.bigquery.datatransfer.v1";
  26. option objc_class_prefix = "GCBDT";
  27. option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
  28. option ruby_package = "Google::Cloud::Bigquery::DataTransfer::V1";
// DEPRECATED. Represents data transfer type.
// Kept only for backward compatibility; the enum carries
// `option deprecated = true` so generated code is flagged accordingly.
enum TransferType {
  option deprecated = true;

  // Invalid or Unknown transfer type placeholder.
  TRANSFER_TYPE_UNSPECIFIED = 0;

  // Batch data transfer.
  BATCH = 1;

  // Streaming data transfer. Streaming data source currently doesn't
  // support multiple transfer configs per project.
  STREAMING = 2;
}
// Represents data transfer run state.
enum TransferState {
  // State placeholder (0).
  TRANSFER_STATE_UNSPECIFIED = 0;

  // NOTE(review): value 1 is intentionally skipped in the published API;
  // presumably a retired state — consider a `reserved 1;` entry to prevent
  // accidental reuse (verify against the service before changing).

  // Data transfer is scheduled and is waiting to be picked up by
  // data transfer backend (2).
  PENDING = 2;

  // Data transfer is in progress (3).
  RUNNING = 3;

  // Data transfer completed successfully (4).
  SUCCEEDED = 4;

  // Data transfer failed (5).
  FAILED = 5;

  // Data transfer is cancelled (6).
  CANCELLED = 6;
}
// Represents preferences for sending email notifications for transfer run
// events.
message EmailPreferences {
  // If true, email notifications will be sent on transfer run failures.
  bool enable_failure_email = 1;
}
// Options customizing the data transfer schedule.
message ScheduleOptions {
  // If true, automatic scheduling of data transfer runs for this configuration
  // will be disabled. The runs can be started on ad-hoc basis using
  // StartManualTransferRuns API. When automatic scheduling is disabled, the
  // TransferConfig.schedule field will be ignored.
  bool disable_auto_scheduling = 3;

  // Specifies time to start scheduling transfer runs. The first run will be
  // scheduled at or after the start time according to a recurrence pattern
  // defined in the schedule string. The start time can be changed at any
  // moment. The time when a data transfer can be triggered manually is not
  // limited by this option.
  google.protobuf.Timestamp start_time = 1;

  // Defines time to stop scheduling transfer runs. A transfer run cannot be
  // scheduled at or after the end time. The end time can be changed at any
  // moment. The time when a data transfer can be triggered manually is not
  // limited by this option.
  google.protobuf.Timestamp end_time = 2;
}
// Information about a user.
message UserInfo {
  // E-mail address of the user.
  // `optional` gives explicit presence: an absent address is
  // distinguishable from an empty string.
  optional string email = 1;
}
  86. // Represents a data transfer configuration. A transfer configuration
  87. // contains all metadata needed to perform a data transfer. For example,
  88. // `destination_dataset_id` specifies where data should be stored.
  89. // When a new transfer configuration is created, the specified
  90. // `destination_dataset_id` is created when needed and shared with the
  91. // appropriate data source service account.
  92. message TransferConfig {
  93. option (google.api.resource) = {
  94. type: "bigquerydatatransfer.googleapis.com/TransferConfig"
  95. pattern: "projects/{project}/transferConfigs/{transfer_config}"
  96. pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}"
  97. };
  98. // The resource name of the transfer config.
  99. // Transfer config names have the form
  100. // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
  101. // Where `config_id` is usually a uuid, even though it is not
  102. // guaranteed or required. The name is ignored when creating a transfer
  103. // config.
  104. string name = 1;
  105. // The desination of the transfer config.
  106. oneof destination {
  107. // The BigQuery target dataset id.
  108. string destination_dataset_id = 2;
  109. }
  110. // User specified display name for the data transfer.
  111. string display_name = 3;
  112. // Data source ID. This cannot be changed once data transfer is created. The
  113. // full list of available data source IDs can be returned through an API call:
  114. // https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list
  115. string data_source_id = 5;
  116. // Parameters specific to each data source. For more information see the
  117. // bq tab in the 'Setting up a data transfer' section for each data source.
  118. // For example the parameters for Cloud Storage transfers are listed here:
  119. // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
  120. google.protobuf.Struct params = 9;
  121. // Data transfer schedule.
  122. // If the data source does not support a custom schedule, this should be
  123. // empty. If it is empty, the default value for the data source will be
  124. // used.
  125. // The specified times are in UTC.
  126. // Examples of valid format:
  127. // `1st,3rd monday of month 15:30`,
  128. // `every wed,fri of jan,jun 13:15`, and
  129. // `first sunday of quarter 00:00`.
  130. // See more explanation about the format here:
  131. // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
  132. //
  133. // NOTE: The minimum interval time between recurring transfers depends on the
  134. // data source; refer to the documentation for your data source.
  135. string schedule = 7;
  136. // Options customizing the data transfer schedule.
  137. ScheduleOptions schedule_options = 24;
  138. // The number of days to look back to automatically refresh the data.
  139. // For example, if `data_refresh_window_days = 10`, then every day
  140. // BigQuery reingests data for [today-10, today-1], rather than ingesting data
  141. // for just [today-1].
  142. // Only valid if the data source supports the feature. Set the value to 0
  143. // to use the default value.
  144. int32 data_refresh_window_days = 12;
  145. // Is this config disabled. When set to true, no runs are scheduled
  146. // for a given transfer.
  147. bool disabled = 13;
  148. // Output only. Data transfer modification time. Ignored by server on input.
  149. google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
  150. // Output only. Next time when data transfer will run.
  151. google.protobuf.Timestamp next_run_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
  152. // Output only. State of the most recently updated transfer run.
  153. TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
  154. // Deprecated. Unique ID of the user on whose behalf transfer is done.
  155. int64 user_id = 11;
  156. // Output only. Region in which BigQuery dataset is located.
  157. string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY];
  158. // Pub/Sub topic where notifications will be sent after transfer runs
  159. // associated with this transfer config finish.
  160. //
  161. // The format for specifying a pubsub topic is:
  162. // `projects/{project}/topics/{topic}`
  163. string notification_pubsub_topic = 15;
  164. // Email notifications will be sent according to these preferences
  165. // to the email address of the user who owns this transfer config.
  166. EmailPreferences email_preferences = 18;
  167. // Output only. Information about the user whose credentials are used to transfer data.
  168. // Populated only for `transferConfigs.get` requests. In case the user
  169. // information is not available, this field will not be populated.
  170. optional UserInfo owner_info = 27 [(google.api.field_behavior) = OUTPUT_ONLY];
  171. }
  172. // Represents a data transfer run.
  173. message TransferRun {
  174. option (google.api.resource) = {
  175. type: "bigquerydatatransfer.googleapis.com/Run"
  176. pattern: "projects/{project}/transferConfigs/{transfer_config}/runs/{run}"
  177. pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}"
  178. };
  179. // The resource name of the transfer run.
  180. // Transfer run names have the form
  181. // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
  182. // The name is ignored when creating a transfer run.
  183. string name = 1;
  184. // Minimum time after which a transfer run can be started.
  185. google.protobuf.Timestamp schedule_time = 3;
  186. // For batch transfer runs, specifies the date and time of the data should be
  187. // ingested.
  188. google.protobuf.Timestamp run_time = 10;
  189. // Status of the transfer run.
  190. google.rpc.Status error_status = 21;
  191. // Output only. Time when transfer run was started.
  192. // Parameter ignored by server for input requests.
  193. google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
  194. // Output only. Time when transfer run ended.
  195. // Parameter ignored by server for input requests.
  196. google.protobuf.Timestamp end_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
  197. // Output only. Last time the data transfer run state was updated.
  198. google.protobuf.Timestamp update_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
  199. // Output only. Parameters specific to each data source. For more information see the
  200. // bq tab in the 'Setting up a data transfer' section for each data source.
  201. // For example the parameters for Cloud Storage transfers are listed here:
  202. // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
  203. google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
  204. // Data transfer destination.
  205. oneof destination {
  206. // Output only. The BigQuery target dataset id.
  207. string destination_dataset_id = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
  208. }
  209. // Output only. Data source id.
  210. string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
  211. // Data transfer run state. Ignored for input requests.
  212. TransferState state = 8;
  213. // Deprecated. Unique ID of the user on whose behalf transfer is done.
  214. int64 user_id = 11;
  215. // Output only. Describes the schedule of this transfer run if it was
  216. // created as part of a regular schedule. For batch transfer runs that are
  217. // scheduled manually, this is empty.
  218. // NOTE: the system might choose to delay the schedule depending on the
  219. // current load, so `schedule_time` doesn't always match this.
  220. string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY];
  221. // Output only. Pub/Sub topic where a notification will be sent after this
  222. // transfer run finishes.
  223. //
  224. // The format for specifying a pubsub topic is:
  225. // `projects/{project}/topics/{topic}`
  226. string notification_pubsub_topic = 23 [(google.api.field_behavior) = OUTPUT_ONLY];
  227. // Output only. Email notifications will be sent according to these
  228. // preferences to the email address of the user who owns the transfer config
  229. // this run was derived from.
  230. EmailPreferences email_preferences = 25 [(google.api.field_behavior) = OUTPUT_ONLY];
  231. }
// Represents a user facing message for a particular data transfer run.
message TransferMessage {
  // Represents data transfer user facing message severity.
  // Nested because it has no meaning outside TransferMessage.
  enum MessageSeverity {
    // No severity specified.
    MESSAGE_SEVERITY_UNSPECIFIED = 0;

    // Informational message.
    INFO = 1;

    // Warning message.
    WARNING = 2;

    // Error message.
    ERROR = 3;
  }

  // Time when message was logged.
  google.protobuf.Timestamp message_time = 1;

  // Message severity.
  MessageSeverity severity = 2;

  // Message text.
  string message_text = 3;
}