// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.dataflow.v1beta3;

import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/dataflow/v1beta3/environment.proto";
import "google/dataflow/v1beta3/snapshots.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";

option csharp_namespace = "Google.Cloud.Dataflow.V1Beta3";
option go_package = "google.golang.org/genproto/googleapis/dataflow/v1beta3;dataflow";
option java_multiple_files = true;
option java_outer_classname = "JobsProto";
option java_package = "com.google.dataflow.v1beta3";
option php_namespace = "Google\\Cloud\\Dataflow\\V1beta3";
option ruby_package = "Google::Cloud::Dataflow::V1beta3";

// Provides a method to create and modify Google Cloud Dataflow jobs.
// A Job is a multi-stage computation graph run by the Cloud Dataflow service.
service JobsV1Beta3 {
  option (google.api.default_host) = "dataflow.googleapis.com";
  option (google.api.oauth_scopes) =
      "https://www.googleapis.com/auth/cloud-platform,"
      "https://www.googleapis.com/auth/compute,"
      "https://www.googleapis.com/auth/compute.readonly,"
      "https://www.googleapis.com/auth/userinfo.email";

  // Creates a Cloud Dataflow job.
  //
  // To create a job, we recommend using `projects.locations.jobs.create` with a
  // [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using
  // `projects.jobs.create` is not recommended, as your job will always start
  // in `us-central1`.
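  //
  // As an illustrative sketch only (the project ID, region, and job name
  // below are assumptions, not values defined by this API), a request to a
  // regional endpoint might look like:
  //
  //   POST https://dataflow.googleapis.com/v1b3/projects/my-project/locations/us-east1/jobs
  //
  //   { "name": "example-job", ... }
  //
  // With `body: "job"`, the HTTP body is the Job message itself.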
  rpc CreateJob(CreateJobRequest) returns (Job) {
    option (google.api.http) = {
      post: "/v1b3/projects/{project_id}/locations/{location}/jobs"
      body: "job"
      additional_bindings {
        post: "/v1b3/projects/{project_id}/jobs"
        body: "job"
      }
    };
  }

  // Gets the state of the specified Cloud Dataflow job.
  //
  // To get the state of a job, we recommend using `projects.locations.jobs.get`
  // with a [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using
  // `projects.jobs.get` is not recommended, as you can only get the state of
  // jobs that are running in `us-central1`.
  rpc GetJob(GetJobRequest) returns (Job) {
    option (google.api.http) = {
      get: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}"
      additional_bindings {
        get: "/v1b3/projects/{project_id}/jobs/{job_id}"
      }
    };
  }

  // Updates the state of an existing Cloud Dataflow job.
  //
  // To update the state of an existing job, we recommend using
  // `projects.locations.jobs.update` with a [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using
  // `projects.jobs.update` is not recommended, as you can only update the state
  // of jobs that are running in `us-central1`.
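  //
  // For example (an illustrative sketch; the project, region, and job ID are
  // assumptions), cancelling a job amounts to an update whose Job body sets
  // only the requested state:
  //
  //   PUT https://dataflow.googleapis.com/v1b3/projects/my-project/locations/us-east1/jobs/JOB_ID
  //
  //   { "requestedState": "JOB_STATE_CANCELLED" }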
  rpc UpdateJob(UpdateJobRequest) returns (Job) {
    option (google.api.http) = {
      put: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}"
      body: "job"
      additional_bindings {
        put: "/v1b3/projects/{project_id}/jobs/{job_id}"
        body: "job"
      }
    };
  }

  // List the jobs of a project.
  //
  // To list the jobs of a project in a region, we recommend using
  // `projects.locations.jobs.list` with a [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To
  // list all jobs across all regions, use `projects.jobs.aggregated`. Using
  // `projects.jobs.list` is not recommended, as you can only get the list of
  // jobs that are running in `us-central1`.
  rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) {
    option (google.api.http) = {
      get: "/v1b3/projects/{project_id}/locations/{location}/jobs"
      additional_bindings {
        get: "/v1b3/projects/{project_id}/jobs"
      }
    };
  }

  // List the jobs of a project across all regions.
  rpc AggregatedListJobs(ListJobsRequest) returns (ListJobsResponse) {
    option (google.api.http) = {
      get: "/v1b3/projects/{project_id}/jobs:aggregated"
    };
  }

  // Check for existence of active jobs in the given project across all regions.
  rpc CheckActiveJobs(CheckActiveJobsRequest) returns (CheckActiveJobsResponse) {
  }

  // Snapshot the state of a streaming job.
  rpc SnapshotJob(SnapshotJobRequest) returns (Snapshot) {
    option (google.api.http) = {
      post: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot"
      body: "*"
      additional_bindings {
        post: "/v1b3/projects/{project_id}/jobs/{job_id}:snapshot"
        body: "*"
      }
    };
  }
}

// Defines a job to be run by the Cloud Dataflow service.
message Job {
  // The unique ID of this job.
  //
  // This field is set by the Cloud Dataflow service when the Job is
  // created, and is immutable for the life of the job.
  string id = 1;

  // The ID of the Cloud Platform project that the job belongs to.
  string project_id = 2;

  // The user-specified Cloud Dataflow job name.
  //
  // Only one Job with a given name may exist in a project at any
  // given time. If a caller attempts to create a Job with the same
  // name as an already-existing Job, the attempt returns the
  // existing Job.
  //
  // The name must match the regular expression
  // `[a-z]([-a-z0-9]{0,1022}[a-z0-9])?`
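  //
  // For example (illustrative names, not defined by this API):
  // `wordcount-2022-01-01` matches the expression, while `WordCount`
  // (uppercase) and `-wordcount` (leading hyphen) do not.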
  string name = 3;

  // The type of Cloud Dataflow job.
  JobType type = 4;

  // The environment for the job.
  Environment environment = 5;

  // Exactly one of steps or steps_location should be specified.
  //
  // The top-level steps that constitute the entire job. Only retrieved with
  // JOB_VIEW_ALL.
  repeated Step steps = 6;

  // The Cloud Storage location where the steps are stored.
  string steps_location = 24;

  // The current state of the job.
  //
  // Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise
  // specified.
  //
  // A job in the `JOB_STATE_RUNNING` state may asynchronously enter a
  // terminal state. After a job has reached a terminal state, no
  // further state updates may be made.
  //
  // This field may be mutated by the Cloud Dataflow service;
  // callers cannot mutate it.
  JobState current_state = 7;

  // The timestamp associated with the current state.
  google.protobuf.Timestamp current_state_time = 8;

  // The job's requested state.
  //
  // `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and
  // `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may
  // also be used to directly set a job's requested state to
  // `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the
  // job if it has not already reached a terminal state.
  JobState requested_state = 9;

  // Deprecated.
  JobExecutionInfo execution_info = 10;

  // The timestamp when the job was initially created. Immutable and set by the
  // Cloud Dataflow service.
  google.protobuf.Timestamp create_time = 11;

  // If this job is an update of an existing job, this field is the job ID
  // of the job it replaced.
  //
  // When sending a `CreateJobRequest`, you can update a job by specifying it
  // here. The job named here is stopped, and its intermediate state is
  // transferred to this job.
  string replace_job_id = 12;

  // The map of transform name prefixes of the job to be replaced to the
  // corresponding name prefixes of the new job.
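  //
  // For example (illustrative prefixes, not prescribed by this API):
  // `{"oldTransformPrefix": "newTransformPrefix"}`.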
  map<string, string> transform_name_mapping = 13;

  // The client's unique identifier of the job, re-used across retried attempts.
  // If this field is set, the service will ensure its uniqueness.
  // The request to create a job will fail if the service has knowledge of a
  // previously submitted job with the same client's ID and job name.
  // The caller may use this field to ensure idempotence of job
  // creation across retried attempts to create a job.
  // By default, the field is empty and, in that case, the service ignores it.
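  //
  // For example (an illustrative value; the API does not prescribe a format):
  // a caller might send the same UUID, such as
  // `123e4567-e89b-12d3-a456-426614174000`, with every retry of one logical
  // create request, so duplicate jobs are rejected rather than created twice.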
  string client_request_id = 14;

  // If another job is an update of this job (and thus, this job is in
  // `JOB_STATE_UPDATED`), this field contains the ID of that job.
  string replaced_by_job_id = 15;

  // A set of files the system should be aware of that are used
  // for temporary storage. These temporary files will be
  // removed on job completion.
  // No duplicates are allowed.
  // No file patterns are supported.
  //
  // The supported files are:
  //
  // Google Cloud Storage:
  //
  //    storage.googleapis.com/{bucket}/{object}
  //    bucket.storage.googleapis.com/{object}
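  //
  // For example (an illustrative bucket and object):
  // `storage.googleapis.com/my-bucket/temp/setup-files/extra.jar`.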
  repeated string temp_files = 16;

  // User-defined labels for this job.
  //
  // The labels map can contain no more than 64 entries. Entries of the labels
  // map are UTF8 strings that comply with the following restrictions:
  //
  // * Keys must conform to regexp: [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
  // * Values must conform to regexp: [\p{Ll}\p{Lo}\p{N}_-]{0,63}
  // * Both keys and values are additionally constrained to be <= 128 bytes in
  //   size.
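  //
  // For example (illustrative entries that satisfy these rules):
  // `{"environment": "production", "cost-center": "cc-1234"}`.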
  map<string, string> labels = 17;

  // The [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // contains this job.
  string location = 18;

  // Preliminary field: The format of this data may change at any time.
  // A description of the user pipeline and stages through which it is executed.
  // Created by Cloud Dataflow service. Only retrieved with
  // JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL.
  PipelineDescription pipeline_description = 19;

  // This field may be mutated by the Cloud Dataflow service;
  // callers cannot mutate it.
  repeated ExecutionStageState stage_states = 20;

  // This field is populated by the Dataflow service to support filtering jobs
  // by the metadata values provided here. Populated for ListJobs and all GetJob
  // views SUMMARY and higher.
  JobMetadata job_metadata = 21;

  // The timestamp when the job was started (transitioned to JOB_STATE_PENDING).
  // Flexible resource scheduling jobs are started with some delay after job
  // creation, so start_time is unset before start and is updated when the
  // job is started by the Cloud Dataflow service. For other jobs, start_time
  // always equals create_time and is immutable and set by the Cloud Dataflow
  // service.
  google.protobuf.Timestamp start_time = 22;

  // If this is specified, the job's initial state is populated from the given
  // snapshot.
  string created_from_snapshot_id = 23;

  // Reserved for future use. This field is set only in responses from the
  // server; it is ignored if it is set in any requests.
  bool satisfies_pzs = 25;
}

// Metadata for a Datastore connector used by the job.
message DatastoreIODetails {
  // Namespace used in the connection.
  string namespace = 1;

  // ProjectId accessed in the connection.
  string project_id = 2;
}

// Metadata for a Pub/Sub connector used by the job.
message PubSubIODetails {
  // Topic accessed in the connection.
  string topic = 1;

  // Subscription used in the connection.
  string subscription = 2;
}

// Metadata for a File connector used by the job.
message FileIODetails {
  // File Pattern used to access files by the connector.
  string file_pattern = 1;
}

// Metadata for a Cloud Bigtable connector used by the job.
message BigTableIODetails {
  // ProjectId accessed in the connection.
  string project_id = 1;

  // InstanceId accessed in the connection.
  string instance_id = 2;

  // TableId accessed in the connection.
  string table_id = 3;
}

// Metadata for a BigQuery connector used by the job.
message BigQueryIODetails {
  // Table accessed in the connection.
  string table = 1;

  // Dataset accessed in the connection.
  string dataset = 2;

  // Project accessed in the connection.
  string project_id = 3;

  // Query used to access data in the connection.
  string query = 4;
}

// Metadata for a Spanner connector used by the job.
message SpannerIODetails {
  // ProjectId accessed in the connection.
  string project_id = 1;

  // InstanceId accessed in the connection.
  string instance_id = 2;

  // DatabaseId accessed in the connection.
  string database_id = 3;
}

// The version of the SDK used to run the job.
message SdkVersion {
  // The support status of the SDK used to run the job.
  enum SdkSupportStatus {
    // Cloud Dataflow is unaware of this version.
    UNKNOWN = 0;

    // This is a known version of an SDK, and is supported.
    SUPPORTED = 1;

    // A newer version of the SDK family exists, and an update is recommended.
    STALE = 2;

    // This version of the SDK is deprecated and will eventually be
    // unsupported.
    DEPRECATED = 3;

    // Support for this SDK version has ended and it should no longer be used.
    UNSUPPORTED = 4;
  }

  // The version of the SDK used to run the job.
  string version = 1;

  // A readable string describing the version of the SDK.
  string version_display_name = 2;

  // The support status for this SDK version.
  SdkSupportStatus sdk_support_status = 3;
}

// Metadata available primarily for filtering jobs. Will be included in the
// ListJobs response and Job SUMMARY view.
message JobMetadata {
  // The SDK version used to run the job.
  SdkVersion sdk_version = 1;

  // Identification of a Spanner source used in the Dataflow job.
  repeated SpannerIODetails spanner_details = 2;

  // Identification of a BigQuery source used in the Dataflow job.
  repeated BigQueryIODetails bigquery_details = 3;

  // Identification of a Cloud Bigtable source used in the Dataflow job.
  repeated BigTableIODetails big_table_details = 4;

  // Identification of a Pub/Sub source used in the Dataflow job.
  repeated PubSubIODetails pubsub_details = 5;

  // Identification of a File source used in the Dataflow job.
  repeated FileIODetails file_details = 6;

  // Identification of a Datastore source used in the Dataflow job.
  repeated DatastoreIODetails datastore_details = 7;
}

// A message describing the state of a particular execution stage.
message ExecutionStageState {
  // The name of the execution stage.
  string execution_stage_name = 1;

  // Execution stage states allow the same set of values as JobState.
  JobState execution_stage_state = 2;

  // The time at which the stage transitioned to this state.
  google.protobuf.Timestamp current_state_time = 3;
}

// A descriptive representation of the submitted pipeline as well as the
// executed form. This data is provided by the Dataflow service for ease of
// visualizing the pipeline and interpreting Dataflow-provided metrics.
message PipelineDescription {
  // Description of each transform in the pipeline and collections between them.
  repeated TransformSummary original_pipeline_transform = 1;

  // Description of each stage of execution of the pipeline.
  repeated ExecutionStageSummary execution_pipeline_stage = 2;

  // Pipeline level display data.
  repeated DisplayData display_data = 3;
}

// Type of transform or stage operation.
enum KindType {
  // Unrecognized transform type.
  UNKNOWN_KIND = 0;

  // ParDo transform.
  PAR_DO_KIND = 1;

  // Group By Key transform.
  GROUP_BY_KEY_KIND = 2;

  // Flatten transform.
  FLATTEN_KIND = 3;

  // Read transform.
  READ_KIND = 4;

  // Write transform.
  WRITE_KIND = 5;

  // Constructs from a constant value, such as with Create.of.
  CONSTANT_KIND = 6;

  // Creates a Singleton view of a collection.
  SINGLETON_KIND = 7;

  // Opening or closing a shuffle session, often as part of a GroupByKey.
  SHUFFLE_KIND = 8;
}

// Description of the type, names/ids, and input/outputs for a transform.
message TransformSummary {
  // Type of transform.
  KindType kind = 1;

  // SDK generated id of this transform instance.
  string id = 2;

  // User provided name for this transform instance.
  string name = 3;

  // Transform-specific display data.
  repeated DisplayData display_data = 4;

  // User names for all collection outputs of this transform.
  repeated string output_collection_name = 5;

  // User names for all collection inputs to this transform.
  repeated string input_collection_name = 6;
}

// Description of the composing transforms, names/ids, and input/outputs of a
// stage of execution. Some composing transforms and sources may have been
// generated by the Dataflow service during execution planning.
message ExecutionStageSummary {
  // Description of an input or output of an execution stage.
  message StageSource {
    // Human-readable name for this source; may be user or system generated.
    string user_name = 1;

    // Dataflow service generated name for this source.
    string name = 2;

    // User name for the original user transform or collection with which this
    // source is most closely associated.
    string original_transform_or_collection = 3;

    // Size of the source, if measurable.
    int64 size_bytes = 4;
  }

  // Description of a transform executed as part of an execution stage.
  message ComponentTransform {
    // Human-readable name for this transform; may be user or system generated.
    string user_name = 1;

    // Dataflow service generated name for this transform.
    string name = 2;

    // User name for the original user transform with which this transform is
    // most closely associated.
    string original_transform = 3;
  }

  // Description of an interstitial value between transforms in an execution
  // stage.
  message ComponentSource {
    // Human-readable name for this source; may be user or system generated.
    string user_name = 1;

    // Dataflow service generated name for this source.
    string name = 2;

    // User name for the original user transform or collection with which this
    // source is most closely associated.
    string original_transform_or_collection = 3;
  }

  // Dataflow service generated name for this stage.
  string name = 1;

  // Dataflow service generated id for this stage.
  string id = 2;

  // Type of transform this stage is executing.
  KindType kind = 3;

  // Input sources for this stage.
  repeated StageSource input_source = 4;

  // Output sources for this stage.
  repeated StageSource output_source = 5;

  // Other stages that must complete before this stage can run.
  repeated string prerequisite_stage = 8;

  // Transforms that comprise this execution stage.
  repeated ComponentTransform component_transform = 6;

  // Collections produced and consumed by component transforms of this stage.
  repeated ComponentSource component_source = 7;
}

// Data provided with a pipeline or transform to provide descriptive info.
message DisplayData {
  // The key identifying the display data.
  // This is intended to be used as a label for the display data
  // when viewed in a dax monitoring system.
  string key = 1;

  // The namespace for the key. This is usually a class name or programming
  // language namespace (e.g. a Python module) which defines the display data.
  // This allows a dax monitoring system to specially handle the data
  // and perform custom rendering.
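  //
  // For example (illustrative values, not defined by this API): a namespace
  // of `org.apache.beam.sdk.io.TextIO` with a key of `filePattern`.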
  string namespace = 2;

  // Various value types which can be used for display data. Only one will be
  // set.
  oneof Value {
    // Contains value if the data is of string type.
    string str_value = 4;

    // Contains value if the data is of int64 type.
    int64 int64_value = 5;

    // Contains value if the data is of float type.
    float float_value = 6;

    // Contains value if the data is of java class type.
    string java_class_value = 7;

    // Contains value if the data is of timestamp type.
    google.protobuf.Timestamp timestamp_value = 8;

    // Contains value if the data is of duration type.
    google.protobuf.Duration duration_value = 9;

    // Contains value if the data is of a boolean type.
    bool bool_value = 10;
  }

  // A possible additional shorter value to display.
  // For example, a java_class_value of com.mypackage.MyDoFn
  // will be stored with MyDoFn as the short_str_value and
  // com.mypackage.MyDoFn as the java_class_value.
  // short_str_value can be displayed and java_class_value
  // will be displayed as a tooltip.
  string short_str_value = 11;

  // An optional full URL.
  string url = 12;

  // An optional label to display in a dax UI for the element.
  string label = 13;
}

// Defines a particular step within a Cloud Dataflow job.
//
// A job consists of multiple steps, each of which performs some
// specific operation as part of the overall job. Data is typically
// passed from one step to another as part of the job.
//
// Here's an example of a sequence of steps which together implement a
// Map-Reduce job:
//
//   * Read a collection of data from some source, parsing the
//     collection's elements.
//
//   * Validate the elements.
//
//   * Apply a user-defined function to map each element to some value
//     and extract an element-specific key value.
//
//   * Group elements with the same key into a single element with
//     that key, transforming a multiply-keyed collection into a
//     uniquely-keyed collection.
//
//   * Write the elements out to some data sink.
//
// Note that the Cloud Dataflow service may be used to run many different
// types of jobs, not just Map-Reduce.
message Step {
  // The kind of step in the Cloud Dataflow job.
  string kind = 1;

  // The name that identifies the step. This must be unique for each
  // step with respect to all other steps in the Cloud Dataflow job.
  string name = 2;

  // Named properties associated with the step. Each kind of
  // predefined step has its own required set of properties.
  // Must be provided on Create. Only retrieved with JOB_VIEW_ALL.
  google.protobuf.Struct properties = 3;
}

// Describes the overall state of a [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job].
enum JobState {
  // The job's run state isn't specified.
  JOB_STATE_UNKNOWN = 0;

  // `JOB_STATE_STOPPED` indicates that the job has not
  // yet started to run.
  JOB_STATE_STOPPED = 1;

  // `JOB_STATE_RUNNING` indicates that the job is currently running.
  JOB_STATE_RUNNING = 2;

  // `JOB_STATE_DONE` indicates that the job has successfully completed.
  // This is a terminal job state. This state may be set by the Cloud Dataflow
  // service, as a transition from `JOB_STATE_RUNNING`. It may also be set via a
  // Cloud Dataflow `UpdateJob` call, if the job has not yet reached a terminal
  // state.
  JOB_STATE_DONE = 3;

  // `JOB_STATE_FAILED` indicates that the job has failed. This is a
  // terminal job state. This state may only be set by the Cloud Dataflow
  // service, and only as a transition from `JOB_STATE_RUNNING`.
  JOB_STATE_FAILED = 4;

  // `JOB_STATE_CANCELLED` indicates that the job has been explicitly
  // cancelled. This is a terminal job state. This state may only be
  // set via a Cloud Dataflow `UpdateJob` call, and only if the job has not
  // yet reached another terminal state.
  JOB_STATE_CANCELLED = 5;

  // `JOB_STATE_UPDATED` indicates that the job was successfully updated,
  // meaning that this job was stopped and another job was started, inheriting
  // state from this one. This is a terminal job state. This state may only be
  // set by the Cloud Dataflow service, and only as a transition from
  // `JOB_STATE_RUNNING`.
  JOB_STATE_UPDATED = 6;

  // `JOB_STATE_DRAINING` indicates that the job is in the process of draining.
  // A draining job has stopped pulling from its input sources and is processing
  // any data that remains in-flight. This state may be set via a Cloud Dataflow
  // `UpdateJob` call, but only as a transition from `JOB_STATE_RUNNING`. Jobs
  // that are draining may only transition to `JOB_STATE_DRAINED`,
  // `JOB_STATE_CANCELLED`, or `JOB_STATE_FAILED`.
  JOB_STATE_DRAINING = 7;

  // `JOB_STATE_DRAINED` indicates that the job has been drained.
  // A drained job terminated by stopping pulling from its input sources and
  // processing any data that remained in-flight when draining was requested.
  // This state is a terminal state, may only be set by the Cloud Dataflow
  // service, and only as a transition from `JOB_STATE_DRAINING`.
  JOB_STATE_DRAINED = 8;

  // `JOB_STATE_PENDING` indicates that the job has been created but is not yet
  // running. Jobs that are pending may only transition to `JOB_STATE_RUNNING`,
  // or `JOB_STATE_FAILED`.
  JOB_STATE_PENDING = 9;

  // `JOB_STATE_CANCELLING` indicates that the job has been explicitly cancelled
  // and is in the process of stopping. Jobs that are cancelling may only
  // transition to `JOB_STATE_CANCELLED` or `JOB_STATE_FAILED`.
  JOB_STATE_CANCELLING = 10;

  // `JOB_STATE_QUEUED` indicates that the job has been created but is being
  // delayed until launch. Jobs that are queued may only transition to
  // `JOB_STATE_PENDING` or `JOB_STATE_CANCELLED`.
  JOB_STATE_QUEUED = 11;

  // `JOB_STATE_RESOURCE_CLEANING_UP` indicates that the batch job's associated
  // resources are currently being cleaned up after a successful run.
  // Currently, this is an opt-in feature; please reach out to the Cloud
  // support team if you are interested.
  JOB_STATE_RESOURCE_CLEANING_UP = 12;
}

// Additional information about how a Cloud Dataflow job will be executed that
// isn't contained in the submitted job.
message JobExecutionInfo {
  // A mapping from each stage to the information about that stage.
  map<string, JobExecutionStageInfo> stages = 1;
}

// Contains information about how a particular
// [google.dataflow.v1beta3.Step][google.dataflow.v1beta3.Step] will be executed.
message JobExecutionStageInfo {
  // The steps associated with the execution stage.
  // Note that stages may have several steps, and that a given step
  // might be run by more than one stage.
  repeated string step_name = 1;
}

// Selector for how much information is returned in Job responses.
enum JobView {
  // The job view to return isn't specified, or is unknown.
  // Responses will contain at least the `JOB_VIEW_SUMMARY` information,
  // and may contain additional information.
  JOB_VIEW_UNKNOWN = 0;

  // Request summary information only:
  // Project ID, Job ID, job name, job type, job status, start/end time,
  // and Cloud SDK version details.
  JOB_VIEW_SUMMARY = 1;

  // Request all information available for this job.
  JOB_VIEW_ALL = 2;

  // Request summary info and limited job description data for steps, labels and
  // environment.
  JOB_VIEW_DESCRIPTION = 3;
}

// Request to create a Cloud Dataflow job.
message CreateJobRequest {
  // The ID of the Cloud Platform project that the job belongs to.
  string project_id = 1;

  // The job to create.
  Job job = 2;

  // The level of information requested in response.
  JobView view = 3;

  // Deprecated. This field is now in the Job message.
  string replace_job_id = 4;

  // The [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // contains this job.
  string location = 5;
}

// Request to get the state of a Cloud Dataflow job.
message GetJobRequest {
  // The ID of the Cloud Platform project that the job belongs to.
  string project_id = 1;

  // The job ID.
  string job_id = 2;

  // The level of information requested in response.
  JobView view = 3;

  // The [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // contains this job.
  string location = 4;
}

// Request to update a Cloud Dataflow job.
message UpdateJobRequest {
  // The ID of the Cloud Platform project that the job belongs to.
  string project_id = 1;

  // The job ID.
  string job_id = 2;

  // The updated job.
  // Only the job state is updatable; other fields will be ignored.
  Job job = 3;

  // The [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // contains this job.
  string location = 4;
}

// Request to list Cloud Dataflow jobs.
message ListJobsRequest {
  // Filters the returned jobs to those in the specified job state. The
  // order of data returned is determined by the filter used, and is subject to
  // change.
  enum Filter {
    // The filter isn't specified, or is unknown. This returns all jobs ordered
    // on descending `JobUuid`.
    UNKNOWN = 0;

    // Returns all running jobs first ordered on creation timestamp, then
    // returns all terminated jobs ordered on the termination timestamp.
    ALL = 1;

    // Filters the jobs that have a terminated state, ordered on the
    // termination timestamp. Example terminated states: `JOB_STATE_STOPPED`,
    // `JOB_STATE_UPDATED`, `JOB_STATE_DRAINED`, etc.
    TERMINATED = 2;

    // Filters the jobs that are running ordered on the creation timestamp.
    ACTIVE = 3;
  }

  // The kind of filter to use.
  Filter filter = 5;

  // The project which owns the jobs.
  string project_id = 1;

  // Deprecated. ListJobs always returns summaries now.
  // Use GetJob for other JobViews.
  JobView view = 2 [deprecated = true];

  // If there are many jobs, limit response to at most this many.
  // The actual number of jobs returned will be the lesser of page_size
  // and an unspecified server-defined limit.
  int32 page_size = 3;

  // Set this to the 'next_page_token' field of a previous response
  // to request additional results in a long list.
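  //
  // For example (an illustrative flow): the first request omits page_token;
  // each later request copies next_page_token from the prior
  // ListJobsResponse, until next_page_token comes back empty.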
  string page_token = 4;

  // The [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // contains this job.
  string location = 17;
}

// Indicates which [regional endpoint]
// (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) failed
// to respond to a request for data.
message FailedLocation {
  // The name of the [regional endpoint]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // failed to respond.
  string name = 1;
}

// Response to a request to list Cloud Dataflow jobs in a project. This might
// be a partial response, depending on the page size in the ListJobsRequest.
// However, if the project does not have any jobs, an instance of
// ListJobsResponse is not returned and the request's response
// body is empty {}.
message ListJobsResponse {
  // A subset of the requested job information.
  repeated Job jobs = 1;

  // Set if there may be more results than fit in this response.
  string next_page_token = 2;

  // Zero or more messages describing the [regional endpoints]
  // (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that
  // failed to respond.
  repeated FailedLocation failed_location = 3;
}

// Request to create a snapshot of a job.
message SnapshotJobRequest {
  // The project which owns the job to be snapshotted.
  string project_id = 1;

  // The job to be snapshotted.
  string job_id = 2;

  // TTL for the snapshot.
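  //
  // For example (an illustrative value): in the JSON mapping of
  // google.protobuf.Duration, "3600s" keeps the snapshot for one hour.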
  google.protobuf.Duration ttl = 3;

  // The location that contains this job.
  string location = 4;

  // If true, perform snapshots for sources which support this.
  bool snapshot_sources = 5;

  // User specified description of the snapshot. May be empty.
  string description = 6;
}

// Request to check whether active jobs exist for a project.
message CheckActiveJobsRequest {
  // The project which owns the jobs.
  string project_id = 1;
}

// Response for CheckActiveJobsRequest.
message CheckActiveJobsResponse {
  // True if active jobs exist for the project; false otherwise.
  bool active_jobs_exist = 1;
}