chore: Update discovery artifacts (#1195)
* chore(accesscontextmanager): update the api
* chore(adexchangebuyer2): update the api
* chore(admin): update the api
* chore(alertcenter): update the api
* chore(analyticsadmin): update the api
* chore(analyticsdata): update the api
* chore(androidmanagement): update the api
* chore(apigateway): update the api
* chore(apigee): update the api
* chore(appengine): update the api
* chore(area120tables): update the api
* chore(artifactregistry): update the api
* chore(bigquery): update the api
* chore(bigqueryconnection): update the api
* chore(bigqueryreservation): update the api
* chore(billingbudgets): update the api
* chore(binaryauthorization): update the api
* chore(blogger): update the api
* chore(calendar): update the api
* chore(chat): update the api
* chore(cloudasset): update the api
* chore(cloudbuild): update the api
* chore(cloudfunctions): update the api
* chore(cloudidentity): update the api
* chore(cloudkms): update the api
* chore(cloudresourcemanager): update the api
* chore(cloudscheduler): update the api
* chore(cloudtasks): update the api
* chore(composer): update the api
* chore(compute): update the api
* chore(container): update the api
* chore(containeranalysis): update the api
* chore(content): update the api
* chore(datacatalog): update the api
* chore(dataflow): update the api
* chore(datafusion): update the api
* chore(datamigration): update the api
* chore(dataproc): update the api
* chore(deploymentmanager): update the api
* chore(dialogflow): update the api
* chore(displayvideo): update the api
* chore(dlp): update the api
* chore(dns): update the api
* chore(documentai): update the api
* chore(eventarc): update the api
* chore(file): update the api
* chore(firebaseml): update the api
* chore(games): update the api
* chore(gameservices): update the api
* chore(genomics): update the api
* chore(healthcare): update the api
* chore(homegraph): update the api
* chore(iam): update the api
* chore(iap): update the api
* chore(jobs): update the api
* chore(lifesciences): update the api
* chore(localservices): update the api
* chore(managedidentities): update the api
* chore(manufacturers): update the api
* chore(memcache): update the api
* chore(ml): update the api
* chore(monitoring): update the api
* chore(notebooks): update the api
* chore(osconfig): update the api
* chore(pagespeedonline): update the api
* chore(people): update the api
* chore(privateca): update the api
* chore(prod_tt_sasportal): update the api
* chore(pubsub): update the api
* chore(pubsublite): update the api
* chore(recommender): update the api
* chore(remotebuildexecution): update the api
* chore(reseller): update the api
* chore(run): update the api
* chore(safebrowsing): update the api
* chore(sasportal): update the api
* chore(searchconsole): update the api
* chore(secretmanager): update the api
* chore(securitycenter): update the api
* chore(serviceconsumermanagement): update the api
* chore(servicecontrol): update the api
* chore(servicenetworking): update the api
* chore(serviceusage): update the api
* chore(sheets): update the api
* chore(slides): update the api
* chore(spanner): update the api
* chore(speech): update the api
* chore(sqladmin): update the api
* chore(storage): update the api
* chore(storagetransfer): update the api
* chore(sts): update the api
* chore(tagmanager): update the api
* chore(testing): update the api
* chore(toolresults): update the api
* chore(transcoder): update the api
* chore(vectortile): update the api
* chore(videointelligence): update the api
* chore(vision): update the api
* chore(webmasters): update the api
* chore(workflowexecutions): update the api
* chore(youtube): update the api
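
For context, the Dataflow portion of this regeneration (diffed below) deprecates the `view` parameter on `projects.jobs.list` (ListJobs now always returns `JOB_VIEW_SUMMARY`) and surfaces new fields such as `environment.serviceOptions`, the output-only `environment.shuffleMode`, `environmentId` on SDK harness container images, and `satisfiesPzs`. The sketch below is illustrative only and is not part of the generated artifacts; the project ID, region, and page size are placeholders, and credentials are assumed to come from Application Default Credentials.

    from googleapiclient import discovery

    PROJECT_ID = "my-project"   # placeholder, not from this change
    REGION = "us-central1"      # placeholder regional endpoint

    dataflow = discovery.build("dataflow", "v1b3")

    # ListJobs always returns summaries now, so the deprecated `view`
    # parameter is simply omitted here.
    listing = dataflow.projects().jobs().list(
        projectId=PROJECT_ID,
        location=REGION,
        pageSize=50,
    ).execute()

    # For other JobViews, request a single job via GetJob instead.
    for job in listing.get("jobs", []):
        detail = dataflow.projects().jobs().get(
            projectId=PROJECT_ID,
            location=REGION,
            jobId=job["id"],
            view="JOB_VIEW_ALL",
        ).execute()
        # Output-only fields surfaced by this update, when present.
        print(detail.get("environment", {}).get("shuffleMode"))
        print(detail.get("satisfiesPzs"))

Jobs created through `projects.jobs.create` could likewise carry `environment.serviceOptions` in the request body now that the field is exposed, while `shuffleMode` and `satisfiesPzs` are output only and should not be set by callers.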
diff --git a/docs/dyn/dataflow_v1b3.projects.jobs.html b/docs/dyn/dataflow_v1b3.projects.jobs.html
index 7072751..622f5ed 100644
--- a/docs/dyn/dataflow_v1b3.projects.jobs.html
+++ b/docs/dyn/dataflow_v1b3.projects.jobs.html
@@ -135,7 +135,7 @@
location: string, The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains this job.
pageSize: integer, If there are many jobs, limit response to at most this many. The actual number of jobs returned will be the lesser of max_responses and an unspecified server-defined limit.
pageToken: string, Set this to the 'next_page_token' field of a previous response to request additional results in a long list.
- view: string, Level of information requested in response. Default is `JOB_VIEW_SUMMARY`.
+ view: string, Deprecated. ListJobs always returns summaries now. Use GetJob for other JobViews.
Allowed values
JOB_VIEW_UNKNOWN - The job view to return isn't specified, or is unknown. Responses will contain at least the `JOB_VIEW_SUMMARY` information, and may contain additional information.
JOB_VIEW_SUMMARY - Request summary information only: Project ID, Job ID, job name, job type, job status, start/end time, and Cloud SDK version details.
@@ -156,7 +156,7 @@
},
],
"jobs": [ # A subset of the requested job information.
- { # Defines a job to be run by the Cloud Dataflow service.
+ { # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -165,7 +165,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -177,6 +177,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -223,6 +227,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -417,6 +422,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -476,7 +482,7 @@
body: object, The request body.
The object takes the form of:
-{ # Defines a job to be run by the Cloud Dataflow service.
+{ # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -485,7 +491,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -497,6 +503,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -543,6 +553,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -737,6 +748,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -780,7 +792,7 @@
Returns:
An object of the form:
- { # Defines a job to be run by the Cloud Dataflow service.
+ { # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -789,7 +801,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -801,6 +813,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -847,6 +863,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -1041,6 +1058,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -1091,7 +1109,7 @@
Returns:
An object of the form:
- { # Defines a job to be run by the Cloud Dataflow service.
+ { # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -1100,7 +1118,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -1112,6 +1130,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -1158,6 +1180,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -1352,6 +1375,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -1438,7 +1462,7 @@
location: string, The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains this job.
pageSize: integer, If there are many jobs, limit response to at most this many. The actual number of jobs returned will be the lesser of max_responses and an unspecified server-defined limit.
pageToken: string, Set this to the 'next_page_token' field of a previous response to request additional results in a long list.
- view: string, Level of information requested in response. Default is `JOB_VIEW_SUMMARY`.
+ view: string, Deprecated. ListJobs always returns summaries now. Use GetJob for other JobViews.
Allowed values
JOB_VIEW_UNKNOWN - The job view to return isn't specified, or is unknown. Responses will contain at least the `JOB_VIEW_SUMMARY` information, and may contain additional information.
JOB_VIEW_SUMMARY - Request summary information only: Project ID, Job ID, job name, job type, job status, start/end time, and Cloud SDK version details.
@@ -1459,7 +1483,7 @@
},
],
"jobs": [ # A subset of the requested job information.
- { # Defines a job to be run by the Cloud Dataflow service.
+ { # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -1468,7 +1492,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -1480,6 +1504,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -1526,6 +1554,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -1720,6 +1749,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -1819,7 +1849,7 @@
body: object, The request body.
The object takes the form of:
-{ # Defines a job to be run by the Cloud Dataflow service.
+{ # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -1828,7 +1858,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -1840,6 +1870,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -1886,6 +1920,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -2080,6 +2115,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.
@@ -2116,7 +2152,7 @@
Returns:
An object of the form:
- { # Defines a job to be run by the Cloud Dataflow service.
+ { # Defines a job to be run by the Cloud Dataflow service. nextID: 26
"clientRequestId": "A String", # The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it.
"createTime": "A String", # The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service.
"createdFromSnapshotId": "A String", # If this is specified, the job's initial state is populated from the given snapshot.
@@ -2125,7 +2161,7 @@
"environment": { # Describes the environment in which a Dataflow Job runs. # The environment for the job.
"clusterManagerApiService": "A String", # The type of cluster manager API to use. If unknown or unspecified, the service will attempt to choose a reasonable default. This should be in the form of the API service name, e.g. "compute.googleapis.com".
"dataset": "A String", # The dataset for the current project where various workflow related tables are stored. The supported resource type is: Google BigQuery: bigquery.googleapis.com/{dataset}
- "experiments": [ # The list of experiments to enable.
+ "experiments": [ # The list of experiments to enable. This field should be used for SDK related experiments and not for service related experiments. The proper field for service related experiments is service_options. For more details see the rationale at go/user-specified-service-options.
"A String",
],
"flexResourceSchedulingGoal": "A String", # Which Flexible Resource Scheduling mode to run in.
@@ -2137,6 +2173,10 @@
},
"serviceAccountEmail": "A String", # Identity to run virtual machines as. Defaults to the default account.
"serviceKmsKeyName": "A String", # If set, contains the Cloud KMS key identifier used to encrypt data at rest, AKA a Customer Managed Encryption Key (CMEK). Format: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
+ "serviceOptions": [ # The list of service options to enable. This field should be used for service related experiments only. These experiments, when graduating to GA, should be replaced by dedicated fields or become default (i.e. always on). For more details see the rationale at go/user-specified-service-options.
+ "A String",
+ ],
+ "shuffleMode": "A String", # Output only. The shuffle mode used for the job.
"tempStoragePrefix": "A String", # The prefix of the resources the system should use for temporary storage. The system will append the suffix "/temp-{JOBNAME} to this resource prefix, where {JOBNAME} is the value of the job_name field. The resulting bucket and object prefix is used as the prefix of the resources used to store temporary data needed during the job execution. NOTE: This will override the value in taskrunner_settings. The supported resource type is: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object}
"userAgent": { # A description of the process that generated the request.
"a_key": "", # Properties of the object.
@@ -2183,6 +2223,7 @@
"sdkHarnessContainerImages": [ # Set of SDK harness containers needed to execute this pipeline. This will only be set in the Fn API path. For non-cross-language pipelines this should have only one entry. Cross-language pipelines will have two or more entries.
{ # Defines a SDK harness container for executing Dataflow pipelines.
"containerImage": "A String", # A docker container image that resides in Google Container Registry.
+ "environmentId": "A String", # Environment ID for the Beam runner API proto Environment that corresponds to the current SDK Harness.
"useSingleCorePerContainer": True or False, # If true, recommends the Dataflow service to use only one core per SDK container instance with this image. If false (or unset) recommends using more than one core per SDK container instance with this image for efficiency. Note that Dataflow service may choose to override this property if needed.
},
],
@@ -2377,6 +2418,7 @@
"replaceJobId": "A String", # If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job.
"replacedByJobId": "A String", # If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
"requestedState": "A String", # The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state.
+ "satisfiesPzs": True or False, # Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests.
"stageStates": [ # This field may be mutated by the Cloud Dataflow service; callers cannot mutate it.
{ # A message describing the state of a particular execution stage.
"currentStateTime": "A String", # The time at which the stage transitioned to this state.