chore: Update discovery artifacts (#1531)
## Deleted keys were detected in the following stable discovery artifacts:
storage v1 https://github.com/googleapis/google-api-python-client/commit/6117646c93e672eb34816b6db4d2b84c3c046071
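Deleted keys matter because the generated client builds its method surface at runtime from these discovery documents, so callers that still reference removed fields can break when they pick up the regenerated artifacts. A minimal sketch of building a client from the bundled artifacts (hypothetical usage, not part of this PR; assumes Application Default Credentials, and `my-project` is a placeholder):

```python
# Minimal sketch: the generated client builds its surface from these
# discovery artifacts at runtime. static_discovery=True reads the document
# bundled with google-api-python-client instead of fetching it over HTTP.
# Assumes Application Default Credentials; "my-project" is a placeholder.
from googleapiclient.discovery import build

service = build("storage", "v1", static_discovery=True)

# Keys deleted from the artifact disappear from this surface, which is why
# deletions in stable artifacts are called out above.
buckets = service.buckets().list(project="my-project").execute()
print([b["name"] for b in buckets.get("items", [])])
```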
## Discovery Artifact Change Summary:
feat(androidmanagement): update the api https://github.com/googleapis/google-api-python-client/commit/493de7636af575bec7e3d646c77d81a4278891e7
feat(composer): update the api https://github.com/googleapis/google-api-python-client/commit/827a98a27eb06dee06080e01edc1b9d1304bae67
feat(compute): update the api https://github.com/googleapis/google-api-python-client/commit/967d539cb9dcccfe2eea8fd81e05989f1bd92975
feat(contactcenterinsights): update the api https://github.com/googleapis/google-api-python-client/commit/fd55971dcc7913faa7c90614e1b44122da9f3c1d
feat(containeranalysis): update the api https://github.com/googleapis/google-api-python-client/commit/be52e3f77f0900ea3369a3f1145702832ea2167a
feat(content): update the api https://github.com/googleapis/google-api-python-client/commit/c422dda8dc607554e34899c964c36b32c554bb61
feat(dataflow): update the api https://github.com/googleapis/google-api-python-client/commit/9357bc2b4b507ba98fd17988eb93e0c08da00bc3
feat(datastore): update the api https://github.com/googleapis/google-api-python-client/commit/ee1091a834aaf37e6b2a279f901543d43152da74
feat(documentai): update the api https://github.com/googleapis/google-api-python-client/commit/02e062eb95ebadf2f8002c34424a7442d327c765
feat(healthcare): update the api https://github.com/googleapis/google-api-python-client/commit/29bd379b11ee39b49d7452f0e9d7aada1536a22f
feat(notebooks): update the api https://github.com/googleapis/google-api-python-client/commit/438b148616d638783b17bf7fe060cdb57a8bc473
feat(ondemandscanning): update the api https://github.com/googleapis/google-api-python-client/commit/8f732ecf65df8e7aa8ad58258ed5d5a0dfed62ea
feat(osconfig): update the api https://github.com/googleapis/google-api-python-client/commit/655a50711fb06b94a3b33a173611cc39cfb2553f
feat(pubsublite): update the api https://github.com/googleapis/google-api-python-client/commit/fc27fe7319f659032e2c3e9fe7be24224dca9fb6
feat(run): update the api https://github.com/googleapis/google-api-python-client/commit/de851d225affb67ba9819e9d4c81dc14bc95dcd1
feat(sasportal): update the api https://github.com/googleapis/google-api-python-client/commit/9e472d5f1b8f31708fd535a3a8575f0510dad5a7
feat(storage): update the api https://github.com/googleapis/google-api-python-client/commit/6117646c93e672eb34816b6db4d2b84c3c046071
feat(sts): update the api https://github.com/googleapis/google-api-python-client/commit/9e0f476952df90e2fb9b6df287c2ceb2a5417c84
feat(youtube): update the api https://github.com/googleapis/google-api-python-client/commit/2624f80fe82466181d853c35138e04064b1edcef
diff --git a/docs/dyn/notebooks_v1.projects.locations.executions.html b/docs/dyn/notebooks_v1.projects.locations.executions.html
index 8f1dc62..d997ba0 100644
--- a/docs/dyn/notebooks_v1.projects.locations.executions.html
+++ b/docs/dyn/notebooks_v1.projects.locations.executions.html
@@ -79,7 +79,7 @@
<p class="firstline">Close httplib2 connections.</p>
<p class="toc_element">
<code><a href="#create">create(parent, body=None, executionId=None, x__xgafv=None)</a></code></p>
-<p class="firstline">Creates a new Scheduled Notebook in a given project and location.</p>
+<p class="firstline">Creates a new Execution in a given project and location.</p>
<p class="toc_element">
<code><a href="#delete">delete(name, x__xgafv=None)</a></code></p>
<p class="firstline">Deletes execution</p>
@@ -100,7 +100,7 @@
<div class="method">
<code class="details" id="create">create(parent, body=None, executionId=None, x__xgafv=None)</code>
- <pre>Creates a new Scheduled Notebook in a given project and location.
+ <pre>Creates a new Execution in a given project and location.

Args:
  parent: string, Required. Format: `parent=projects/{project_id}/locations/{location}` (required)

@@ -112,7 +112,7 @@
"description": "A String", # A brief description of this execution.
"displayName": "A String", # Output only. Name used for UI purposes. Name can only contain alphanumeric characters and underscores '_'.
"executionTemplate": { # The description a notebook execution workload. # execute metadata including name, hardware spec, region, labels, etc.
- "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check GPUs on Compute Engine to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
+ "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check [GPUs on Compute Engine](https://cloud.google.com/compute/docs/gpus) to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
"coreCount": "A String", # Count of cores of this accelerator.
"type": "A String", # Type of this accelerator.
},
@@ -120,20 +120,23 @@
"dataprocParameters": { # Parameters used in Dataproc JobType executions. # Parameters used in Dataproc JobType executions.
"cluster": "A String", # URI for cluster used to run Dataproc execution. Format: 'projects/{PROJECT_ID}/regions/{REGION}/clusters/{CLUSTER_NAME}
},
- "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{project_id}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
+ "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{bucket_name}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
"jobType": "A String", # The type of Job to be used on this execution.
"labels": { # Labels for execution. If execution is scheduled, a field included will be 'nbs-scheduled'. Otherwise, it is an immediate execution, and an included field will be 'nbs-immediate'. Use fields to efficiently index between various types of executions.
"a_key": "A String",
},
- "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU.
- "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{project_id}/{folder} Ex: gs://notebook_user/scheduled_notebooks
+ "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU](https://cloud.google.com/ai-platform/training/docs/using-tpus#configuring_a_custom_tpu_machine).
+ "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{bucket_name}/{folder} Ex: gs://notebook_user/scheduled_notebooks
"parameters": "A String", # Parameters used within the 'input_notebook_file' notebook.
"paramsYamlFile": "A String", # Parameters to be overridden in the notebook during execution. Ref https://papermill.readthedocs.io/en/latest/usage-parameterize.html on how to specifying parameters in the input notebook and pass them here in an YAML file. Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook_params.yaml
"scaleTier": "A String", # Required. Scale tier of the hardware used for notebook execution. DEPRECATED Will be discontinued. As right now only CUSTOM is supported.
"serviceAccount": "A String", # The email address of a service account to use when running the execution. You must have the `iam.serviceAccounts.actAs` permission for the specified service account.
+ "vertexAiParameters": { # Parameters used in Vertex AI JobType executions. # Parameters used in Vertex AI JobType executions.
+ "network": "A String", # The full name of the Compute Engine [network](/compute/docs/networks-and-firewalls#networks) to which the Job should be peered. For example, `projects/12345/global/networks/myVPC`. [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert) is of the form `projects/{project}/global/networks/{network}`. Where {project} is a project number, as in `12345`, and {network} is a network name. Private services access must already be configured for the network. If left unspecified, the job is not peered with any network.
+ },
},
"jobUri": "A String", # Output only. The URI of the external job used to execute the notebook.
- "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/execution/{execution_id}
+ "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/executions/{execution_id}`
"outputNotebookFile": "A String", # Output notebook file generated by this execution
"state": "A String", # Output only. State of the underlying AI Platform job.
"updateTime": "A String", # Output only. Time the Execution was last updated.
@@ -223,7 +226,7 @@
"description": "A String", # A brief description of this execution.
"displayName": "A String", # Output only. Name used for UI purposes. Name can only contain alphanumeric characters and underscores '_'.
"executionTemplate": { # The description a notebook execution workload. # execute metadata including name, hardware spec, region, labels, etc.
- "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check GPUs on Compute Engine to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
+ "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check [GPUs on Compute Engine](https://cloud.google.com/compute/docs/gpus) to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
"coreCount": "A String", # Count of cores of this accelerator.
"type": "A String", # Type of this accelerator.
},
@@ -231,20 +234,23 @@
"dataprocParameters": { # Parameters used in Dataproc JobType executions. # Parameters used in Dataproc JobType executions.
"cluster": "A String", # URI for cluster used to run Dataproc execution. Format: 'projects/{PROJECT_ID}/regions/{REGION}/clusters/{CLUSTER_NAME}
},
- "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{project_id}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
+ "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{bucket_name}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
"jobType": "A String", # The type of Job to be used on this execution.
"labels": { # Labels for execution. If execution is scheduled, a field included will be 'nbs-scheduled'. Otherwise, it is an immediate execution, and an included field will be 'nbs-immediate'. Use fields to efficiently index between various types of executions.
"a_key": "A String",
},
- "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU.
- "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{project_id}/{folder} Ex: gs://notebook_user/scheduled_notebooks
+ "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU](https://cloud.google.com/ai-platform/training/docs/using-tpus#configuring_a_custom_tpu_machine).
+ "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{bucket_name}/{folder} Ex: gs://notebook_user/scheduled_notebooks
"parameters": "A String", # Parameters used within the 'input_notebook_file' notebook.
"paramsYamlFile": "A String", # Parameters to be overridden in the notebook during execution. Ref https://papermill.readthedocs.io/en/latest/usage-parameterize.html on how to specifying parameters in the input notebook and pass them here in an YAML file. Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook_params.yaml
"scaleTier": "A String", # Required. Scale tier of the hardware used for notebook execution. DEPRECATED Will be discontinued. As right now only CUSTOM is supported.
"serviceAccount": "A String", # The email address of a service account to use when running the execution. You must have the `iam.serviceAccounts.actAs` permission for the specified service account.
+ "vertexAiParameters": { # Parameters used in Vertex AI JobType executions. # Parameters used in Vertex AI JobType executions.
+ "network": "A String", # The full name of the Compute Engine [network](/compute/docs/networks-and-firewalls#networks) to which the Job should be peered. For example, `projects/12345/global/networks/myVPC`. [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert) is of the form `projects/{project}/global/networks/{network}`. Where {project} is a project number, as in `12345`, and {network} is a network name. Private services access must already be configured for the network. If left unspecified, the job is not peered with any network.
+ },
},
"jobUri": "A String", # Output only. The URI of the external job used to execute the notebook.
- "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/execution/{execution_id}
+ "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/executions/{execution_id}`
"outputNotebookFile": "A String", # Output notebook file generated by this execution
"state": "A String", # Output only. State of the underlying AI Platform job.
"updateTime": "A String", # Output only. Time the Execution was last updated.
@@ -276,7 +282,7 @@
"description": "A String", # A brief description of this execution.
"displayName": "A String", # Output only. Name used for UI purposes. Name can only contain alphanumeric characters and underscores '_'.
"executionTemplate": { # The description a notebook execution workload. # execute metadata including name, hardware spec, region, labels, etc.
- "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check GPUs on Compute Engine to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
+ "acceleratorConfig": { # Definition of a hardware accelerator. Note that not all combinations of `type` and `core_count` are valid. Check [GPUs on Compute Engine](https://cloud.google.com/compute/docs/gpus) to find a valid combination. TPUs are not supported. # Configuration (count and accelerator type) for hardware running notebook execution.
"coreCount": "A String", # Count of cores of this accelerator.
"type": "A String", # Type of this accelerator.
},
@@ -284,20 +290,23 @@
"dataprocParameters": { # Parameters used in Dataproc JobType executions. # Parameters used in Dataproc JobType executions.
"cluster": "A String", # URI for cluster used to run Dataproc execution. Format: 'projects/{PROJECT_ID}/regions/{REGION}/clusters/{CLUSTER_NAME}
},
- "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{project_id}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
+ "inputNotebookFile": "A String", # Path to the notebook file to execute. Must be in a Google Cloud Storage bucket. Format: gs://{bucket_name}/{folder}/{notebook_file_name} Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook.ipynb
"jobType": "A String", # The type of Job to be used on this execution.
"labels": { # Labels for execution. If execution is scheduled, a field included will be 'nbs-scheduled'. Otherwise, it is an immediate execution, and an included field will be 'nbs-immediate'. Use fields to efficiently index between various types of executions.
"a_key": "A String",
},
- "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU.
- "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{project_id}/{folder} Ex: gs://notebook_user/scheduled_notebooks
+ "masterType": "A String", # Specifies the type of virtual machine to use for your training job's master worker. You must specify this field when `scaleTier` is set to `CUSTOM`. You can use certain Compute Engine machine types directly in this field. The following types are supported: - `n1-standard-4` - `n1-standard-8` - `n1-standard-16` - `n1-standard-32` - `n1-standard-64` - `n1-standard-96` - `n1-highmem-2` - `n1-highmem-4` - `n1-highmem-8` - `n1-highmem-16` - `n1-highmem-32` - `n1-highmem-64` - `n1-highmem-96` - `n1-highcpu-16` - `n1-highcpu-32` - `n1-highcpu-64` - `n1-highcpu-96` Alternatively, you can use the following legacy machine types: - `standard` - `large_model` - `complex_model_s` - `complex_model_m` - `complex_model_l` - `standard_gpu` - `complex_model_m_gpu` - `complex_model_l_gpu` - `standard_p100` - `complex_model_m_p100` - `standard_v100` - `large_model_v100` - `complex_model_m_v100` - `complex_model_l_v100` Finally, if you want to use a TPU for training, specify `cloud_tpu` in this field. Learn more about the [special configuration options for training with TPU](https://cloud.google.com/ai-platform/training/docs/using-tpus#configuring_a_custom_tpu_machine).
+ "outputNotebookFolder": "A String", # Path to the notebook folder to write to. Must be in a Google Cloud Storage bucket path. Format: gs://{bucket_name}/{folder} Ex: gs://notebook_user/scheduled_notebooks
"parameters": "A String", # Parameters used within the 'input_notebook_file' notebook.
"paramsYamlFile": "A String", # Parameters to be overridden in the notebook during execution. Ref https://papermill.readthedocs.io/en/latest/usage-parameterize.html on how to specifying parameters in the input notebook and pass them here in an YAML file. Ex: gs://notebook_user/scheduled_notebooks/sentiment_notebook_params.yaml
"scaleTier": "A String", # Required. Scale tier of the hardware used for notebook execution. DEPRECATED Will be discontinued. As right now only CUSTOM is supported.
"serviceAccount": "A String", # The email address of a service account to use when running the execution. You must have the `iam.serviceAccounts.actAs` permission for the specified service account.
+ "vertexAiParameters": { # Parameters used in Vertex AI JobType executions. # Parameters used in Vertex AI JobType executions.
+ "network": "A String", # The full name of the Compute Engine [network](/compute/docs/networks-and-firewalls#networks) to which the Job should be peered. For example, `projects/12345/global/networks/myVPC`. [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert) is of the form `projects/{project}/global/networks/{network}`. Where {project} is a project number, as in `12345`, and {network} is a network name. Private services access must already be configured for the network. If left unspecified, the job is not peered with any network.
+ },
},
"jobUri": "A String", # Output only. The URI of the external job used to execute the notebook.
- "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/execution/{execution_id}
+ "name": "A String", # Output only. The resource name of the execute. Format: `projects/{project_id}/locations/{location}/executions/{execution_id}`
"outputNotebookFile": "A String", # Output notebook file generated by this execution
"state": "A String", # Output only. State of the underlying AI Platform job.
"updateTime": "A String", # Output only. Time the Execution was last updated.