docs: update generated docs (#981)

diff --git a/docs/dyn/genomics_v1alpha2.pipelines.html b/docs/dyn/genomics_v1alpha2.pipelines.html
index ec8a23e..3239542 100644
--- a/docs/dyn/genomics_v1alpha2.pipelines.html
+++ b/docs/dyn/genomics_v1alpha2.pipelines.html
@@ -84,10 +84,10 @@
   <code><a href="#get">get(pipelineId, x__xgafv=None)</a></code></p>
 <p class="firstline">Retrieves a pipeline based on ID.</p>
 <p class="toc_element">
-  <code><a href="#getControllerConfig">getControllerConfig(operationId=None, validationToken=None, x__xgafv=None)</a></code></p>
+  <code><a href="#getControllerConfig">getControllerConfig(validationToken=None, operationId=None, x__xgafv=None)</a></code></p>
 <p class="firstline">Gets controller configuration information. Should only be called</p>
 <p class="toc_element">
-  <code><a href="#list">list(pageToken=None, pageSize=None, projectId=None, namePrefix=None, x__xgafv=None)</a></code></p>
+  <code><a href="#list">list(namePrefix=None, pageSize=None, projectId=None, pageToken=None, x__xgafv=None)</a></code></p>
 <p class="firstline">Lists pipelines.</p>
 <p class="toc_element">
   <code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p>
@@ -113,475 +113,86 @@
     The object takes the form of:
 
 { # The pipeline object. Represents a transformation from a set of input
-    # parameters to a set of output parameters. The transformation is defined
-    # as a docker image and command to run within that image. Each pipeline
-    # is run on a Google Compute Engine VM. A pipeline can be created with the
-    # `create` method and then later run with the `run` method, or a pipeline can
-    # be defined and run all at once with the `run` method.
-  &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
-      # This name can be used for filtering Pipelines in ListPipelines.
-  &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
-      # is called. Cannot be specified in the Pipeline used in the
-      # CreatePipelineRequest, and will be populated in the response to
-      # CreatePipeline and all subsequent Get and List calls. Indicates that the
-      # service has registered this pipeline.
-  &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
-      # WRITE access.
-  &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
-    { # Parameters facilitate setting and delivering data into the
-        # pipeline&#x27;s execution environment. They are defined at create time,
-        # with optional defaults, and can be overridden at run time.
-        #
-        # If `localCopy` is unset, then the parameter specifies a string that
-        # is passed as-is into the pipeline, as the value of the environment
-        # variable with the given name.  A default value can be optionally
-        # specified at create time. The default can be overridden at run time
-        # using the inputs map. If no default is given, a value must be
-        # supplied at runtime.
-        #
-        # If `localCopy` is defined, then the parameter specifies a data
-        # source or sink, both in Google Cloud Storage and on the Docker container
-        # where the pipeline computation is run. The service account associated with
-        # the Pipeline (by
-        # default the project&#x27;s Compute Engine service account) must have access to the
-        # Google Cloud Storage paths.
-        #
-        # At run time, the Google Cloud Storage paths can be overridden if a default
-        # was provided at create time, or must be set otherwise. The pipeline runner
-        # should add a key/value pair to either the inputs or outputs map. The
-        # indicated data copies will be carried out before/after pipeline execution,
-        # just as if the corresponding arguments were provided to `gsutil cp`.
-        #
-        # For example: Given the following `PipelineParameter`, specified
-        # in the `inputParameters` list:
-        #
-        # ```
-        # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-        # ```
-        #
-        # where `disk` is defined in the `PipelineResources` object as:
-        #
-        # ```
-        # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-        # ```
-        #
-        # We create a disk named `pd1`, mount it on the host VM, and map
-        # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-        # runtime, an entry for `input_file` would be required in the inputs
-        # map, such as:
-        #
-        # ```
-        #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-        # ```
-        #
-        # This would generate the following gsutil call:
-        #
-        # ```
-        #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-        # ```
-        #
-        # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-        # Docker container. Acceptable paths are:
-        #
-        # &lt;table&gt;
-        #   &lt;thead&gt;
-        #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-        #   &lt;/thead&gt;
-        #   &lt;tbody&gt;
-        #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-        #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-        #   &lt;/tbody&gt;
-        # &lt;/table&gt;
-        #
-        # For outputs, the direction of the copy is reversed:
-        #
-        # ```
-        #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-        # ```
-        #
-        # Acceptable paths are:
-        #
-        # &lt;table&gt;
-        #   &lt;thead&gt;
-        #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-        #   &lt;/thead&gt;
-        #   &lt;tbody&gt;
-        #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-        #     &lt;tr&gt;
-        #       &lt;td&gt;file&lt;/td&gt;
-        #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-        #     &lt;/tr&gt;
-        #     &lt;tr&gt;
-        #       &lt;td&gt;glob&lt;/td&gt;
-        #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-        #   &lt;/tbody&gt;
-        # &lt;/table&gt;
-        #
-        # One restriction due to docker limitations, is that for outputs that are found
-        # on the boot disk, the local path cannot be a glob and must be a file.
-      &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-          # If `localCopy` is present, then this must be a Google Cloud Storage path
-          # beginning with `gs://`.
-      &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-          # as the key to the input and output maps in RunPipeline.
-      &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-      &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-          # `LocalCopy` indicates where on the VM the file should be. The value
-          # given to this parameter (either at runtime or using `defaultValue`)
-          # must be the remote path where the file should be.
-        &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-            # located. Can be the name of one of the disks specified in the
-            # Resources field, or &quot;boot&quot;, which represents the Docker
-            # instance&#x27;s boot disk and has a mount point of `/`.
-        &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-            # this input should be localized to and from, relative to the specified
-            # disk&#x27;s mount point. For example: file.txt,
-      },
-    },
-  ],
-  &quot;docker&quot;: { # The Docker execuctor specification. # Specifies the docker run information.
-    &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
-        # string will be executed within a bash shell.
-        #
-        # If the command exits with a non-zero exit code, output parameter
-        # de-localization will be skipped and the pipeline operation&#x27;s
-        # `error` field will be populated.
-        #
-        # Maximum command string length is 16384.
-    &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
-        # Users that run pipelines must have READ access to the image.
-  },
-  &quot;description&quot;: &quot;A String&quot;, # User-specified description.
-  &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
-    { # Parameters facilitate setting and delivering data into the
-        # pipeline&#x27;s execution environment. They are defined at create time,
-        # with optional defaults, and can be overridden at run time.
-        #
-        # If `localCopy` is unset, then the parameter specifies a string that
-        # is passed as-is into the pipeline, as the value of the environment
-        # variable with the given name.  A default value can be optionally
-        # specified at create time. The default can be overridden at run time
-        # using the inputs map. If no default is given, a value must be
-        # supplied at runtime.
-        #
-        # If `localCopy` is defined, then the parameter specifies a data
-        # source or sink, both in Google Cloud Storage and on the Docker container
-        # where the pipeline computation is run. The service account associated with
-        # the Pipeline (by
-        # default the project&#x27;s Compute Engine service account) must have access to the
-        # Google Cloud Storage paths.
-        #
-        # At run time, the Google Cloud Storage paths can be overridden if a default
-        # was provided at create time, or must be set otherwise. The pipeline runner
-        # should add a key/value pair to either the inputs or outputs map. The
-        # indicated data copies will be carried out before/after pipeline execution,
-        # just as if the corresponding arguments were provided to `gsutil cp`.
-        #
-        # For example: Given the following `PipelineParameter`, specified
-        # in the `inputParameters` list:
-        #
-        # ```
-        # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-        # ```
-        #
-        # where `disk` is defined in the `PipelineResources` object as:
-        #
-        # ```
-        # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-        # ```
-        #
-        # We create a disk named `pd1`, mount it on the host VM, and map
-        # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-        # runtime, an entry for `input_file` would be required in the inputs
-        # map, such as:
-        #
-        # ```
-        #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-        # ```
-        #
-        # This would generate the following gsutil call:
-        #
-        # ```
-        #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-        # ```
-        #
-        # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-        # Docker container. Acceptable paths are:
-        #
-        # &lt;table&gt;
-        #   &lt;thead&gt;
-        #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-        #   &lt;/thead&gt;
-        #   &lt;tbody&gt;
-        #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-        #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-        #   &lt;/tbody&gt;
-        # &lt;/table&gt;
-        #
-        # For outputs, the direction of the copy is reversed:
-        #
-        # ```
-        #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-        # ```
-        #
-        # Acceptable paths are:
-        #
-        # &lt;table&gt;
-        #   &lt;thead&gt;
-        #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-        #   &lt;/thead&gt;
-        #   &lt;tbody&gt;
-        #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-        #     &lt;tr&gt;
-        #       &lt;td&gt;file&lt;/td&gt;
-        #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-        #     &lt;/tr&gt;
-        #     &lt;tr&gt;
-        #       &lt;td&gt;glob&lt;/td&gt;
-        #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-        #   &lt;/tbody&gt;
-        # &lt;/table&gt;
-        #
-        # One restriction due to docker limitations, is that for outputs that are found
-        # on the boot disk, the local path cannot be a glob and must be a file.
-      &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-          # If `localCopy` is present, then this must be a Google Cloud Storage path
-          # beginning with `gs://`.
-      &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-          # as the key to the input and output maps in RunPipeline.
-      &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-      &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-          # `LocalCopy` indicates where on the VM the file should be. The value
-          # given to this parameter (either at runtime or using `defaultValue`)
-          # must be the remote path where the file should be.
-        &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-            # located. Can be the name of one of the disks specified in the
-            # Resources field, or &quot;boot&quot;, which represents the Docker
-            # instance&#x27;s boot disk and has a mount point of `/`.
-        &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-            # this input should be localized to and from, relative to the specified
-            # disk&#x27;s mount point. For example: file.txt,
-      },
-    },
-  ],
-  &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
-      # Required fields:
-      # 
-      # *
-      # minimumCpuCores
-      # 
-      # *
-      # minimumRamGb
-    &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
-    &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
-        # must be true for both create time and run time. Cannot be true at run time
-        # if false at create time.
-    &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-        # creation will restricted. If empty, any zone may be chosen.
-      &quot;A String&quot;,
-    ],
-    &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
-        # By specifying this parameter, you will download and install the following
-        # third-party software onto your managed Compute Engine instances:
-        # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-    &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-        # By specifying this parameter, you will download and install the following
-        # third-party software onto your managed Compute Engine instances: NVIDIA®
-        # Tesla® drivers and NVIDIA® CUDA toolkit.
-        # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-        # available accelerator types.
-    &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
-    &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
-        # feature that may go away. Defaults to false.
-        # Corresponds to `--no_address` flag for [gcloud compute instances create]
-        # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-        # In order to use this, must be true for both create time and run time.
-        # Cannot be true at run time if false at create time. If you need to ssh into
-        # a private IP VM for debugging, you can ssh to a public VM and then ssh into
-        # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
-        # only load docker images from Google Container Registry and not Docker Hub.
-        # Before using this, you must
-        # [configure access to Google services from internal
-        # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
-    &quot;disks&quot;: [ # Disks to attach.
-      { # A Google Compute Engine disk resource specification.
-        &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-            # parameters. Must be 1 - 63 characters.
-            # The name &quot;boot&quot; is reserved for system use.
-        &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-        &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-            # of the pipeline run, regardless of what this field is set to.
-        &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-            # This field is not applicable for local SSD.
-        &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
-            # Specifies the path in the docker container where files on
-            # this disk should be located. For example, if `mountPoint`
-            # is `/mnt/disk`, and the parameter has `localPath`
-            # `inputs/file.txt`, the docker container can access the data at
-            # `/mnt/disk/inputs/file.txt`.
-        &quot;readOnly&quot;: True or False, # Specifies how a sourced-base persistent disk will be mounted. See
-            # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
-            # for more details.
-            # Can only be set at create time.
-        &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
-            # https://cloud.google.com/compute/docs/reference/latest/instances#resource
-            # and
-            # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
-            # for more details.
-      },
-    ],
-    &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
-  },
-}
-
-  x__xgafv: string, V1 error format.
-    Allowed values
-      1 - v1 error format
-      2 - v2 error format
-
-Returns:
-  An object of the form:
-
-    { # The pipeline object. Represents a transformation from a set of input
       # parameters to a set of output parameters. The transformation is defined
       # as a docker image and command to run within that image. Each pipeline
       # is run on a Google Compute Engine VM. A pipeline can be created with the
       # `create` method and then later run with the `run` method, or a pipeline can
       # be defined and run all at once with the `run` method.
-    &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
-        # This name can be used for filtering Pipelines in ListPipelines.
+    &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+        # Required fields:
+        # 
+        # *
+        # minimumCpuCores
+        # 
+        # *
+        # minimumRamGb
+      &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+          # must be true for both create time and run time. Cannot be true at run time
+          # if false at create time.
+      &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+      &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
+          # By specifying this parameter, you will download and install the following
+          # third-party software onto your managed Compute Engine instances:
+          # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+      &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
+          # feature that may go away. Defaults to false.
+          # Corresponds to `--no_address` flag for [gcloud compute instances create]
+          # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+          # In order to use this, must be true for both create time and run time.
+          # Cannot be true at run time if false at create time. If you need to ssh into
+          # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+          # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
+          # only load docker images from Google Container Registry and not Docker Hub.
+          # Before using this, you must
+          # [configure access to Google services from internal
+          # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+      &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+          # creation will be restricted. If empty, any zone may be chosen.
+        &quot;A String&quot;,
+      ],
+      &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+      &quot;disks&quot;: [ # Disks to attach.
+        { # A Google Compute Engine disk resource specification.
+          &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
+              # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+              # and
+              # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+              # for more details.
+          &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
+              # Specifies the path in the docker container where files on
+              # this disk should be located. For example, if `mountPoint`
+              # is `/mnt/disk`, and the parameter has `localPath`
+              # `inputs/file.txt`, the docker container can access the data at
+              # `/mnt/disk/inputs/file.txt`.
+          &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+              # of the pipeline run, regardless of what this field is set to.
+          &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+              # parameters. Must be 1 - 63 characters.
+              # The name &quot;boot&quot; is reserved for system use.
+          &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+          &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+              # This field is not applicable for local SSD.
+          &quot;readOnly&quot;: True or False, # Specifies how a source-based persistent disk will be mounted. See
+              # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+              # for more details.
+              # Can only be set at create time.
+        },
+      ],
+      &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+          # By specifying this parameter, you will download and install the following
+          # third-party software onto your managed Compute Engine instances: NVIDIA®
+          # Tesla® drivers and NVIDIA® CUDA toolkit.
+          # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+          # available accelerator types.
+      &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
+    },
+    &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
+        # WRITE access.
     &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
         # is called. Cannot be specified in the Pipeline used in the
         # CreatePipelineRequest, and will be populated in the response to
         # CreatePipeline and all subsequent Get and List calls. Indicates that the
         # service has registered this pipeline.
-    &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
-        # WRITE access.
-    &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
-      { # Parameters facilitate setting and delivering data into the
-          # pipeline&#x27;s execution environment. They are defined at create time,
-          # with optional defaults, and can be overridden at run time.
-          #
-          # If `localCopy` is unset, then the parameter specifies a string that
-          # is passed as-is into the pipeline, as the value of the environment
-          # variable with the given name.  A default value can be optionally
-          # specified at create time. The default can be overridden at run time
-          # using the inputs map. If no default is given, a value must be
-          # supplied at runtime.
-          #
-          # If `localCopy` is defined, then the parameter specifies a data
-          # source or sink, both in Google Cloud Storage and on the Docker container
-          # where the pipeline computation is run. The service account associated with
-          # the Pipeline (by
-          # default the project&#x27;s Compute Engine service account) must have access to the
-          # Google Cloud Storage paths.
-          #
-          # At run time, the Google Cloud Storage paths can be overridden if a default
-          # was provided at create time, or must be set otherwise. The pipeline runner
-          # should add a key/value pair to either the inputs or outputs map. The
-          # indicated data copies will be carried out before/after pipeline execution,
-          # just as if the corresponding arguments were provided to `gsutil cp`.
-          #
-          # For example: Given the following `PipelineParameter`, specified
-          # in the `inputParameters` list:
-          #
-          # ```
-          # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-          # ```
-          #
-          # where `disk` is defined in the `PipelineResources` object as:
-          #
-          # ```
-          # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-          # ```
-          #
-          # We create a disk named `pd1`, mount it on the host VM, and map
-          # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-          # runtime, an entry for `input_file` would be required in the inputs
-          # map, such as:
-          #
-          # ```
-          #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-          # ```
-          #
-          # This would generate the following gsutil call:
-          #
-          # ```
-          #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-          # ```
-          #
-          # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-          # Docker container. Acceptable paths are:
-          #
-          # &lt;table&gt;
-          #   &lt;thead&gt;
-          #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-          #   &lt;/thead&gt;
-          #   &lt;tbody&gt;
-          #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-          #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-          #   &lt;/tbody&gt;
-          # &lt;/table&gt;
-          #
-          # For outputs, the direction of the copy is reversed:
-          #
-          # ```
-          #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-          # ```
-          #
-          # Acceptable paths are:
-          #
-          # &lt;table&gt;
-          #   &lt;thead&gt;
-          #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-          #   &lt;/thead&gt;
-          #   &lt;tbody&gt;
-          #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-          #     &lt;tr&gt;
-          #       &lt;td&gt;file&lt;/td&gt;
-          #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-          #     &lt;/tr&gt;
-          #     &lt;tr&gt;
-          #       &lt;td&gt;glob&lt;/td&gt;
-          #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-          #   &lt;/tbody&gt;
-          # &lt;/table&gt;
-          #
-          # One restriction due to docker limitations, is that for outputs that are found
-          # on the boot disk, the local path cannot be a glob and must be a file.
-        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-            # If `localCopy` is present, then this must be a Google Cloud Storage path
-            # beginning with `gs://`.
-        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-            # as the key to the input and output maps in RunPipeline.
-        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-        &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-            # `LocalCopy` indicates where on the VM the file should be. The value
-            # given to this parameter (either at runtime or using `defaultValue`)
-            # must be the remote path where the file should be.
-          &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-              # located. Can be the name of one of the disks specified in the
-              # Resources field, or &quot;boot&quot;, which represents the Docker
-              # instance&#x27;s boot disk and has a mount point of `/`.
-          &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-              # this input should be localized to and from, relative to the specified
-              # disk&#x27;s mount point. For example: file.txt,
-        },
-      },
-    ],
-    &quot;docker&quot;: { # The Docker execuctor specification. # Specifies the docker run information.
-      &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
-          # string will be executed within a bash shell.
-          #
-          # If the command exits with a non-zero exit code, output parameter
-          # de-localization will be skipped and the pipeline operation&#x27;s
-          # `error` field will be populated.
-          #
-          # Maximum command string length is 16384.
-      &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
-          # Users that run pipelines must have READ access to the image.
-    },
-    &quot;description&quot;: &quot;A String&quot;, # User-specified description.
     &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
       { # Parameters facilitate setting and delivering data into the
           # pipeline&#x27;s execution environment. They are defined at create time,
@@ -674,12 +285,6 @@
           #
           # One restriction due to docker limitations is that for outputs that are found
           # on the boot disk, the local path cannot be a glob and must be a file.
-        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-            # If `localCopy` is present, then this must be a Google Cloud Storage path
-            # beginning with `gs://`.
-        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-            # as the key to the input and output maps in RunPipeline.
-        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
         &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
             # `LocalCopy` indicates where on the VM the file should be. The value
             # given to this parameter (either at runtime or using `defaultValue`)
@@ -692,140 +297,14 @@
               # this input should be localized to and from, relative to the specified
               # disk&#x27;s mount point. For example: file.txt,
         },
+        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+            # If `localCopy` is present, then this must be a Google Cloud Storage path
+            # beginning with `gs://`.
+        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+            # as the key to the input and output maps in RunPipeline.
       },
     ],
-    &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
-        # Required fields:
-        #
-        # *
-        # minimumCpuCores
-        #
-        # *
-        # minimumRamGb
-      &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
-      &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
-          # must be true for both create time and run time. Cannot be true at run time
-          # if false at create time.
-      &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-          # creation will restricted. If empty, any zone may be chosen.
-        &quot;A String&quot;,
-      ],
-      &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
-          # By specifying this parameter, you will download and install the following
-          # third-party software onto your managed Compute Engine instances:
-          # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-      &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-          # By specifying this parameter, you will download and install the following
-          # third-party software onto your managed Compute Engine instances: NVIDIA®
-          # Tesla® drivers and NVIDIA® CUDA toolkit.
-          # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-          # available accelerator types.
-      &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
-      &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
-          # feature that may go away. Defaults to false.
-          # Corresponds to `--no_address` flag for [gcloud compute instances create]
-          # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-          # In order to use this, must be true for both create time and run time.
-          # Cannot be true at run time if false at create time. If you need to ssh into
-          # a private IP VM for debugging, you can ssh to a public VM and then ssh into
-          # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
-          # only load docker images from Google Container Registry and not Docker Hub.
-          # Before using this, you must
-          # [configure access to Google services from internal
-          # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
-      &quot;disks&quot;: [ # Disks to attach.
-        { # A Google Compute Engine disk resource specification.
-          &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-              # parameters. Must be 1 - 63 characters.
-              # The name &quot;boot&quot; is reserved for system use.
-          &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-          &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-              # of the pipeline run, regardless of what this field is set to.
-          &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-              # This field is not applicable for local SSD.
-          &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
-              # Specifies the path in the docker container where files on
-              # this disk should be located. For example, if `mountPoint`
-              # is `/mnt/disk`, and the parameter has `localPath`
-              # `inputs/file.txt`, the docker container can access the data at
-              # `/mnt/disk/inputs/file.txt`.
-          &quot;readOnly&quot;: True or False, # Specifies how a sourced-base persistent disk will be mounted. See
-              # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
-              # for more details.
-              # Can only be set at create time.
-          &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
-              # https://cloud.google.com/compute/docs/reference/latest/instances#resource
-              # and
-              # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
-              # for more details.
-        },
-      ],
-      &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
-    },
-  }</pre>
-</div>
-
-<div class="method">
-    <code class="details" id="delete">delete(pipelineId, x__xgafv=None)</code>
-  <pre>Deletes a pipeline based on ID.
-
-Caller must have WRITE permission to the project.
-
-Args:
-  pipelineId: string, Caller must have WRITE access to the project in which this pipeline
-is defined. (required)
-  x__xgafv: string, V1 error format.
-    Allowed values
-      1 - v1 error format
-      2 - v2 error format
-
-Returns:
-  An object of the form:
-
-    { # A generic empty message that you can re-use to avoid defining duplicated
-      # empty messages in your APIs. A typical example is to use it as the request
-      # or the response type of an API method. For instance:
-      #
-      #     service Foo {
-      #       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
-      #     }
-      #
-      # The JSON representation for `Empty` is empty JSON object `{}`.
-  }</pre>
-</div>
-
-<div class="method">
-    <code class="details" id="get">get(pipelineId, x__xgafv=None)</code>
-  <pre>Retrieves a pipeline based on ID.
-
-Caller must have READ permission to the project.
-
-Args:
-  pipelineId: string, Caller must have READ access to the project in which this pipeline
-is defined. (required)
-  x__xgafv: string, V1 error format.
-    Allowed values
-      1 - v1 error format
-      2 - v2 error format
-
-Returns:
-  An object of the form:
-
-    { # The pipeline object. Represents a transformation from a set of input
-      # parameters to a set of output parameters. The transformation is defined
-      # as a docker image and command to run within that image. Each pipeline
-      # is run on a Google Compute Engine VM. A pipeline can be created with the
-      # `create` method and then later run with the `run` method, or a pipeline can
-      # be defined and run all at once with the `run` method.
-    &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
-        # This name can be used for filtering Pipelines in ListPipelines.
-    &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
-        # is called. Cannot be specified in the Pipeline used in the
-        # CreatePipelineRequest, and will be populated in the response to
-        # CreatePipeline and all subsequent Get and List calls. Indicates that the
-        # service has registered this pipeline.
-    &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
-        # WRITE access.
     &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
       { # Parameters facilitate setting and delivering data into the
           # pipeline&#x27;s execution environment. They are defined at create time,
@@ -918,12 +397,6 @@
           #
           # One restriction due to docker limitations is that for outputs that are found
           # on the boot disk, the local path cannot be a glob and must be a file.
-        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-            # If `localCopy` is present, then this must be a Google Cloud Storage path
-            # beginning with `gs://`.
-        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-            # as the key to the input and output maps in RunPipeline.
-        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
         &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
             # `LocalCopy` indicates where on the VM the file should be. The value
             # given to this parameter (either at runtime or using `defaultValue`)
@@ -936,9 +409,18 @@
               # this input should be localized to and from, relative to the specified
               # disk&#x27;s mount point. For example: file.txt.
         },
+        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+            # If `localCopy` is present, then this must be a Google Cloud Storage path
+            # beginning with `gs://`.
+        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+            # as the key to the input and output maps in RunPipeline.
       },
     ],
+    &quot;description&quot;: &quot;A String&quot;, # User-specified description.
     &quot;docker&quot;: { # The Docker executor specification. # Specifies the docker run information.
+      &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
+          # Users that run pipelines must have READ access to the image.
       &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
           # string will be executed within a bash shell.
           #
@@ -947,201 +429,11 @@
           # `error` field will be populated.
           #
           # Maximum command string length is 16384.
-      &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
-          # Users that run pipelines must have READ access to the image.
     },
-    &quot;description&quot;: &quot;A String&quot;, # User-specified description.
-    &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
-      { # Parameters facilitate setting and delivering data into the
-          # pipeline&#x27;s execution environment. They are defined at create time,
-          # with optional defaults, and can be overridden at run time.
-          #
-          # If `localCopy` is unset, then the parameter specifies a string that
-          # is passed as-is into the pipeline, as the value of the environment
-          # variable with the given name.  A default value can be optionally
-          # specified at create time. The default can be overridden at run time
-          # using the inputs map. If no default is given, a value must be
-          # supplied at runtime.
-          #
-          # If `localCopy` is defined, then the parameter specifies a data
-          # source or sink, both in Google Cloud Storage and on the Docker container
-          # where the pipeline computation is run. The service account associated with
-          # the Pipeline (by
-          # default the project&#x27;s Compute Engine service account) must have access to the
-          # Google Cloud Storage paths.
-          #
-          # At run time, the Google Cloud Storage paths can be overridden if a default
-          # was provided at create time, or must be set otherwise. The pipeline runner
-          # should add a key/value pair to either the inputs or outputs map. The
-          # indicated data copies will be carried out before/after pipeline execution,
-          # just as if the corresponding arguments were provided to `gsutil cp`.
-          #
-          # For example: Given the following `PipelineParameter`, specified
-          # in the `inputParameters` list:
-          #
-          # ```
-          # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-          # ```
-          #
-          # where `disk` is defined in the `PipelineResources` object as:
-          #
-          # ```
-          # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-          # ```
-          #
-          # We create a disk named `pd1`, mount it on the host VM, and map
-          # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-          # runtime, an entry for `input_file` would be required in the inputs
-          # map, such as:
-          #
-          # ```
-          #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-          # ```
-          #
-          # This would generate the following gsutil call:
-          #
-          # ```
-          #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-          # ```
-          #
-          # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-          # Docker container. Acceptable paths are:
-          #
-          # &lt;table&gt;
-          #   &lt;thead&gt;
-          #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-          #   &lt;/thead&gt;
-          #   &lt;tbody&gt;
-          #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-          #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-          #   &lt;/tbody&gt;
-          # &lt;/table&gt;
-          #
-          # For outputs, the direction of the copy is reversed:
-          #
-          # ```
-          #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-          # ```
-          #
-          # Acceptable paths are:
-          #
-          # &lt;table&gt;
-          #   &lt;thead&gt;
-          #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-          #   &lt;/thead&gt;
-          #   &lt;tbody&gt;
-          #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-          #     &lt;tr&gt;
-          #       &lt;td&gt;file&lt;/td&gt;
-          #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-          #     &lt;/tr&gt;
-          #     &lt;tr&gt;
-          #       &lt;td&gt;glob&lt;/td&gt;
-          #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-          #   &lt;/tbody&gt;
-          # &lt;/table&gt;
-          #
-          # One restriction due to docker limitations, is that for outputs that are found
-          # on the boot disk, the local path cannot be a glob and must be a file.
-        &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-            # If `localCopy` is present, then this must be a Google Cloud Storage path
-            # beginning with `gs://`.
-        &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-            # as the key to the input and output maps in RunPipeline.
-        &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-        &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-            # `LocalCopy` indicates where on the VM the file should be. The value
-            # given to this parameter (either at runtime or using `defaultValue`)
-            # must be the remote path where the file should be.
-          &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-              # located. Can be the name of one of the disks specified in the
-              # Resources field, or &quot;boot&quot;, which represents the Docker
-              # instance&#x27;s boot disk and has a mount point of `/`.
-          &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-              # this input should be localized to and from, relative to the specified
-              # disk&#x27;s mount point. For example: file.txt,
-        },
-      },
-    ],
-    &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
-        # Required fields:
-        #
-        # *
-        # minimumCpuCores
-        #
-        # *
-        # minimumRamGb
-      &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
-      &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
-          # must be true for both create time and run time. Cannot be true at run time
-          # if false at create time.
-      &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-          # creation will restricted. If empty, any zone may be chosen.
-        &quot;A String&quot;,
-      ],
-      &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
-          # By specifying this parameter, you will download and install the following
-          # third-party software onto your managed Compute Engine instances:
-          # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-      &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-          # By specifying this parameter, you will download and install the following
-          # third-party software onto your managed Compute Engine instances: NVIDIA®
-          # Tesla® drivers and NVIDIA® CUDA toolkit.
-          # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-          # available accelerator types.
-      &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
-      &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
-          # feature that may go away. Defaults to false.
-          # Corresponds to `--no_address` flag for [gcloud compute instances create]
-          # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-          # In order to use this, must be true for both create time and run time.
-          # Cannot be true at run time if false at create time. If you need to ssh into
-          # a private IP VM for debugging, you can ssh to a public VM and then ssh into
-          # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
-          # only load docker images from Google Container Registry and not Docker Hub.
-          # Before using this, you must
-          # [configure access to Google services from internal
-          # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
-      &quot;disks&quot;: [ # Disks to attach.
-        { # A Google Compute Engine disk resource specification.
-          &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-              # parameters. Must be 1 - 63 characters.
-              # The name &quot;boot&quot; is reserved for system use.
-          &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-          &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-              # of the pipeline run, regardless of what this field is set to.
-          &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-              # This field is not applicable for local SSD.
-          &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
-              # Specifies the path in the docker container where files on
-              # this disk should be located. For example, if `mountPoint`
-              # is `/mnt/disk`, and the parameter has `localPath`
-              # `inputs/file.txt`, the docker container can access the data at
-              # `/mnt/disk/inputs/file.txt`.
-          &quot;readOnly&quot;: True or False, # Specifies how a sourced-base persistent disk will be mounted. See
-              # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
-              # for more details.
-              # Can only be set at create time.
-          &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
-              # https://cloud.google.com/compute/docs/reference/latest/instances#resource
-              # and
-              # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
-              # for more details.
-        },
-      ],
-      &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
-    },
-  }</pre>
-</div>
+    &quot;name&quot;: &quot;A String&quot;, # Required. A user-specified pipeline name that does not have to be unique.
+        # This name can be used for filtering Pipelines in ListPipelines.
+  }
 
-<div class="method">
-    <code class="details" id="getControllerConfig">getControllerConfig(operationId=None, validationToken=None, x__xgafv=None)</code>
-  <pre>Gets controller configuration information. Should only be called
-by VMs created by the Pipelines Service and not by end users.
-
-Args:
-  operationId: string, The operation to retrieve controller configuration for.
-  validationToken: string, A parameter
   x__xgafv: string, V1 error format.
     Allowed values
       1 - v1 error format
@@ -1150,504 +442,28 @@
 Returns:
   An object of the form:
 
-    { # Stores the information that the controller will fetch from the
-      # server in order to run. Should only be used by VMs created by the
-      # Pipelines Service and not by end users.
-    &quot;machineType&quot;: &quot;A String&quot;,
-    &quot;cmd&quot;: &quot;A String&quot;,
-    &quot;vars&quot;: {
-      &quot;a_key&quot;: &quot;A String&quot;,
-    },
-    &quot;image&quot;: &quot;A String&quot;,
-    &quot;gcsLogPath&quot;: &quot;A String&quot;,
-    &quot;gcsSources&quot;: {
-      &quot;a_key&quot;: {
-        &quot;values&quot;: [
-          &quot;A String&quot;,
-        ],
-      },
-    },
-    &quot;gcsSinks&quot;: {
-      &quot;a_key&quot;: {
-        &quot;values&quot;: [
-          &quot;A String&quot;,
-        ],
-      },
-    },
-    &quot;disks&quot;: {
-      &quot;a_key&quot;: &quot;A String&quot;,
-    },
-  }</pre>
-</div>
-
-<div class="method">
-    <code class="details" id="list">list(pageToken=None, pageSize=None, projectId=None, namePrefix=None, x__xgafv=None)</code>
-  <pre>Lists pipelines.
-
-Caller must have READ permission to the project.
-
-Args:
-  pageToken: string, Token to use to indicate where to start getting results.
-If unspecified, returns the first page of results.
-  pageSize: integer, Number of pipelines to return at once. Defaults to 256, and max
-is 2048.
-  projectId: string, Required. The name of the project to search for pipelines. Caller
-must have READ access to this project.
-  namePrefix: string, Pipelines with names that match this prefix should be
-returned.  If unspecified, all pipelines in the project, up to
-`pageSize`, will be returned.
-  x__xgafv: string, V1 error format.
-    Allowed values
-      1 - v1 error format
-      2 - v2 error format
-
-Returns:
-  An object of the form:
-
-    { # The response of ListPipelines. Contains at most `pageSize`
-      # pipelines. If it contains `pageSize` pipelines, and more pipelines
-      # exist, then `nextPageToken` will be populated and should be
-      # used as the `pageToken` argument to a subsequent ListPipelines
-      # request.
-    &quot;nextPageToken&quot;: &quot;A String&quot;, # The token to use to get the next page of results.
-    &quot;pipelines&quot;: [ # The matched pipelines.
-      { # The pipeline object. Represents a transformation from a set of input
-          # parameters to a set of output parameters. The transformation is defined
-          # as a docker image and command to run within that image. Each pipeline
-          # is run on a Google Compute Engine VM. A pipeline can be created with the
-          # `create` method and then later run with the `run` method, or a pipeline can
-          # be defined and run all at once with the `run` method.
-        &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
-            # This name can be used for filtering Pipelines in ListPipelines.
-        &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
-            # is called. Cannot be specified in the Pipeline used in the
-            # CreatePipelineRequest, and will be populated in the response to
-            # CreatePipeline and all subsequent Get and List calls. Indicates that the
-            # service has registered this pipeline.
-        &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
-            # WRITE access.
-        &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
-          { # Parameters facilitate setting and delivering data into the
-              # pipeline&#x27;s execution environment. They are defined at create time,
-              # with optional defaults, and can be overridden at run time.
-              #
-              # If `localCopy` is unset, then the parameter specifies a string that
-              # is passed as-is into the pipeline, as the value of the environment
-              # variable with the given name.  A default value can be optionally
-              # specified at create time. The default can be overridden at run time
-              # using the inputs map. If no default is given, a value must be
-              # supplied at runtime.
-              #
-              # If `localCopy` is defined, then the parameter specifies a data
-              # source or sink, both in Google Cloud Storage and on the Docker container
-              # where the pipeline computation is run. The service account associated with
-              # the Pipeline (by
-              # default the project&#x27;s Compute Engine service account) must have access to the
-              # Google Cloud Storage paths.
-              #
-              # At run time, the Google Cloud Storage paths can be overridden if a default
-              # was provided at create time, or must be set otherwise. The pipeline runner
-              # should add a key/value pair to either the inputs or outputs map. The
-              # indicated data copies will be carried out before/after pipeline execution,
-              # just as if the corresponding arguments were provided to `gsutil cp`.
-              #
-              # For example: Given the following `PipelineParameter`, specified
-              # in the `inputParameters` list:
-              #
-              # ```
-              # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-              # ```
-              #
-              # where `disk` is defined in the `PipelineResources` object as:
-              #
-              # ```
-              # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-              # ```
-              #
-              # We create a disk named `pd1`, mount it on the host VM, and map
-              # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-              # runtime, an entry for `input_file` would be required in the inputs
-              # map, such as:
-              #
-              # ```
-              #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-              # ```
-              #
-              # This would generate the following gsutil call:
-              #
-              # ```
-              #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-              # ```
-              #
-              # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-              # Docker container. Acceptable paths are:
-              #
-              # &lt;table&gt;
-              #   &lt;thead&gt;
-              #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-              #   &lt;/thead&gt;
-              #   &lt;tbody&gt;
-              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-              #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-              #   &lt;/tbody&gt;
-              # &lt;/table&gt;
-              #
-              # For outputs, the direction of the copy is reversed:
-              #
-              # ```
-              #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-              # ```
-              #
-              # Acceptable paths are:
-              #
-              # &lt;table&gt;
-              #   &lt;thead&gt;
-              #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-              #   &lt;/thead&gt;
-              #   &lt;tbody&gt;
-              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-              #     &lt;tr&gt;
-              #       &lt;td&gt;file&lt;/td&gt;
-              #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-              #     &lt;/tr&gt;
-              #     &lt;tr&gt;
-              #       &lt;td&gt;glob&lt;/td&gt;
-              #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-              #   &lt;/tbody&gt;
-              # &lt;/table&gt;
-              #
-              # One restriction due to docker limitations, is that for outputs that are found
-              # on the boot disk, the local path cannot be a glob and must be a file.
-            &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-                # If `localCopy` is present, then this must be a Google Cloud Storage path
-                # beginning with `gs://`.
-            &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-                # as the key to the input and output maps in RunPipeline.
-            &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-            &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-                # `LocalCopy` indicates where on the VM the file should be. The value
-                # given to this parameter (either at runtime or using `defaultValue`)
-                # must be the remote path where the file should be.
-              &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-                  # located. Can be the name of one of the disks specified in the
-                  # Resources field, or &quot;boot&quot;, which represents the Docker
-                  # instance&#x27;s boot disk and has a mount point of `/`.
-              &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-                  # this input should be localized to and from, relative to the specified
-                  # disk&#x27;s mount point. For example: file.txt,
-            },
-          },
-        ],
-        &quot;docker&quot;: { # The Docker execuctor specification. # Specifies the docker run information.
-          &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
-              # string will be executed within a bash shell.
-              #
-              # If the command exits with a non-zero exit code, output parameter
-              # de-localization will be skipped and the pipeline operation&#x27;s
-              # `error` field will be populated.
-              #
-              # Maximum command string length is 16384.
-          &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
-              # Users that run pipelines must have READ access to the image.
-        },
-        &quot;description&quot;: &quot;A String&quot;, # User-specified description.
-        &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
-          { # Parameters facilitate setting and delivering data into the
-              # pipeline&#x27;s execution environment. They are defined at create time,
-              # with optional defaults, and can be overridden at run time.
-              #
-              # If `localCopy` is unset, then the parameter specifies a string that
-              # is passed as-is into the pipeline, as the value of the environment
-              # variable with the given name.  A default value can be optionally
-              # specified at create time. The default can be overridden at run time
-              # using the inputs map. If no default is given, a value must be
-              # supplied at runtime.
-              #
-              # If `localCopy` is defined, then the parameter specifies a data
-              # source or sink, both in Google Cloud Storage and on the Docker container
-              # where the pipeline computation is run. The service account associated with
-              # the Pipeline (by
-              # default the project&#x27;s Compute Engine service account) must have access to the
-              # Google Cloud Storage paths.
-              #
-              # At run time, the Google Cloud Storage paths can be overridden if a default
-              # was provided at create time, or must be set otherwise. The pipeline runner
-              # should add a key/value pair to either the inputs or outputs map. The
-              # indicated data copies will be carried out before/after pipeline execution,
-              # just as if the corresponding arguments were provided to `gsutil cp`.
-              #
-              # For example: Given the following `PipelineParameter`, specified
-              # in the `inputParameters` list:
-              #
-              # ```
-              # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-              # ```
-              #
-              # where `disk` is defined in the `PipelineResources` object as:
-              #
-              # ```
-              # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-              # ```
-              #
-              # We create a disk named `pd1`, mount it on the host VM, and map
-              # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-              # runtime, an entry for `input_file` would be required in the inputs
-              # map, such as:
-              #
-              # ```
-              #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-              # ```
-              #
-              # This would generate the following gsutil call:
-              #
-              # ```
-              #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-              # ```
-              #
-              # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-              # Docker container. Acceptable paths are:
-              #
-              # &lt;table&gt;
-              #   &lt;thead&gt;
-              #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-              #   &lt;/thead&gt;
-              #   &lt;tbody&gt;
-              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-              #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-              #   &lt;/tbody&gt;
-              # &lt;/table&gt;
-              #
-              # For outputs, the direction of the copy is reversed:
-              #
-              # ```
-              #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-              # ```
-              #
-              # Acceptable paths are:
-              #
-              # &lt;table&gt;
-              #   &lt;thead&gt;
-              #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-              #   &lt;/thead&gt;
-              #   &lt;tbody&gt;
-              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-              #     &lt;tr&gt;
-              #       &lt;td&gt;file&lt;/td&gt;
-              #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-              #     &lt;/tr&gt;
-              #     &lt;tr&gt;
-              #       &lt;td&gt;glob&lt;/td&gt;
-              #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-              #   &lt;/tbody&gt;
-              # &lt;/table&gt;
-              #
-              # One restriction due to docker limitations, is that for outputs that are found
-              # on the boot disk, the local path cannot be a glob and must be a file.
-            &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-                # If `localCopy` is present, then this must be a Google Cloud Storage path
-                # beginning with `gs://`.
-            &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-                # as the key to the input and output maps in RunPipeline.
-            &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-            &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-                # `LocalCopy` indicates where on the VM the file should be. The value
-                # given to this parameter (either at runtime or using `defaultValue`)
-                # must be the remote path where the file should be.
-              &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-                  # located. Can be the name of one of the disks specified in the
-                  # Resources field, or &quot;boot&quot;, which represents the Docker
-                  # instance&#x27;s boot disk and has a mount point of `/`.
-              &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-                  # this input should be localized to and from, relative to the specified
-                  # disk&#x27;s mount point. For example: file.txt,
-            },
-          },
-        ],
-        &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
-            # Required fields:
-            #
-            # *
-            # minimumCpuCores
-            #
-            # *
-            # minimumRamGb
-          &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
-          &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
-              # must be true for both create time and run time. Cannot be true at run time
-              # if false at create time.
-          &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-              # creation will restricted. If empty, any zone may be chosen.
-            &quot;A String&quot;,
-          ],
-          &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
-              # By specifying this parameter, you will download and install the following
-              # third-party software onto your managed Compute Engine instances:
-              # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-          &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-              # By specifying this parameter, you will download and install the following
-              # third-party software onto your managed Compute Engine instances: NVIDIA®
-              # Tesla® drivers and NVIDIA® CUDA toolkit.
-              # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-              # available accelerator types.
-          &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
-          &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
-              # feature that may go away. Defaults to false.
-              # Corresponds to `--no_address` flag for [gcloud compute instances create]
-              # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-              # In order to use this, must be true for both create time and run time.
-              # Cannot be true at run time if false at create time. If you need to ssh into
-              # a private IP VM for debugging, you can ssh to a public VM and then ssh into
-              # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
-              # only load docker images from Google Container Registry and not Docker Hub.
-              # Before using this, you must
-              # [configure access to Google services from internal
-              # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
-          &quot;disks&quot;: [ # Disks to attach.
-            { # A Google Compute Engine disk resource specification.
-              &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-                  # parameters. Must be 1 - 63 characters.
-                  # The name &quot;boot&quot; is reserved for system use.
-              &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-              &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-                  # of the pipeline run, regardless of what this field is set to.
-              &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-                  # This field is not applicable for local SSD.
-              &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
-                  # Specifies the path in the docker container where files on
-                  # this disk should be located. For example, if `mountPoint`
-                  # is `/mnt/disk`, and the parameter has `localPath`
-                  # `inputs/file.txt`, the docker container can access the data at
-                  # `/mnt/disk/inputs/file.txt`.
-              &quot;readOnly&quot;: True or False, # Specifies how a sourced-base persistent disk will be mounted. See
-                  # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
-                  # for more details.
-                  # Can only be set at create time.
-              &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
-                  # https://cloud.google.com/compute/docs/reference/latest/instances#resource
-                  # and
-                  # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
-                  # for more details.
-            },
-          ],
-          &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
-        },
-      },
-    ],
-  }</pre>
-</div>
-
-<div class="method">
-    <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
-  <pre>Retrieves the next page of results.
-
-Args:
-  previous_request: The request for the previous page. (required)
-  previous_response: The response from the request for the previous page. (required)
-
-Returns:
-  A request object that you can call &#x27;execute()&#x27; on to request the next
-  page. Returns None if there are no more items in the collection.
-    </pre>
-</div>
-
-<div class="method">
-    <code class="details" id="run">run(body=None, x__xgafv=None)</code>
-  <pre>Runs a pipeline. If `pipelineId` is specified in the request, then
-run a saved pipeline. If `ephemeralPipeline` is specified, then run
-that pipeline once without saving a copy.
-
-The caller must have READ permission to the project where the pipeline
-is stored and WRITE permission to the project where the pipeline will be
-run, as VMs will be created and storage will be used.
-
-If a pipeline operation is still running after 6 days, it will be canceled.
-
-Args:
-  body: object, The request body.
-    The object takes the form of:
-
-{ # The request to run a pipeline. If `pipelineId` is specified, it
-      # refers to a saved pipeline created with CreatePipeline and set as
-      # the `pipelineId` of the returned Pipeline object. If
-      # `ephemeralPipeline` is specified, that pipeline is run once
-      # with the given args and not saved. It is an error to specify both
-      # `pipelineId` and `ephemeralPipeline`. `pipelineArgs`
-      # must be specified.
-    &quot;pipelineArgs&quot;: { # The pipeline run arguments. # The arguments to use when running this pipeline.
-      &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to run the pipeline. The caller must have
-          # WRITER access to all Google Cloud services and resources (e.g. Google
-          # Compute Engine) will be used.
-      &quot;clientId&quot;: &quot;A String&quot;, # This field is deprecated. Use `labels` instead. Client-specified pipeline
-          # operation identifier.
-      &quot;serviceAccount&quot;: { # A Google Cloud Service Account. # The Google Cloud Service Account that will be used to access data and
-          # services. By default, the compute service account associated with
-          # `projectId` is used.
-        &quot;email&quot;: &quot;A String&quot;, # Email address of the service account. Defaults to `default`,
-            # which uses the compute service account associated with the project.
-        &quot;scopes&quot;: [ # List of scopes to be enabled for this service account on the VM.
-            # The following scopes are automatically included:
-            #
-            # * https://www.googleapis.com/auth/compute
-            # * https://www.googleapis.com/auth/devstorage.full_control
-            # * https://www.googleapis.com/auth/genomics
-            # * https://www.googleapis.com/auth/logging.write
-            # * https://www.googleapis.com/auth/monitoring.write
-          &quot;A String&quot;,
-        ],
-      },
-      &quot;inputs&quot;: { # Pipeline input arguments; keys are defined in the pipeline documentation.
-          # All input parameters that do not have default values  must be specified.
-          # If parameters with defaults are specified here, the defaults will be
-          # overridden.
-        &quot;a_key&quot;: &quot;A String&quot;,
-      },
-      &quot;labels&quot;: { # Labels to apply to this pipeline run. Labels will also be applied to
-          # compute resources (VM, disks) created by this pipeline run. When listing
-          # operations, operations can filtered by labels.
-          # Label keys may not be empty; label values may be empty. Non-empty labels
-          # must be 1-63 characters long, and comply with [RFC1035]
-          # (https://www.ietf.org/rfc/rfc1035.txt).
-          # Specifically, the name must be 1-63 characters long and match the regular
-          # expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
-          # character must be a lowercase letter, and all following characters must be
-          # a dash, lowercase letter, or digit, except the last character, which cannot
-          # be a dash.
-        &quot;a_key&quot;: &quot;A String&quot;,
-      },
-      &quot;logging&quot;: { # The logging options for the pipeline run. # Required. Logging options. Used by the service to communicate results
-          # to the user.
-        &quot;gcsPath&quot;: &quot;A String&quot;, # The location in Google Cloud Storage to which the pipeline logs
-            # will be copied. Can be specified as a fully qualified directory
-            # path, in which case logs will be output with a unique identifier
-            # as the filename in that directory, or as a fully specified path,
-            # which must end in `.log`, in which case that path will be
-            # used, and the user must ensure that logs are not
-            # overwritten. Stdout and stderr logs from the run are also
-            # generated and output as `-stdout.log` and `-stderr.log`.
-      },
-      &quot;keepVmAliveOnFailureDuration&quot;: &quot;A String&quot;, # How long to keep the VM up after a failure (for example docker command
-          # failed, copying input or output files failed, etc). While the VM is up, one
-          # can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
-      &quot;resources&quot;: { # The system resources for the pipeline run. # Specifies resource requirements/overrides for the pipeline run.
-        &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+    { # The pipeline object. Represents a transformation from a set of input
+        # parameters to a set of output parameters. The transformation is defined
+        # as a docker image and command to run within that image. Each pipeline
+        # is run on a Google Compute Engine VM. A pipeline can be created with the
+        # `create` method and then later run with the `run` method, or a pipeline can
+        # be defined and run all at once with the `run` method.
+      &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+          # Required fields:
+          #
+          # *
+          # minimumCpuCores
+          #
+          # *
+          # minimumRamGb
         &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
             # it must be true for both create time and run time. It cannot be true at
             # run time if false at create time.
-        &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-            # creation will restricted. If empty, any zone may be chosen.
-          &quot;A String&quot;,
-        ],
+        &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
         &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
             # By specifying this parameter, you will download and install the following
             # third-party software onto your managed Compute Engine instances:
             # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-        &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-            # By specifying this parameter, you will download and install the following
-            # third-party software onto your managed Compute Engine instances: NVIDIA®
-            # Tesla® drivers and NVIDIA® CUDA toolkit.
-            # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-            # available accelerator types.
-        &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
         &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
             # feature that may go away. Defaults to false.
             # Corresponds to `--no_address` flag for [gcloud compute instances create]
@@ -1660,183 +476,53 @@
             # Before using this, you must
             # [configure access to Google services from internal
             # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+        &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+            # creation will be restricted. If empty, any zone may be chosen.
+          &quot;A String&quot;,
+        ],
+        &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
         &quot;disks&quot;: [ # Disks to attach.
           { # A Google Compute Engine disk resource specification.
-            &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-                # parameters. Must be 1 - 63 characters.
-                # The name &quot;boot&quot; is reserved for system use.
-            &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-            &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-                # of the pipeline run, regardless of what this field is set to.
-            &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-                # This field is not applicable for local SSD.
+            &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
+                # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+                # and
+                # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+                # for more details.
             &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
                 # Specifies the path in the docker container where files on
                 # this disk should be located. For example, if `mountPoint`
                 # is `/mnt/disk`, and the parameter has `localPath`
                 # `inputs/file.txt`, the docker container can access the data at
                 # `/mnt/disk/inputs/file.txt`.
+            &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+                # of the pipeline run, regardless of what this field is set to.
+            &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+                # parameters. Must be 1 - 63 characters.
+                # The name &quot;boot&quot; is reserved for system use.
+            &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+            &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+                # This field is not applicable for local SSD.
             &quot;readOnly&quot;: True or False, # Specifies how a source-based persistent disk will be mounted. See
                 # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
                 # for more details.
                 # Can only be set at create time.
-            &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
-                # https://cloud.google.com/compute/docs/reference/latest/instances#resource
-                # and
-                # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
-                # for more details.
           },
         ],
-        &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+        &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+            # By specifying this parameter, you will download and install the following
+            # third-party software onto your managed Compute Engine instances: NVIDIA®
+            # Tesla® drivers and NVIDIA® CUDA toolkit.
+            # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+            # available accelerator types.
+        &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
       },
-      &quot;outputs&quot;: { # Pipeline output arguments; keys are defined in the pipeline
-          # documentation.  All output parameters of without default values
-          # must be specified.  If parameters with defaults are specified
-          # here, the defaults will be overridden.
-        &quot;a_key&quot;: &quot;A String&quot;,
-      },
-    },
-    &quot;pipelineId&quot;: &quot;A String&quot;, # The already created pipeline to run.
-    &quot;ephemeralPipeline&quot;: { # The pipeline object. Represents a transformation from a set of input # A new pipeline object to run once and then delete.
-        # parameters to a set of output parameters. The transformation is defined
-        # as a docker image and command to run within that image. Each pipeline
-        # is run on a Google Compute Engine VM. A pipeline can be created with the
-        # `create` method and then later run with the `run` method, or a pipeline can
-        # be defined and run all at once with the `run` method.
-      &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
-          # This name can be used for filtering Pipelines in ListPipelines.
+      &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
+          # WRITE access.
       &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
           # is called. Cannot be specified in the Pipeline used in the
           # CreatePipelineRequest, and will be populated in the response to
           # CreatePipeline and all subsequent Get and List calls. Indicates that the
           # service has registered this pipeline.
-      &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
-          # WRITE access.
-      &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
-        { # Parameters facilitate setting and delivering data into the
-            # pipeline&#x27;s execution environment. They are defined at create time,
-            # with optional defaults, and can be overridden at run time.
-            #
-            # If `localCopy` is unset, then the parameter specifies a string that
-            # is passed as-is into the pipeline, as the value of the environment
-            # variable with the given name.  A default value can be optionally
-            # specified at create time. The default can be overridden at run time
-            # using the inputs map. If no default is given, a value must be
-            # supplied at runtime.
-            #
-            # If `localCopy` is defined, then the parameter specifies a data
-            # source or sink, both in Google Cloud Storage and on the Docker container
-            # where the pipeline computation is run. The service account associated with
-            # the Pipeline (by
-            # default the project&#x27;s Compute Engine service account) must have access to the
-            # Google Cloud Storage paths.
-            #
-            # At run time, the Google Cloud Storage paths can be overridden if a default
-            # was provided at create time, or must be set otherwise. The pipeline runner
-            # should add a key/value pair to either the inputs or outputs map. The
-            # indicated data copies will be carried out before/after pipeline execution,
-            # just as if the corresponding arguments were provided to `gsutil cp`.
-            #
-            # For example: Given the following `PipelineParameter`, specified
-            # in the `inputParameters` list:
-            #
-            # ```
-            # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
-            # ```
-            #
-            # where `disk` is defined in the `PipelineResources` object as:
-            #
-            # ```
-            # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
-            # ```
-            #
-            # We create a disk named `pd1`, mount it on the host VM, and map
-            # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
-            # runtime, an entry for `input_file` would be required in the inputs
-            # map, such as:
-            #
-            # ```
-            #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
-            # ```
-            #
-            # This would generate the following gsutil call:
-            #
-            # ```
-            #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
-            # ```
-            #
-            # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
-            # Docker container. Acceptable paths are:
-            #
-            # &lt;table&gt;
-            #   &lt;thead&gt;
-            #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
-            #   &lt;/thead&gt;
-            #   &lt;tbody&gt;
-            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-            #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
-            #   &lt;/tbody&gt;
-            # &lt;/table&gt;
-            #
-            # For outputs, the direction of the copy is reversed:
-            #
-            # ```
-            #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
-            # ```
-            #
-            # Acceptable paths are:
-            #
-            # &lt;table&gt;
-            #   &lt;thead&gt;
-            #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
-            #   &lt;/thead&gt;
-            #   &lt;tbody&gt;
-            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
-            #     &lt;tr&gt;
-            #       &lt;td&gt;file&lt;/td&gt;
-            #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
-            #     &lt;/tr&gt;
-            #     &lt;tr&gt;
-            #       &lt;td&gt;glob&lt;/td&gt;
-            #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
-            #   &lt;/tbody&gt;
-            # &lt;/table&gt;
-            #
-            # One restriction due to docker limitations, is that for outputs that are found
-            # on the boot disk, the local path cannot be a glob and must be a file.
-          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-              # If `localCopy` is present, then this must be a Google Cloud Storage path
-              # beginning with `gs://`.
-          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-              # as the key to the input and output maps in RunPipeline.
-          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
-          &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
-              # `LocalCopy` indicates where on the VM the file should be. The value
-              # given to this parameter (either at runtime or using `defaultValue`)
-              # must be the remote path where the file should be.
-            &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
-                # located. Can be the name of one of the disks specified in the
-                # Resources field, or &quot;boot&quot;, which represents the Docker
-                # instance&#x27;s boot disk and has a mount point of `/`.
-            &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
-                # this input should be localized to and from, relative to the specified
-                # disk&#x27;s mount point. For example: file.txt,
-          },
-        },
-      ],
-      &quot;docker&quot;: { # The Docker execuctor specification. # Specifies the docker run information.
-        &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
-            # string will be executed within a bash shell.
-            #
-            # If the command exits with a non-zero exit code, output parameter
-            # de-localization will be skipped and the pipeline operation&#x27;s
-            # `error` field will be populated.
-            #
-            # Maximum command string length is 16384.
-        &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
-            # Users that run pipelines must have READ access to the image.
-      },
-      &quot;description&quot;: &quot;A String&quot;, # User-specified description.
       &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
         { # Parameters facilitate setting and delivering data into the
             # pipeline&#x27;s execution environment. They are defined at create time,
@@ -1929,12 +615,6 @@
             #
             # One restriction due to docker limitations is that for outputs that are found
             # on the boot disk, the local path cannot be a glob and must be a file.
-          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
-              # If `localCopy` is present, then this must be a Google Cloud Storage path
-              # beginning with `gs://`.
-          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
-              # as the key to the input and output maps in RunPipeline.
-          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
           &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
               # `LocalCopy` indicates where on the VM the file should be. The value
               # given to this parameter (either at runtime or using `defaultValue`)
@@ -1947,8 +627,196 @@
                 # this input should be localized to and from, relative to the specified
                 # disk&#x27;s mount point. For example: file.txt.
           },
+          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+              # If `localCopy` is present, then this must be a Google Cloud Storage path
+              # beginning with `gs://`.
+          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+              # as the key to the input and output maps in RunPipeline.
         },
       ],
+      &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
+        { # Parameters facilitate setting and delivering data into the
+            # pipeline&#x27;s execution environment. They are defined at create time,
+            # with optional defaults, and can be overridden at run time.
+            #
+            # If `localCopy` is unset, then the parameter specifies a string that
+            # is passed as-is into the pipeline, as the value of the environment
+            # variable with the given name.  A default value can be optionally
+            # specified at create time. The default can be overridden at run time
+            # using the inputs map. If no default is given, a value must be
+            # supplied at runtime.
+            #
+            # If `localCopy` is defined, then the parameter specifies a data
+            # source or sink, both in Google Cloud Storage and on the Docker container
+            # where the pipeline computation is run. The service account associated with
+            # the Pipeline (by
+            # default the project&#x27;s Compute Engine service account) must have access to the
+            # Google Cloud Storage paths.
+            #
+            # At run time, the Google Cloud Storage paths can be overridden if a default
+            # was provided at create time, or must be set otherwise. The pipeline runner
+            # should add a key/value pair to either the inputs or outputs map. The
+            # indicated data copies will be carried out before/after pipeline execution,
+            # just as if the corresponding arguments were provided to `gsutil cp`.
+            #
+            # For example: Given the following `PipelineParameter`, specified
+            # in the `inputParameters` list:
+            #
+            # ```
+            # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+            # ```
+            #
+            # where `disk` is defined in the `PipelineResources` object as:
+            #
+            # ```
+            # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+            # ```
+            #
+            # We create a disk named `pd1`, mount it on the host VM, and map
+            # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+            # runtime, an entry for `input_file` would be required in the inputs
+            # map, such as:
+            #
+            # ```
+            #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+            # ```
+            #
+            # This would generate the following gsutil call:
+            #
+            # ```
+            #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+            # ```
+            #
+            # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+            # Docker container. Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # For outputs, the direction of the copy is reversed:
+            #
+            # ```
+            #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+            # ```
+            #
+            # Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;file&lt;/td&gt;
+            #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+            #     &lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;glob&lt;/td&gt;
+            #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # One restriction due to docker limitations is that for outputs that are found
+            # on the boot disk, the local path cannot be a glob and must be a file.
+          &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+              # `LocalCopy` indicates where on the VM the file should be. The value
+              # given to this parameter (either at runtime or using `defaultValue`)
+              # must be the remote path where the file should be.
+            &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                # located. Can be the name of one of the disks specified in the
+                # Resources field, or &quot;boot&quot;, which represents the Docker
+                # instance&#x27;s boot disk and has a mount point of `/`.
+            &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                # this input should be localized to and from, relative to the specified
+                # disk&#x27;s mount point. For example: file.txt.
+          },
+          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+              # If `localCopy` is present, then this must be a Google Cloud Storage path
+              # beginning with `gs://`.
+          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+              # as the key to the input and output maps in RunPipeline.
+        },
+      ],
+      &quot;description&quot;: &quot;A String&quot;, # User-specified description.
+      &quot;docker&quot;: { # The Docker executor specification. # Specifies the docker run information.
+        &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
+            # Users that run pipelines must have READ access to the image.
+        &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
+            # string will be executed within a bash shell.
+            #
+            # If the command exits with a non-zero exit code, output parameter
+            # de-localization will be skipped and the pipeline operation&#x27;s
+            # `error` field will be populated.
+            #
+            # Maximum command string length is 16384.
+      },
+      &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
+          # This name can be used for filtering Pipelines in ListPipelines.
+    }</pre>
+</div>
+
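+<p>A minimal usage sketch for the <code>create</code> call documented above. This example is
+not part of the generated reference; it assumes the google-api-python-client package,
+application-default credentials, and a placeholder project ID.</p>
+<pre>
+from googleapiclient.discovery import build
+
+# Build a client for the v1alpha2 Genomics API (credentials are assumed to be
+# available via the environment, e.g. application-default credentials).
+service = build('genomics', 'v1alpha2')
+
+# A minimal pipeline definition containing only the required fields.
+# 'my-project' is a placeholder; use a project to which you have WRITE access.
+pipeline_body = {
+    'name': 'echo-pipeline',
+    'projectId': 'my-project',
+    'docker': {
+        'imageName': 'ubuntu',
+        'cmd': 'echo hello',
+    },
+    'resources': {
+        'minimumCpuCores': 1,
+        'minimumRamGb': 3.75,
+    },
+}
+
+# Register the pipeline; the response includes the generated pipelineId.
+created = service.pipelines().create(body=pipeline_body).execute()
+print(created['pipelineId'])
+</pre>
+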
+<div class="method">
+    <code class="details" id="delete">delete(pipelineId, x__xgafv=None)</code>
+  <pre>Deletes a pipeline based on ID.
+
+Caller must have WRITE permission to the project.
+
+Args:
+  pipelineId: string, Caller must have WRITE access to the project in which this pipeline
+is defined. (required)
+  x__xgafv: string, V1 error format.
+    Allowed values
+      1 - v1 error format
+      2 - v2 error format
+
+Returns:
+  An object of the form:
+
+    { # A generic empty message that you can re-use to avoid defining duplicated
+      # empty messages in your APIs. A typical example is to use it as the request
+      # or the response type of an API method. For instance:
+      #
+      #     service Foo {
+      #       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+      #     }
+      #
+      # The JSON representation for `Empty` is an empty JSON object `{}`.
+  }</pre>
+</div>
+
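+<p>A short sketch of the <code>delete</code> call above and the <code>get</code> call below.
+This is hypothetical usage, not generated reference material; <code>service</code> and the
+created pipeline are carried over from the <code>create</code> sketch.</p>
+<pre>
+pipeline_id = created['pipelineId']
+
+# Retrieve the registered pipeline (caller needs READ access to the project).
+pipeline = service.pipelines().get(pipelineId=pipeline_id).execute()
+print(pipeline['name'])
+
+# Delete the pipeline when it is no longer needed (caller needs WRITE access).
+service.pipelines().delete(pipelineId=pipeline_id).execute()
+</pre>
+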
+<div class="method">
+    <code class="details" id="get">get(pipelineId, x__xgafv=None)</code>
+  <pre>Retrieves a pipeline based on ID.
+
+Caller must have READ permission to the project.
+
+Args:
+  pipelineId: string, Caller must have READ access to the project in which this pipeline
+is defined. (required)
+  x__xgafv: string, V1 error format.
+    Allowed values
+      1 - v1 error format
+      2 - v2 error format
+
+Returns:
+  An object of the form:
+
+    { # The pipeline object. Represents a transformation from a set of input
+        # parameters to a set of output parameters. The transformation is defined
+        # as a docker image and command to run within that image. Each pipeline
+        # is run on a Google Compute Engine VM. A pipeline can be created with the
+        # `create` method and then later run with the `run` method, or a pipeline can
+        # be defined and run all at once with the `run` method.
       &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
           # Required fields:
           #
@@ -1957,25 +825,14 @@
           #
           # *
           # minimumRamGb
-        &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
         &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
             # it must be true for both create time and run time. It cannot be true at
             # run time if false at create time.
-        &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
-            # creation will restricted. If empty, any zone may be chosen.
-          &quot;A String&quot;,
-        ],
+        &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
         &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
             # By specifying this parameter, you will download and install the following
             # third-party software onto your managed Compute Engine instances:
             # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
-        &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
-            # By specifying this parameter, you will download and install the following
-            # third-party software onto your managed Compute Engine instances: NVIDIA®
-            # Tesla® drivers and NVIDIA® CUDA toolkit.
-            # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
-            # available accelerator types.
-        &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
         &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
             # feature that may go away. Defaults to false.
             # Corresponds to `--no_address` flag for [gcloud compute instances create]
@@ -1988,35 +845,1178 @@
             # Before using this, you must
             # [configure access to Google services from internal
             # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+        &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+            # creation will be restricted. If empty, any zone may be chosen.
+          &quot;A String&quot;,
+        ],
+        &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
         &quot;disks&quot;: [ # Disks to attach.
           { # A Google Compute Engine disk resource specification.
-            &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
-                # parameters. Must be 1 - 63 characters.
-                # The name &quot;boot&quot; is reserved for system use.
-            &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
-            &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
-                # of the pipeline run, regardless of what this field is set to.
-            &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
-                # This field is not applicable for local SSD.
+            &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
+                # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+                # and
+                # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+                # for more details.
             &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
                 # Specifies the path in the docker container where files on
                 # this disk should be located. For example, if `mountPoint`
                 # is `/mnt/disk`, and the parameter has `localPath`
                 # `inputs/file.txt`, the docker container can access the data at
                 # `/mnt/disk/inputs/file.txt`.
+            &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+                # of the pipeline run, regardless of what this field is set to.
+            &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+                # parameters. Must be 1 - 63 characters.
+                # The name &quot;boot&quot; is reserved for system use.
+            &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+            &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+                # This field is not applicable for local SSD.
             &quot;readOnly&quot;: True or False, # Specifies how a sourced-base persistent disk will be mounted. See
                 # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
                 # for more details.
                 # Can only be set at create time.
+          },
+        ],
+        &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+            # By specifying this parameter, you will download and install the following
+            # third-party software onto your managed Compute Engine instances: NVIDIA®
+            # Tesla® drivers and NVIDIA® CUDA toolkit.
+            # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+            # available accelerator types.
+        &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
+      },
+      &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
+          # WRITE access.
+      &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
+          # is called. Cannot be specified in the Pipeline used in the
+          # CreatePipelineRequest, and will be populated in the response to
+          # CreatePipeline and all subsequent Get and List calls. Indicates that the
+          # service has registered this pipeline.
+      &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
+        { # Parameters facilitate setting and delivering data into the
+            # pipeline&#x27;s execution environment. They are defined at create time,
+            # with optional defaults, and can be overridden at run time.
+            #
+            # If `localCopy` is unset, then the parameter specifies a string that
+            # is passed as-is into the pipeline, as the value of the environment
+            # variable with the given name.  A default value can be optionally
+            # specified at create time. The default can be overridden at run time
+            # using the inputs map. If no default is given, a value must be
+            # supplied at runtime.
+            #
+            # If `localCopy` is defined, then the parameter specifies a data
+            # source or sink, both in Google Cloud Storage and on the Docker container
+            # where the pipeline computation is run. The service account associated with
+            # the Pipeline (by
+            # default the project&#x27;s Compute Engine service account) must have access to the
+            # Google Cloud Storage paths.
+            #
+            # At run time, the Google Cloud Storage paths can be overridden if a default
+            # was provided at create time, or must be set otherwise. The pipeline runner
+            # should add a key/value pair to either the inputs or outputs map. The
+            # indicated data copies will be carried out before/after pipeline execution,
+            # just as if the corresponding arguments were provided to `gsutil cp`.
+            #
+            # For example: Given the following `PipelineParameter`, specified
+            # in the `inputParameters` list:
+            #
+            # ```
+            # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+            # ```
+            #
+            # where `disk` is defined in the `PipelineResources` object as:
+            #
+            # ```
+            # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+            # ```
+            #
+            # We create a disk named `pd1`, mount it on the host VM, and map
+            # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+            # runtime, an entry for `input_file` would be required in the inputs
+            # map, such as:
+            #
+            # ```
+            #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+            # ```
+            #
+            # This would generate the following gsutil call:
+            #
+            # ```
+            #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+            # ```
+            #
+            # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+            # Docker container. Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # For outputs, the direction of the copy is reversed:
+            #
+            # ```
+            #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+            # ```
+            #
+            # Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;file&lt;/td&gt;
+            #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+            #     &lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;glob&lt;/td&gt;
+            #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # One restriction, due to Docker limitations, is that for outputs found
+            # on the boot disk, the local path cannot be a glob and must be a file.
+          &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+              # `LocalCopy` indicates where on the VM the file should be. The value
+              # given to this parameter (either at runtime or using `defaultValue`)
+              # must be the remote path where the file should be.
+            &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                # located. Can be the name of one of the disks specified in the
+                # Resources field, or &quot;boot&quot;, which represents the Docker
+                # instance&#x27;s boot disk and has a mount point of `/`.
+            &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                # this input should be localized to and from, relative to the specified
+                # disk&#x27;s mount point. For example: file.txt,
+          },
+          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+              # If `localCopy` is present, then this must be a Google Cloud Storage path
+              # beginning with `gs://`.
+          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+              # as the key to the input and output maps in RunPipeline.
+        },
+      ],
+      &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
+        { # Parameters facilitate setting and delivering data into the
+            # pipeline&#x27;s execution environment. They are defined at create time,
+            # with optional defaults, and can be overridden at run time.
+            #
+            # If `localCopy` is unset, then the parameter specifies a string that
+            # is passed as-is into the pipeline, as the value of the environment
+            # variable with the given name.  A default value can be optionally
+            # specified at create time. The default can be overridden at run time
+            # using the inputs map. If no default is given, a value must be
+            # supplied at runtime.
+            #
+            # If `localCopy` is defined, then the parameter specifies a data
+            # source or sink, both in Google Cloud Storage and on the Docker container
+            # where the pipeline computation is run. The service account associated with
+            # the Pipeline (by
+            # default the project&#x27;s Compute Engine service account) must have access to the
+            # Google Cloud Storage paths.
+            #
+            # At run time, the Google Cloud Storage paths can be overridden if a default
+            # was provided at create time, or must be set otherwise. The pipeline runner
+            # should add a key/value pair to either the inputs or outputs map. The
+            # indicated data copies will be carried out before/after pipeline execution,
+            # just as if the corresponding arguments were provided to `gsutil cp`.
+            #
+            # For example: Given the following `PipelineParameter`, specified
+            # in the `inputParameters` list:
+            #
+            # ```
+            # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+            # ```
+            #
+            # where `disk` is defined in the `PipelineResources` object as:
+            #
+            # ```
+            # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+            # ```
+            #
+            # We create a disk named `pd1`, mount it on the host VM, and map
+            # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+            # runtime, an entry for `input_file` would be required in the inputs
+            # map, such as:
+            #
+            # ```
+            #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+            # ```
+            #
+            # This would generate the following gsutil call:
+            #
+            # ```
+            #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+            # ```
+            #
+            # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+            # Docker container. Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # For outputs, the direction of the copy is reversed:
+            #
+            # ```
+            #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+            # ```
+            #
+            # Acceptable paths are:
+            #
+            # &lt;table&gt;
+            #   &lt;thead&gt;
+            #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+            #   &lt;/thead&gt;
+            #   &lt;tbody&gt;
+            #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;file&lt;/td&gt;
+            #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+            #     &lt;/tr&gt;
+            #     &lt;tr&gt;
+            #       &lt;td&gt;glob&lt;/td&gt;
+            #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+            #   &lt;/tbody&gt;
+            # &lt;/table&gt;
+            #
+            # One restriction, due to Docker limitations, is that for outputs found
+            # on the boot disk, the local path cannot be a glob and must be a file.
+          &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+              # `LocalCopy` indicates where on the VM the file should be. The value
+              # given to this parameter (either at runtime or using `defaultValue`)
+              # must be the remote path where the file should be.
+            &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                # located. Can be the name of one of the disks specified in the
+                # Resources field, or &quot;boot&quot;, which represents the Docker
+                # instance&#x27;s boot disk and has a mount point of `/`.
+            &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                # this input should be localized to and from, relative to the specified
+                # disk&#x27;s mount point. For example: file.txt,
+          },
+          &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+          &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+              # If `localCopy` is present, then this must be a Google Cloud Storage path
+              # beginning with `gs://`.
+          &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+              # as the key to the input and output maps in RunPipeline.
+        },
+      ],
+      &quot;description&quot;: &quot;A String&quot;, # User-specified description.
+      &quot;docker&quot;: { # The Docker executor specification. # Specifies the docker run information.
+        &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
+            # Users that run pipelines must have READ access to the image.
+        &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
+            # string will be executed within a bash shell.
+            #
+            # If the command exits with a non-zero exit code, output parameter
+            # de-localization will be skipped and the pipeline operation&#x27;s
+            # `error` field will be populated.
+            #
+            # Maximum command string length is 16384.
+      },
+      &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
+          # This name can be used for filtering Pipelines in ListPipelines.
+    }</pre>
+</div>
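+
+<p>Example (an editorial sketch, not part of the generated reference): fetching a stored pipeline
+and inspecting its Docker settings. The pipeline ID below is a placeholder; the caller needs READ
+access to the project.</p>
+
+<pre>
+from googleapiclient.discovery import build
+
+service = build(&#x27;genomics&#x27;, &#x27;v1alpha2&#x27;)
+
+# The response is a plain dict shaped like the Pipeline object shown above.
+pipeline = service.pipelines().get(pipelineId=&#x27;my-pipeline-id&#x27;).execute()
+
+print(pipeline[&#x27;name&#x27;])
+print(pipeline[&#x27;docker&#x27;][&#x27;imageName&#x27;])
+print(pipeline[&#x27;docker&#x27;][&#x27;cmd&#x27;])
+</pre>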
+
+<div class="method">
+    <code class="details" id="getControllerConfig">getControllerConfig(validationToken=None, operationId=None, x__xgafv=None)</code>
+  <pre>Gets controller configuration information. Should only be called
+by VMs created by the Pipelines Service and not by end users.
+
+Args:
+  validationToken: string, A parameter
+  operationId: string, The operation to retrieve controller configuration for.
+  x__xgafv: string, V1 error format.
+    Allowed values
+      1 - v1 error format
+      2 - v2 error format
+
+Returns:
+  An object of the form:
+
+    { # Stores the information that the controller will fetch from the
+      # server in order to run. Should only be used by VMs created by the
+      # Pipelines Service and not by end users.
+    &quot;cmd&quot;: &quot;A String&quot;,
+    &quot;gcsSinks&quot;: {
+      &quot;a_key&quot;: {
+        &quot;values&quot;: [
+          &quot;A String&quot;,
+        ],
+      },
+    },
+    &quot;gcsLogPath&quot;: &quot;A String&quot;,
+    &quot;machineType&quot;: &quot;A String&quot;,
+    &quot;disks&quot;: {
+      &quot;a_key&quot;: &quot;A String&quot;,
+    },
+    &quot;vars&quot;: {
+      &quot;a_key&quot;: &quot;A String&quot;,
+    },
+    &quot;image&quot;: &quot;A String&quot;,
+    &quot;gcsSources&quot;: {
+      &quot;a_key&quot;: {
+        &quot;values&quot;: [
+          &quot;A String&quot;,
+        ],
+      },
+    },
+  }</pre>
+</div>
+
+<div class="method">
+    <code class="details" id="list">list(namePrefix=None, pageSize=None, projectId=None, pageToken=None, x__xgafv=None)</code>
+  <pre>Lists pipelines.
+
+Caller must have READ permission to the project.
+
+Args:
+  namePrefix: string, Pipelines with names that match this prefix should be
+returned.  If unspecified, all pipelines in the project, up to
+`pageSize`, will be returned.
+  pageSize: integer, Number of pipelines to return at once. Defaults to 256, and max
+is 2048.
+  projectId: string, Required. The name of the project to search for pipelines. Caller
+must have READ access to this project.
+  pageToken: string, Token to use to indicate where to start getting results.
+If unspecified, returns the first page of results.
+  x__xgafv: string, V1 error format.
+    Allowed values
+      1 - v1 error format
+      2 - v2 error format
+
+Returns:
+  An object of the form:
+
+    { # The response of ListPipelines. Contains at most `pageSize`
+      # pipelines. If it contains `pageSize` pipelines, and more pipelines
+      # exist, then `nextPageToken` will be populated and should be
+      # used as the `pageToken` argument to a subsequent ListPipelines
+      # request.
+    &quot;nextPageToken&quot;: &quot;A String&quot;, # The token to use to get the next page of results.
+    &quot;pipelines&quot;: [ # The matched pipelines.
+      { # The pipeline object. Represents a transformation from a set of input
+            # parameters to a set of output parameters. The transformation is defined
+            # as a docker image and command to run within that image. Each pipeline
+            # is run on a Google Compute Engine VM. A pipeline can be created with the
+            # `create` method and then later run with the `run` method, or a pipeline can
+            # be defined and run all at once with the `run` method.
+          &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+              # Required fields:
+              #
+              # *
+              # minimumCpuCores
+              #
+              # *
+              # minimumRamGb
+            &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+                # must be true for both create time and run time. Cannot be true at run time
+                # if false at create time.
+            &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+            &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
+                # By specifying this parameter, you will download and install the following
+                # third-party software onto your managed Compute Engine instances:
+                # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+            &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
+                # feature that may go away. Defaults to false.
+                # Corresponds to `--no_address` flag for [gcloud compute instances create]
+                # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+                # In order to use this, must be true for both create time and run time.
+                # Cannot be true at run time if false at create time. If you need to ssh into
+                # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+                # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
+                # only load docker images from Google Container Registry and not Docker Hub.
+                # Before using this, you must
+                # [configure access to Google services from internal
+                # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+            &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+                # creation will be restricted. If empty, any zone may be chosen.
+              &quot;A String&quot;,
+            ],
+            &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
+            &quot;disks&quot;: [ # Disks to attach.
+              { # A Google Compute Engine disk resource specification.
+                &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
+                    # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+                    # and
+                    # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+                    # for more details.
+                &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
+                    # Specifies the path in the docker container where files on
+                    # this disk should be located. For example, if `mountPoint`
+                    # is `/mnt/disk`, and the parameter has `localPath`
+                    # `inputs/file.txt`, the docker container can access the data at
+                    # `/mnt/disk/inputs/file.txt`.
+                &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+                    # of the pipeline run, regardless of what this field is set to.
+                &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+                    # parameters. Must be 1 - 63 characters.
+                    # The name &quot;boot&quot; is reserved for system use.
+                &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+                &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+                    # This field is not applicable for local SSD.
+                &quot;readOnly&quot;: True or False, # Specifies how a source-based persistent disk will be mounted. See
+                    # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+                    # for more details.
+                    # Can only be set at create time.
+              },
+            ],
+            &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+                # By specifying this parameter, you will download and install the following
+                # third-party software onto your managed Compute Engine instances: NVIDIA®
+                # Tesla® drivers and NVIDIA® CUDA toolkit.
+                # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+                # available accelerator types.
+            &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
+          },
+          &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
+              # WRITE access.
+          &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
+              # is called. Cannot be specified in the Pipeline used in the
+              # CreatePipelineRequest, and will be populated in the response to
+              # CreatePipeline and all subsequent Get and List calls. Indicates that the
+              # service has registered this pipeline.
+          &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
+            { # Parameters facilitate setting and delivering data into the
+                # pipeline&#x27;s execution environment. They are defined at create time,
+                # with optional defaults, and can be overridden at run time.
+                #
+                # If `localCopy` is unset, then the parameter specifies a string that
+                # is passed as-is into the pipeline, as the value of the environment
+                # variable with the given name.  A default value can be optionally
+                # specified at create time. The default can be overridden at run time
+                # using the inputs map. If no default is given, a value must be
+                # supplied at runtime.
+                #
+                # If `localCopy` is defined, then the parameter specifies a data
+                # source or sink, both in Google Cloud Storage and on the Docker container
+                # where the pipeline computation is run. The service account associated with
+                # the Pipeline (by
+                # default the project&#x27;s Compute Engine service account) must have access to the
+                # Google Cloud Storage paths.
+                #
+                # At run time, the Google Cloud Storage paths can be overridden if a default
+                # was provided at create time, or must be set otherwise. The pipeline runner
+                # should add a key/value pair to either the inputs or outputs map. The
+                # indicated data copies will be carried out before/after pipeline execution,
+                # just as if the corresponding arguments were provided to `gsutil cp`.
+                #
+                # For example: Given the following `PipelineParameter`, specified
+                # in the `inputParameters` list:
+                #
+                # ```
+                # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+                # ```
+                #
+                # where `disk` is defined in the `PipelineResources` object as:
+                #
+                # ```
+                # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+                # ```
+                #
+                # We create a disk named `pd1`, mount it on the host VM, and map
+                # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+                # runtime, an entry for `input_file` would be required in the inputs
+                # map, such as:
+                #
+                # ```
+                #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+                # ```
+                #
+                # This would generate the following gsutil call:
+                #
+                # ```
+                #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+                # ```
+                #
+                # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+                # Docker container. Acceptable paths are:
+                #
+                # &lt;table&gt;
+                #   &lt;thead&gt;
+                #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+                #   &lt;/thead&gt;
+                #   &lt;tbody&gt;
+                #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+                #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+                #   &lt;/tbody&gt;
+                # &lt;/table&gt;
+                #
+                # For outputs, the direction of the copy is reversed:
+                #
+                # ```
+                #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+                # ```
+                #
+                # Acceptable paths are:
+                #
+                # &lt;table&gt;
+                #   &lt;thead&gt;
+                #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+                #   &lt;/thead&gt;
+                #   &lt;tbody&gt;
+                #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+                #     &lt;tr&gt;
+                #       &lt;td&gt;file&lt;/td&gt;
+                #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+                #     &lt;/tr&gt;
+                #     &lt;tr&gt;
+                #       &lt;td&gt;glob&lt;/td&gt;
+                #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+                #   &lt;/tbody&gt;
+                # &lt;/table&gt;
+                #
+                # One restriction, due to Docker limitations, is that for outputs found
+                # on the boot disk, the local path cannot be a glob and must be a file.
+              &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+                  # `LocalCopy` indicates where on the VM the file should be. The value
+                  # given to this parameter (either at runtime or using `defaultValue`)
+                  # must be the remote path where the file should be.
+                &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                    # located. Can be the name of one of the disks specified in the
+                    # Resources field, or &quot;boot&quot;, which represents the Docker
+                    # instance&#x27;s boot disk and has a mount point of `/`.
+                &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                    # this input should be localized to and from, relative to the specified
+                    # disk&#x27;s mount point. For example: file.txt,
+              },
+              &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+              &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+                  # If `localCopy` is present, then this must be a Google Cloud Storage path
+                  # beginning with `gs://`.
+              &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+                  # as the key to the input and output maps in RunPipeline.
+            },
+          ],
+          &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
+            { # Parameters facilitate setting and delivering data into the
+                # pipeline&#x27;s execution environment. They are defined at create time,
+                # with optional defaults, and can be overridden at run time.
+                #
+                # If `localCopy` is unset, then the parameter specifies a string that
+                # is passed as-is into the pipeline, as the value of the environment
+                # variable with the given name.  A default value can be optionally
+                # specified at create time. The default can be overridden at run time
+                # using the inputs map. If no default is given, a value must be
+                # supplied at runtime.
+                #
+                # If `localCopy` is defined, then the parameter specifies a data
+                # source or sink, both in Google Cloud Storage and on the Docker container
+                # where the pipeline computation is run. The service account associated with
+                # the Pipeline (by
+                # default the project&#x27;s Compute Engine service account) must have access to the
+                # Google Cloud Storage paths.
+                #
+                # At run time, the Google Cloud Storage paths can be overridden if a default
+                # was provided at create time, or must be set otherwise. The pipeline runner
+                # should add a key/value pair to either the inputs or outputs map. The
+                # indicated data copies will be carried out before/after pipeline execution,
+                # just as if the corresponding arguments were provided to `gsutil cp`.
+                #
+                # For example: Given the following `PipelineParameter`, specified
+                # in the `inputParameters` list:
+                #
+                # ```
+                # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+                # ```
+                #
+                # where `disk` is defined in the `PipelineResources` object as:
+                #
+                # ```
+                # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+                # ```
+                #
+                # We create a disk named `pd1`, mount it on the host VM, and map
+                # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+                # runtime, an entry for `input_file` would be required in the inputs
+                # map, such as:
+                #
+                # ```
+                #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+                # ```
+                #
+                # This would generate the following gsutil call:
+                #
+                # ```
+                #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+                # ```
+                #
+                # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+                # Docker container. Acceptable paths are:
+                #
+                # &lt;table&gt;
+                #   &lt;thead&gt;
+                #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+                #   &lt;/thead&gt;
+                #   &lt;tbody&gt;
+                #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+                #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+                #   &lt;/tbody&gt;
+                # &lt;/table&gt;
+                #
+                # For outputs, the direction of the copy is reversed:
+                #
+                # ```
+                #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+                # ```
+                #
+                # Acceptable paths are:
+                #
+                # &lt;table&gt;
+                #   &lt;thead&gt;
+                #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+                #   &lt;/thead&gt;
+                #   &lt;tbody&gt;
+                #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+                #     &lt;tr&gt;
+                #       &lt;td&gt;file&lt;/td&gt;
+                #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+                #     &lt;/tr&gt;
+                #     &lt;tr&gt;
+                #       &lt;td&gt;glob&lt;/td&gt;
+                #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+                #   &lt;/tbody&gt;
+                # &lt;/table&gt;
+                #
+                # One restriction, due to Docker limitations, is that for outputs found
+                # on the boot disk, the local path cannot be a glob and must be a file.
+              &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+                  # `LocalCopy` indicates where on the VM the file should be. The value
+                  # given to this parameter (either at runtime or using `defaultValue`)
+                  # must be the remote path where the file should be.
+                &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                    # located. Can be the name of one of the disks specified in the
+                    # Resources field, or &quot;boot&quot;, which represents the Docker
+                    # instance&#x27;s boot disk and has a mount point of `/`.
+                &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                    # this input should be localized to and from, relative to the specified
+                    # disk&#x27;s mount point. For example: file.txt,
+              },
+              &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+              &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+                  # If `localCopy` is present, then this must be a Google Cloud Storage path
+                  # beginning with `gs://`.
+              &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+                  # as the key to the input and output maps in RunPipeline.
+            },
+          ],
+          &quot;description&quot;: &quot;A String&quot;, # User-specified description.
+          &quot;docker&quot;: { # The Docker executor specification. # Specifies the docker run information.
+            &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
+                # Users that run pipelines must have READ access to the image.
+            &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
+                # string will be executed within a bash shell.
+                #
+                # If the command exits with a non-zero exit code, output parameter
+                # de-localization will be skipped and the pipeline operation&#x27;s
+                # `error` field will be populated.
+                #
+                # Maximum command string length is 16384.
+          },
+          &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
+              # This name can be used for filtering Pipelines in ListPipelines.
+        },
+    ],
+  }</pre>
+</div>
+
+<div class="method">
+    <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
+  <pre>Retrieves the next page of results.
+
+Args:
+  previous_request: The request for the previous page. (required)
+  previous_response: The response from the request for the previous page. (required)
+
+Returns:
+  A request object that you can call &#x27;execute()&#x27; on to request the next
+  page. Returns None if there are no more items in the collection.
+    </pre>
+</div>
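+
+<p>Example (an editorial sketch, not part of the generated reference): paging through every
+pipeline in a project by combining <code>list</code> and <code>list_next</code>. The project ID
+and name prefix are placeholders.</p>
+
+<pre>
+from googleapiclient.discovery import build
+
+service = build(&#x27;genomics&#x27;, &#x27;v1alpha2&#x27;)
+
+request = service.pipelines().list(projectId=&#x27;my-project&#x27;,
+                                   namePrefix=&#x27;my-prefix&#x27;,
+                                   pageSize=256)
+while request is not None:
+    response = request.execute()
+    for pipeline in response.get(&#x27;pipelines&#x27;, []):
+        print(pipeline[&#x27;pipelineId&#x27;], pipeline[&#x27;name&#x27;])
+    # list_next returns None once there are no more pages.
+    request = service.pipelines().list_next(previous_request=request,
+                                            previous_response=response)
+</pre>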
+
+<div class="method">
+    <code class="details" id="run">run(body=None, x__xgafv=None)</code>
+  <pre>Runs a pipeline. If `pipelineId` is specified in the request, then
+run a saved pipeline. If `ephemeralPipeline` is specified, then run
+that pipeline once without saving a copy.
+
+The caller must have READ permission to the project where the pipeline
+is stored and WRITE permission to the project where the pipeline will be
+run, as VMs will be created and storage will be used.
+
+If a pipeline operation is still running after 6 days, it will be canceled.
+
+Args:
+  body: object, The request body.
+    The object takes the form of:
+
+{ # The request to run a pipeline. If `pipelineId` is specified, it
+      # refers to a saved pipeline created with CreatePipeline and set as
+      # the `pipelineId` of the returned Pipeline object. If
+      # `ephemeralPipeline` is specified, that pipeline is run once
+      # with the given args and not saved. It is an error to specify both
+      # `pipelineId` and `ephemeralPipeline`. `pipelineArgs`
+      # must be specified.
+    &quot;pipelineId&quot;: &quot;A String&quot;, # The already created pipeline to run.
+    &quot;ephemeralPipeline&quot;: { # The pipeline object. Represents a transformation from a set of input # A new pipeline object to run once and then delete.
+          # parameters to a set of output parameters. The transformation is defined
+          # as a docker image and command to run within that image. Each pipeline
+          # is run on a Google Compute Engine VM. A pipeline can be created with the
+          # `create` method and then later run with the `run` method, or a pipeline can
+          # be defined and run all at once with the `run` method.
+        &quot;resources&quot;: { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+            # Required fields:
+            #
+            # *
+            # minimumCpuCores
+            #
+            # *
+            # minimumRamGb
+          &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+              # must be true for both create time and run time. Cannot be true at run time
+              # if false at create time.
+          &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+          &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
+              # By specifying this parameter, you will download and install the following
+              # third-party software onto your managed Compute Engine instances:
+              # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+          &quot;noAddress&quot;: True or False, # Whether to assign an external IP to the instance. This is an experimental
+              # feature that may go away. Defaults to false.
+              # Corresponds to `--no_address` flag for [gcloud compute instances create]
+              # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+              # In order to use this, must be true for both create time and run time.
+              # Cannot be true at run time if false at create time. If you need to ssh into
+              # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+              # the private VM&#x27;s Internal IP.  If noAddress is set, this pipeline run may
+              # only load docker images from Google Container Registry and not Docker Hub.
+              # Before using this, you must
+              # [configure access to Google services from internal
+              # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+          &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+              # creation will be restricted. If empty, any zone may be chosen.
+            &quot;A String&quot;,
+          ],
+          &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
+          &quot;disks&quot;: [ # Disks to attach.
+            { # A Google Compute Engine disk resource specification.
+              &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
+                  # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+                  # and
+                  # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+                  # for more details.
+              &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
+                  # Specifies the path in the docker container where files on
+                  # this disk should be located. For example, if `mountPoint`
+                  # is `/mnt/disk`, and the parameter has `localPath`
+                  # `inputs/file.txt`, the docker container can access the data at
+                  # `/mnt/disk/inputs/file.txt`.
+              &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+                  # of the pipeline run, regardless of what this field is set to.
+              &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+                  # parameters. Must be 1 - 63 characters.
+                  # The name &quot;boot&quot; is reserved for system use.
+              &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+              &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+                  # This field is not applicable for local SSD.
+              &quot;readOnly&quot;: True or False, # Specifies how a source-based persistent disk will be mounted. See
+                  # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+                  # for more details.
+                  # Can only be set at create time.
+            },
+          ],
+          &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+              # By specifying this parameter, you will download and install the following
+              # third-party software onto your managed Compute Engine instances: NVIDIA®
+              # Tesla® drivers and NVIDIA® CUDA toolkit.
+              # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+              # available accelerator types.
+          &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
+        },
+        &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to create the pipeline. The caller must have
+            # WRITE access.
+        &quot;pipelineId&quot;: &quot;A String&quot;, # Unique pipeline id that is generated by the service when CreatePipeline
+            # is called. Cannot be specified in the Pipeline used in the
+            # CreatePipelineRequest, and will be populated in the response to
+            # CreatePipeline and all subsequent Get and List calls. Indicates that the
+            # service has registered this pipeline.
+        &quot;inputParameters&quot;: [ # Input parameters of the pipeline.
+          { # Parameters facilitate setting and delivering data into the
+              # pipeline&#x27;s execution environment. They are defined at create time,
+              # with optional defaults, and can be overridden at run time.
+              #
+              # If `localCopy` is unset, then the parameter specifies a string that
+              # is passed as-is into the pipeline, as the value of the environment
+              # variable with the given name.  A default value can be optionally
+              # specified at create time. The default can be overridden at run time
+              # using the inputs map. If no default is given, a value must be
+              # supplied at runtime.
+              #
+              # If `localCopy` is defined, then the parameter specifies a data
+              # source or sink, both in Google Cloud Storage and on the Docker container
+              # where the pipeline computation is run. The service account associated with
+              # the Pipeline (by
+              # default the project&#x27;s Compute Engine service account) must have access to the
+              # Google Cloud Storage paths.
+              #
+              # At run time, the Google Cloud Storage paths can be overridden if a default
+              # was provided at create time, or must be set otherwise. The pipeline runner
+              # should add a key/value pair to either the inputs or outputs map. The
+              # indicated data copies will be carried out before/after pipeline execution,
+              # just as if the corresponding arguments were provided to `gsutil cp`.
+              #
+              # For example: Given the following `PipelineParameter`, specified
+              # in the `inputParameters` list:
+              #
+              # ```
+              # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+              # ```
+              #
+              # where `disk` is defined in the `PipelineResources` object as:
+              #
+              # ```
+              # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+              # ```
+              #
+              # We create a disk named `pd1`, mount it on the host VM, and map
+              # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+              # runtime, an entry for `input_file` would be required in the inputs
+              # map, such as:
+              #
+              # ```
+              #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+              # ```
+              #
+              # This would generate the following gsutil call:
+              #
+              # ```
+              #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+              # ```
+              #
+              # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+              # Docker container. Acceptable paths are:
+              #
+              # &lt;table&gt;
+              #   &lt;thead&gt;
+              #     &lt;tr&gt;&lt;th&gt;Google Cloud storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+              #   &lt;/thead&gt;
+              #   &lt;tbody&gt;
+              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+              #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+              #   &lt;/tbody&gt;
+              # &lt;/table&gt;
+              #
+              # For outputs, the direction of the copy is reversed:
+              #
+              # ```
+              #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+              # ```
+              #
+              # Acceptable paths are:
+              #
+              # &lt;table&gt;
+              #   &lt;thead&gt;
+              #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+              #   &lt;/thead&gt;
+              #   &lt;tbody&gt;
+              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+              #     &lt;tr&gt;
+              #       &lt;td&gt;file&lt;/td&gt;
+              #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+              #     &lt;/tr&gt;
+              #     &lt;tr&gt;
+              #       &lt;td&gt;glob&lt;/td&gt;
+              #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+              #   &lt;/tbody&gt;
+              # &lt;/table&gt;
+              #
+              # One restriction, due to Docker limitations, is that for outputs found
+              # on the boot disk, the local path cannot be a glob and must be a file.
+            &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+                # `LocalCopy` indicates where on the VM the file should be. The value
+                # given to this parameter (either at runtime or using `defaultValue`)
+                # must be the remote path where the file should be.
+              &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                  # located. Can be the name of one of the disks specified in the
+                  # Resources field, or &quot;boot&quot;, which represents the Docker
+                  # instance&#x27;s boot disk and has a mount point of `/`.
+              &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                  # this input should be localized to and from, relative to the specified
+                  # disk&#x27;s mount point. For example: file.txt.
+            },
+            &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+            &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+                # If `localCopy` is present, then this must be a Google Cloud Storage path
+                # beginning with `gs://`.
+            &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+                # as the key to the input and output maps in RunPipeline.
+          },
+        ],
+        &quot;outputParameters&quot;: [ # Output parameters of the pipeline.
+          { # Parameters facilitate setting and delivering data into the
+              # pipeline&#x27;s execution environment. They are defined at create time,
+              # with optional defaults, and can be overridden at run time.
+              #
+              # If `localCopy` is unset, then the parameter specifies a string that
+              # is passed as-is into the pipeline, as the value of the environment
+              # variable with the given name.  A default value can be optionally
+              # specified at create time. The default can be overridden at run time
+              # using the inputs map. If no default is given, a value must be
+              # supplied at runtime.
+              #
+              # If `localCopy` is defined, then the parameter specifies a data
+              # source or sink, both in Google Cloud Storage and on the Docker container
+              # where the pipeline computation is run. The service account associated with
+              # the Pipeline (by
+              # default the project&#x27;s Compute Engine service account) must have access to the
+              # Google Cloud Storage paths.
+              #
+              # At run time, the Google Cloud Storage paths can be overridden if a default
+              # was provided at create time, or must be set otherwise. The pipeline runner
+              # should add a key/value pair to either the inputs or outputs map. The
+              # indicated data copies will be carried out before/after pipeline execution,
+              # just as if the corresponding arguments were provided to `gsutil cp`.
+              #
+              # For example: Given the following `PipelineParameter`, specified
+              # in the `inputParameters` list:
+              #
+              # ```
+              # {name: &quot;input_file&quot;, localCopy: {path: &quot;file.txt&quot;, disk: &quot;pd1&quot;}}
+              # ```
+              #
+              # where `disk` is defined in the `PipelineResources` object as:
+              #
+              # ```
+              # {name: &quot;pd1&quot;, mountPoint: &quot;/mnt/disk/&quot;}
+              # ```
+              #
+              # We create a disk named `pd1`, mount it on the host VM, and map
+              # `/mnt/pd1` to `/mnt/disk` in the docker container.  At
+              # runtime, an entry for `input_file` would be required in the inputs
+              # map, such as:
+              #
+              # ```
+              #   inputs[&quot;input_file&quot;] = &quot;gs://my-bucket/bar.txt&quot;
+              # ```
+              #
+              # This would generate the following gsutil call:
+              #
+              # ```
+              #   gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+              # ```
+              #
+              # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+              # Docker container. Acceptable paths are:
+              #
+              # &lt;table&gt;
+              #   &lt;thead&gt;
+              #     &lt;tr&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;/tr&gt;
+              #   &lt;/thead&gt;
+              #   &lt;tbody&gt;
+              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+              #     &lt;tr&gt;&lt;td&gt;glob&lt;/td&gt;&lt;td&gt;directory&lt;/td&gt;&lt;/tr&gt;
+              #   &lt;/tbody&gt;
+              # &lt;/table&gt;
+              #
+              # For outputs, the direction of the copy is reversed:
+              #
+              # ```
+              #   gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+              # ```
+              #
+              # Acceptable paths are:
+              #
+              # &lt;table&gt;
+              #   &lt;thead&gt;
+              #     &lt;tr&gt;&lt;th&gt;Local path&lt;/th&gt;&lt;th&gt;Google Cloud Storage path&lt;/th&gt;&lt;/tr&gt;
+              #   &lt;/thead&gt;
+              #   &lt;tbody&gt;
+              #     &lt;tr&gt;&lt;td&gt;file&lt;/td&gt;&lt;td&gt;file&lt;/td&gt;&lt;/tr&gt;
+              #     &lt;tr&gt;
+              #       &lt;td&gt;file&lt;/td&gt;
+              #       &lt;td&gt;directory - directory must already exist&lt;/td&gt;
+              #     &lt;/tr&gt;
+              #     &lt;tr&gt;
+              #       &lt;td&gt;glob&lt;/td&gt;
+              #       &lt;td&gt;directory - directory will be created if it doesn&#x27;t exist&lt;/td&gt;&lt;/tr&gt;
+              #   &lt;/tbody&gt;
+              # &lt;/table&gt;
+              #
+              # One restriction, due to docker limitations, is that for outputs found
+              # on the boot disk, the local path cannot be a glob and must be a file.
+            &quot;localCopy&quot;: { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+                # `LocalCopy` indicates where on the VM the file should be. The value
+                # given to this parameter (either at runtime or using `defaultValue`)
+                # must be the remote path where the file should be.
+              &quot;disk&quot;: &quot;A String&quot;, # Required. The name of the disk where this parameter is
+                  # located. Can be the name of one of the disks specified in the
+                  # Resources field, or &quot;boot&quot;, which represents the Docker
+                  # instance&#x27;s boot disk and has a mount point of `/`.
+              &quot;path&quot;: &quot;A String&quot;, # Required. The path within the user&#x27;s docker container where
+                  # this input should be localized to and from, relative to the specified
+                  # disk&#x27;s mount point. For example: file.txt.
+            },
+            &quot;description&quot;: &quot;A String&quot;, # Human-readable description.
+            &quot;defaultValue&quot;: &quot;A String&quot;, # The default value for this parameter. Can be overridden at runtime.
+                # If `localCopy` is present, then this must be a Google Cloud Storage path
+                # beginning with `gs://`.
+            &quot;name&quot;: &quot;A String&quot;, # Required. Name of the parameter - the pipeline runner uses this string
+                # as the key to the input and output maps in RunPipeline.
+          },
+        ],
+        &quot;description&quot;: &quot;A String&quot;, # User-specified description.
+        &quot;docker&quot;: { # The Docker executor specification. # Specifies the docker run information.
+          &quot;imageName&quot;: &quot;A String&quot;, # Required. Image name from either Docker Hub or Google Container Registry.
+              # Users that run pipelines must have READ access to the image.
+          &quot;cmd&quot;: &quot;A String&quot;, # Required. The command or newline delimited script to run. The command
+              # string will be executed within a bash shell.
+              #
+              # If the command exits with a non-zero exit code, output parameter
+              # de-localization will be skipped and the pipeline operation&#x27;s
+              # `error` field will be populated.
+              #
+              # Maximum command string length is 16384.
+        },
+        &quot;name&quot;: &quot;A String&quot;, # Required. A user specified pipeline name that does not have to be unique.
+            # This name can be used for filtering Pipelines in ListPipelines.
+      },
+    &quot;pipelineArgs&quot;: { # The pipeline run arguments. # The arguments to use when running this pipeline.
+      &quot;logging&quot;: { # The logging options for the pipeline run. # Required. Logging options. Used by the service to communicate results
+          # to the user.
+        &quot;gcsPath&quot;: &quot;A String&quot;, # The location in Google Cloud Storage to which the pipeline logs
+            # will be copied. Can be specified as a fully qualified directory
+            # path, in which case logs will be output with a unique identifier
+            # as the filename in that directory, or as a fully specified path,
+            # which must end in `.log`, in which case that path will be
+            # used, and the user must ensure that logs are not
+            # overwritten. Stdout and stderr logs from the run are also
+            # generated and output as `-stdout.log` and `-stderr.log`.
+      },
+      &quot;inputs&quot;: { # Pipeline input arguments; keys are defined in the pipeline documentation.
+          # All input parameters that do not have default values must be specified.
+          # If parameters with defaults are specified here, the defaults will be
+          # overridden.
+        &quot;a_key&quot;: &quot;A String&quot;,
+      },
+      &quot;resources&quot;: { # The system resources for the pipeline run. # Specifies resource requirements/overrides for the pipeline run.
+        &quot;preemptible&quot;: True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+            # it must be true at both create time and run time. Cannot be true at run time
+            # if false at create time.
+        &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+        &quot;acceleratorCount&quot;: &quot;A String&quot;, # Optional. The number of accelerators of the specified type to attach.
+            # By specifying this parameter, you will download and install the following
+            # third-party software onto your managed Compute Engine instances:
+            # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+        &quot;noAddress&quot;: True or False, # If `true`, no external IP is assigned to the instance. This is an experimental
+            # feature that may go away. Defaults to false.
+            # Corresponds to the `--no_address` flag for [gcloud compute instances create]
+            # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+            # In order to use this, it must be true at both create time and run time.
+            # Cannot be true at run time if false at create time. If you need to ssh into
+            # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+            # the private VM&#x27;s internal IP.  If noAddress is set, this pipeline run may
+            # only load docker images from Google Container Registry and not Docker Hub.
+            # Before using this, you must
+            # [configure access to Google services from internal
+            # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+        &quot;zones&quot;: [ # List of Google Compute Engine availability zones to which resource
+            # creation will be restricted. If empty, any zone may be chosen.
+          &quot;A String&quot;,
+        ],
+        &quot;minimumRamGb&quot;: 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+        &quot;disks&quot;: [ # Disks to attach.
+          { # A Google Compute Engine disk resource specification.
             &quot;source&quot;: &quot;A String&quot;, # The full or partial URL of the persistent disk to attach. See
                 # https://cloud.google.com/compute/docs/reference/latest/instances#resource
                 # and
                 # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
                 # for more details.
+            &quot;mountPoint&quot;: &quot;A String&quot;, # Required at create time and cannot be overridden at run time.
+                # Specifies the path in the docker container where files on
+                # this disk should be located. For example, if `mountPoint`
+                # is `/mnt/disk`, and the parameter&#x27;s `localCopy` path is
+                # `inputs/file.txt`, the docker container can access the data at
+                # `/mnt/disk/inputs/file.txt`.
+            &quot;autoDelete&quot;: True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+                # of the pipeline run, regardless of what this field is set to.
+            &quot;name&quot;: &quot;A String&quot;, # Required. The name of the disk that can be used in the pipeline
+                # parameters. Must be 1 - 63 characters.
+                # The name &quot;boot&quot; is reserved for system use.
+            &quot;type&quot;: &quot;A String&quot;, # Required. The type of the disk to create.
+            &quot;sizeGb&quot;: 42, # The size of the disk. Defaults to 500 (GB).
+                # This field is not applicable for local SSD.
+            &quot;readOnly&quot;: True or False, # Specifies how a source-based persistent disk will be mounted. See
+                # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+                # for more details.
+                # Can only be set at create time.
           },
         ],
-        &quot;bootDiskSizeGb&quot;: 42, # The size of the boot disk. Defaults to 10 (GB).
+        &quot;acceleratorType&quot;: &quot;A String&quot;, # Optional. The Compute Engine defined accelerator type.
+            # By specifying this parameter, you will download and install the following
+            # third-party software onto your managed Compute Engine instances: NVIDIA®
+            # Tesla® drivers and NVIDIA® CUDA toolkit.
+            # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+            # available accelerator types.
+        &quot;minimumCpuCores&quot;: 42, # The minimum number of cores to use. Defaults to 1.
       },
+      &quot;labels&quot;: { # Labels to apply to this pipeline run. Labels will also be applied to
+          # compute resources (VM, disks) created by this pipeline run. When listing
+          # operations, operations can be filtered by labels.
+          # Label keys may not be empty; label values may be empty. Non-empty labels
+          # must be 1-63 characters long, and comply with [RFC1035]
+          # (https://www.ietf.org/rfc/rfc1035.txt).
+          # Specifically, the name must be 1-63 characters long and match the regular
+          # expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
+          # character must be a lowercase letter, and all following characters must be
+          # a dash, lowercase letter, or digit, except the last character, which cannot
+          # be a dash.
+        &quot;a_key&quot;: &quot;A String&quot;,
+      },
+      &quot;serviceAccount&quot;: { # A Google Cloud Service Account. # The Google Cloud Service Account that will be used to access data and
+          # services. By default, the compute service account associated with
+          # `projectId` is used.
+        &quot;scopes&quot;: [ # List of scopes to be enabled for this service account on the VM.
+            # The following scopes are automatically included:
+            #
+            # * https://www.googleapis.com/auth/compute
+            # * https://www.googleapis.com/auth/devstorage.full_control
+            # * https://www.googleapis.com/auth/genomics
+            # * https://www.googleapis.com/auth/logging.write
+            # * https://www.googleapis.com/auth/monitoring.write
+          &quot;A String&quot;,
+        ],
+        &quot;email&quot;: &quot;A String&quot;, # Email address of the service account. Defaults to `default`,
+            # which uses the compute service account associated with the project.
+      },
+      &quot;clientId&quot;: &quot;A String&quot;, # This field is deprecated. Use `labels` instead. Client-specified pipeline
+          # operation identifier.
+      &quot;projectId&quot;: &quot;A String&quot;, # Required. The project in which to run the pipeline. The caller must have
+          # WRITER access to all Google Cloud services and resources (e.g. Google
+          # Compute Engine) that will be used.
+      &quot;outputs&quot;: { # Pipeline output arguments; keys are defined in the pipeline
+          # documentation.  All output parameters without default values
+          # must be specified.  If parameters with defaults are specified
+          # here, the defaults will be overridden.
+        &quot;a_key&quot;: &quot;A String&quot;,
+      },
+      &quot;keepVmAliveOnFailureDuration&quot;: &quot;A String&quot;, # How long to keep the VM up after a failure (for example, the docker command
+          # failed or copying input or output files failed). While the VM is up, one
+          # can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
     },
   }
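
Taken together, the request body above can be assembled and submitted with this client roughly as follows. This is a minimal sketch rather than part of the generated reference: the project, bucket, image name, command, and the `PERSISTENT_HDD` disk type are illustrative placeholders, and credentials are assumed to come from Application Default Credentials.

```python
from googleapiclient import discovery

# Build the Genomics v1alpha2 client (uses Application Default Credentials).
service = discovery.build('genomics', 'v1alpha2')

body = {
    'ephemeralPipeline': {
        'projectId': 'my-project',   # placeholder project
        'name': 'count-lines',
        'docker': {
            'imageName': 'ubuntu',   # the runner must have READ access to the image
            # Reads the localized input and writes the localized output at
            # mountPoint + localCopy path (/mnt/disk/file.txt, /mnt/disk/out.txt).
            'cmd': 'wc -l /mnt/disk/file.txt > /mnt/disk/out.txt',
        },
        'inputParameters': [
            {'name': 'input_file',
             'localCopy': {'disk': 'pd1', 'path': 'file.txt'}},
        ],
        'outputParameters': [
            {'name': 'output_file',
             'localCopy': {'disk': 'pd1', 'path': 'out.txt'}},
        ],
        'resources': {
            'minimumCpuCores': 1,
            'minimumRamGb': 1,
            'disks': [{'name': 'pd1', 'mountPoint': '/mnt/disk/',
                       'type': 'PERSISTENT_HDD', 'sizeGb': 10}],
        },
    },
    'pipelineArgs': {
        'projectId': 'my-project',
        'logging': {'gcsPath': 'gs://my-bucket/logs/'},
        # Keys match the parameter names above; values are gs:// paths copied
        # with gsutil-cp semantics before and after the docker command runs.
        'inputs': {'input_file': 'gs://my-bucket/bar.txt'},
        'outputs': {'output_file': 'gs://my-bucket/results/out.txt'},
    },
}

operation = service.pipelines().run(body=body).execute()
print(operation['name'])  # long-running operation name
```

With these values the service localizes `gs://my-bucket/bar.txt` onto disk `pd1` so the container sees it at `/mnt/disk/file.txt`, and, if the command exits with code zero, de-localizes `out.txt` to the given output path.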
 
@@ -2030,7 +2030,6 @@
 
     { # This resource represents a long-running operation that is the result of a
       # network API call.
-    &quot;name&quot;: &quot;A String&quot;, # The server-assigned name, which is only unique within the same service that originally returns it. For example&amp;#58; `operations/CJHU7Oi_ChDrveSpBRjfuL-qzoWAgEw`
     &quot;error&quot;: { # The `Status` type defines a logical error model that is suitable for # The error result of the operation in case of failure or cancellation.
         # different programming environments, including REST APIs and RPC APIs. It is
         # used by [gRPC](https://github.com/grpc). Each `Status` message contains
@@ -2039,19 +2038,20 @@
         # You can find out more about this error model and how to work with it in the
         # [API Design Guide](https://cloud.google.com/apis/design/errors).
       &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
-      &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
-          # user-facing error message should be localized and sent in the
-          # google.rpc.Status.details field, or localized by the client.
       &quot;details&quot;: [ # A list of messages that carry the error details.  There is a common set of
           # message types for APIs to use.
         {
           &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
         },
       ],
+      &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
+          # user-facing error message should be localized and sent in the
+          # google.rpc.Status.details field, or localized by the client.
     },
     &quot;metadata&quot;: { # An OperationMetadata or Metadata object. This will always be returned with the Operation.
       &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
     },
+    &quot;name&quot;: &quot;A String&quot;, # The server-assigned name, which is only unique within the same service that originally returns it. For example&amp;#58; `operations/CJHU7Oi_ChDrveSpBRjfuL-qzoWAgEw`
     &quot;done&quot;: True or False, # If the value is `false`, it means the operation is still in progress.
         # If `true`, the operation is completed, and either `error` or `response` is
         # available.
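
A successful `run()` call returns an Operation like the one above, with `done` initially `false`. The polling sketch below reuses `service` and `operation` from the earlier example and assumes this API surface exposes an `operations().get(name=...)` method, as other Genomics API versions do; treat that call as an assumption and check the operations documentation for this version.

```python
import time

# Poll the long-running operation until it completes.
# Assumes `service` and `operation` from the run() sketch above, and an
# operations().get method on this API surface (an assumption, not confirmed here).
while not operation.get('done', False):
    time.sleep(30)
    operation = service.operations().get(name=operation['name']).execute()

if 'error' in operation:
    # `error` is a google.rpc.Status with code, message, and details.
    raise RuntimeError(operation['error'].get('message', 'pipeline failed'))
```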
@@ -2073,9 +2073,8 @@
 
 { # Request to set operation status. Should only be used by VMs
       # created by the Pipelines Service and not by end users.
-    &quot;errorMessage&quot;: &quot;A String&quot;,
-    &quot;validationToken&quot;: &quot;A String&quot;,
     &quot;errorCode&quot;: &quot;A String&quot;,
+    &quot;operationId&quot;: &quot;A String&quot;,
     &quot;timestampEvents&quot;: [
      { # Stores the list of events and times they occurred for major events in job
           # execution.
@@ -2083,7 +2082,8 @@
         &quot;description&quot;: &quot;A String&quot;, # String indicating the type of event
       },
     ],
-    &quot;operationId&quot;: &quot;A String&quot;,
+    &quot;errorMessage&quot;: &quot;A String&quot;,
+    &quot;validationToken&quot;: &quot;A String&quot;,
   }
 
   x__xgafv: string, V1 error format.