docs: update generated docs (#981)
diff --git a/docs/dyn/genomics_v1alpha2.pipelines.html b/docs/dyn/genomics_v1alpha2.pipelines.html
index ec8a23e..3239542 100644
--- a/docs/dyn/genomics_v1alpha2.pipelines.html
+++ b/docs/dyn/genomics_v1alpha2.pipelines.html
@@ -84,10 +84,10 @@
<code><a href="#get">get(pipelineId, x__xgafv=None)</a></code></p>
<p class="firstline">Retrieves a pipeline based on ID.</p>
<p class="toc_element">
- <code><a href="#getControllerConfig">getControllerConfig(operationId=None, validationToken=None, x__xgafv=None)</a></code></p>
+ <code><a href="#getControllerConfig">getControllerConfig(validationToken=None, operationId=None, x__xgafv=None)</a></code></p>
<p class="firstline">Gets controller configuration information. Should only be called</p>
<p class="toc_element">
- <code><a href="#list">list(pageToken=None, pageSize=None, projectId=None, namePrefix=None, x__xgafv=None)</a></code></p>
+ <code><a href="#list">list(namePrefix=None, pageSize=None, projectId=None, pageToken=None, x__xgafv=None)</a></code></p>
<p class="firstline">Lists pipelines.</p>
<p class="toc_element">
<code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p>
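Note that both reordered signatures above take keyword-only parameters, so the shuffling does not affect call sites. A minimal sketch of a call through google-api-python-client, with hypothetical project and prefix values:

```python
from googleapiclient.discovery import build

# Assumes application-default credentials are available in the environment.
service = build('genomics', 'v1alpha2')

# Only the keyword names matter at the call site; argument order is irrelevant,
# so the parameter reordering in this generated doc changes nothing for callers.
request = service.pipelines().list(
    projectId='my-project',  # hypothetical project ID
    namePrefix='align-',     # hypothetical name prefix
    pageSize=64,
)
response = request.execute()
```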
@@ -113,475 +113,86 @@
The object takes the form of:
{ # The pipeline object. Represents a transformation from a set of input
- # parameters to a set of output parameters. The transformation is defined
- # as a docker image and command to run within that image. Each pipeline
- # is run on a Google Compute Engine VM. A pipeline can be created with the
- # `create` method and then later run with the `run` method, or a pipeline can
- # be defined and run all at once with the `run` method.
-    "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
- # This name can be used for filtering Pipelines in ListPipelines.
- "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
- # is called. Cannot be specified in the Pipeline used in the
- # CreatePipelineRequest, and will be populated in the response to
- # CreatePipeline and all subsequent Get and List calls. Indicates that the
- # service has registered this pipeline.
- "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
- # WRITE access.
- "outputParameters": [ # Output parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
-      # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
-      # One restriction due to Docker limitations is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
-          # disk's mount point. For example: file.txt.
- },
- },
- ],
-    "docker": { # The Docker executor specification. # Specifies the docker run information.
- "cmd": "A String", # Required. The command or newline delimited script to run. The command
- # string will be executed within a bash shell.
- #
- # If the command exits with a non-zero exit code, output parameter
- # de-localization will be skipped and the pipeline operation's
- # `error` field will be populated.
- #
- # Maximum command string length is 16384.
- "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
- # Users that run pipelines must have READ access to the image.
- },
- "description": "A String", # User-specified description.
- "inputParameters": [ # Input parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
-      # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
-      # One restriction due to Docker limitations is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
-          # disk's mount point. For example: file.txt.
- },
- },
- ],
- "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
- # Required fields:
- #
-      # * minimumCpuCores
-      # * minimumRamGb
-      "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
-      "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. To use this, it
-          # must be true at both create time and run time; it cannot be true at run
-          # time if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
-      # creation will be restricted. If empty, any zone may be chosen.
- "A String",
- ],
- "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances:
- # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
- "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
- # feature that may go away. Defaults to false.
- # Corresponds to `--no_address` flag for [gcloud compute instances create]
- # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-          # To use this, it must be true at both create time and run time; it cannot
-          # be true at run time if false at create time. If you need to SSH into a
-          # private-IP VM for debugging, you can SSH to a public VM and then SSH into
-          # the private VM's internal IP. If noAddress is set, this pipeline run may
- # only load docker images from Google Container Registry and not Docker Hub.
- # Before using this, you must
- # [configure access to Google services from internal
- # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
- "disks": [ # Disks to attach.
- { # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
- "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
- # Specifies the path in the docker container where files on
- # this disk should be located. For example, if `mountPoint`
- # is `/mnt/disk`, and the parameter has `localPath`
- # `inputs/file.txt`, the docker container can access the data at
- # `/mnt/disk/inputs/file.txt`.
-          "readOnly": True or False, # Specifies how a source-based persistent disk will be mounted. See
- # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
- # for more details.
- # Can only be set at create time.
- "source": "A String", # The full or partial URL of the persistent disk to attach. See
- # https://cloud.google.com/compute/docs/reference/latest/instances#resource
- # and
- # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
- # for more details.
- },
- ],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
- },
-}
-
- x__xgafv: string, V1 error format.
- Allowed values
- 1 - v1 error format
- 2 - v2 error format
-
-Returns:
- An object of the form:
-
- { # The pipeline object. Represents a transformation from a set of input
# parameters to a set of output parameters. The transformation is defined
# as a docker image and command to run within that image. Each pipeline
# is run on a Google Compute Engine VM. A pipeline can be created with the
# `create` method and then later run with the `run` method, or a pipeline can
# be defined and run all at once with the `run` method.
-    "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
- # This name can be used for filtering Pipelines in ListPipelines.
+ "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+ # Required fields:
+ #
+      # * minimumCpuCores
+      # * minimumRamGb
+      "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. To use this, it
+          # must be true at both create time and run time; it cannot be true at run
+          # time if false at create time.
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances:
+ # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+ "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
+ # feature that may go away. Defaults to false.
+ # Corresponds to `--no_address` flag for [gcloud compute instances create]
+ # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+          # To use this, it must be true at both create time and run time; it cannot
+          # be true at run time if false at create time. If you need to SSH into a
+          # private-IP VM for debugging, you can SSH to a public VM and then SSH into
+          # the private VM's internal IP. If noAddress is set, this pipeline run may
+ # only load docker images from Google Container Registry and not Docker Hub.
+ # Before using this, you must
+ # [configure access to Google services from internal
+ # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+      # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+      "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
+ "disks": [ # Disks to attach.
+ { # A Google Compute Engine disk resource specification.
+ "source": "A String", # The full or partial URL of the persistent disk to attach. See
+ # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+ # and
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+ # for more details.
+ "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
+ # Specifies the path in the docker container where files on
+ # this disk should be located. For example, if `mountPoint`
+ # is `/mnt/disk`, and the parameter has `localPath`
+ # `inputs/file.txt`, the docker container can access the data at
+ # `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
+          "readOnly": True or False, # Specifies how a source-based persistent disk will be mounted. See
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+ # for more details.
+ # Can only be set at create time.
+ },
+ ],
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
+ },
+ "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
+ # WRITE access.
"pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
# is called. Cannot be specified in the Pipeline used in the
# CreatePipelineRequest, and will be populated in the response to
# CreatePipeline and all subsequent Get and List calls. Indicates that the
# service has registered this pipeline.
- "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
- # WRITE access.
- "outputParameters": [ # Output parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
-      # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
-      # One restriction due to Docker limitations is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
-          # disk's mount point. For example: file.txt.
- },
- },
- ],
-    "docker": { # The Docker executor specification. # Specifies the docker run information.
- "cmd": "A String", # Required. The command or newline delimited script to run. The command
- # string will be executed within a bash shell.
- #
- # If the command exits with a non-zero exit code, output parameter
- # de-localization will be skipped and the pipeline operation's
- # `error` field will be populated.
- #
- # Maximum command string length is 16384.
- "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
- # Users that run pipelines must have READ access to the image.
- },
- "description": "A String", # User-specified description.
"inputParameters": [ # Input parameters of the pipeline.
{ # Parameters facilitate setting and delivering data into the
# pipeline's execution environment. They are defined at create time,
@@ -674,12 +285,6 @@
#
      # One restriction due to Docker limitations is that for outputs that are found
# on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
"localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
# `LocalCopy` indicates where on the VM the file should be. The value
# given to this parameter (either at runtime or using `defaultValue`)
@@ -692,140 +297,14 @@
# this input should be localized to and from, relative to the specified
          # disk's mount point. For example: file.txt.
},
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
},
],
- "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
- # Required fields:
- #
-      # * minimumCpuCores
-      # * minimumRamGb
-      "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
-      "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. To use this, it
-          # must be true at both create time and run time; it cannot be true at run
-          # time if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
-      # creation will be restricted. If empty, any zone may be chosen.
- "A String",
- ],
- "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances:
- # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
- "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
- # feature that may go away. Defaults to false.
- # Corresponds to `--no_address` flag for [gcloud compute instances create]
- # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-          # To use this, it must be true at both create time and run time; it cannot
-          # be true at run time if false at create time. If you need to SSH into a
-          # private-IP VM for debugging, you can SSH to a public VM and then SSH into
-          # the private VM's internal IP. If noAddress is set, this pipeline run may
- # only load docker images from Google Container Registry and not Docker Hub.
- # Before using this, you must
- # [configure access to Google services from internal
- # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
- "disks": [ # Disks to attach.
- { # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
- "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
- # Specifies the path in the docker container where files on
- # this disk should be located. For example, if `mountPoint`
- # is `/mnt/disk`, and the parameter has `localPath`
- # `inputs/file.txt`, the docker container can access the data at
- # `/mnt/disk/inputs/file.txt`.
-          "readOnly": True or False, # Specifies how a source-based persistent disk will be mounted. See
- # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
- # for more details.
- # Can only be set at create time.
- "source": "A String", # The full or partial URL of the persistent disk to attach. See
- # https://cloud.google.com/compute/docs/reference/latest/instances#resource
- # and
- # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
- # for more details.
- },
- ],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
- },
- }</pre>
-</div>
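For orientation, a hedged sketch of calling `create` with a body shaped like the schema above. The project, image, and disk values are hypothetical, and `PERSISTENT_HDD` is assumed to be a valid disk type in this API version:

```python
from googleapiclient.discovery import build

service = build('genomics', 'v1alpha2')  # assumes default credentials

body = {
    'projectId': 'my-project',              # hypothetical; needs WRITE access
    'name': 'count-lines',                  # hypothetical pipeline name
    'docker': {
        'imageName': 'ubuntu',              # runner needs READ access to image
        'cmd': 'wc -l /mnt/data/file.txt',  # executed within a bash shell
    },
    'inputParameters': [{
        'name': 'input_file',
        'localCopy': {'disk': 'data', 'path': 'file.txt'},
    }],
    'resources': {
        'minimumCpuCores': 1,               # required field
        'minimumRamGb': 3.75,               # required field
        'disks': [{
            'name': 'data',
            'type': 'PERSISTENT_HDD',       # assumed enum value
            'mountPoint': '/mnt/data',
        }],
    },
}
pipeline = service.pipelines().create(body=body).execute()
print(pipeline['pipelineId'])  # populated by the service on create
```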
-
-<div class="method">
- <code class="details" id="delete">delete(pipelineId, x__xgafv=None)</code>
- <pre>Deletes a pipeline based on ID.
-
-Caller must have WRITE permission to the project.
-
-Args:
- pipelineId: string, Caller must have WRITE access to the project in which this pipeline
-is defined. (required)
- x__xgafv: string, V1 error format.
- Allowed values
- 1 - v1 error format
- 2 - v2 error format
-
-Returns:
- An object of the form:
-
- { # A generic empty message that you can re-use to avoid defining duplicated
- # empty messages in your APIs. A typical example is to use it as the request
- # or the response type of an API method. For instance:
- #
- # service Foo {
- # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
- # }
- #
-      # The JSON representation for `Empty` is an empty JSON object `{}`.
- }</pre>
-</div>
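A corresponding sketch for `delete`; the pipeline ID placeholder is hypothetical and would normally come from an earlier `create` or `list` call:

```python
from googleapiclient.discovery import build

service = build('genomics', 'v1alpha2')

# Caller must have WRITE access to the pipeline's project.
result = service.pipelines().delete(pipelineId='PIPELINE_ID').execute()
assert result == {}  # Empty is serialized as an empty JSON object
```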
-
-<div class="method">
- <code class="details" id="get">get(pipelineId, x__xgafv=None)</code>
- <pre>Retrieves a pipeline based on ID.
-
-Caller must have READ permission to the project.
-
-Args:
- pipelineId: string, Caller must have READ access to the project in which this pipeline
-is defined. (required)
- x__xgafv: string, V1 error format.
- Allowed values
- 1 - v1 error format
- 2 - v2 error format
-
-Returns:
- An object of the form:
-
- { # The pipeline object. Represents a transformation from a set of input
- # parameters to a set of output parameters. The transformation is defined
- # as a docker image and command to run within that image. Each pipeline
- # is run on a Google Compute Engine VM. A pipeline can be created with the
- # `create` method and then later run with the `run` method, or a pipeline can
- # be defined and run all at once with the `run` method.
-    "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
- # This name can be used for filtering Pipelines in ListPipelines.
- "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
- # is called. Cannot be specified in the Pipeline used in the
- # CreatePipelineRequest, and will be populated in the response to
- # CreatePipeline and all subsequent Get and List calls. Indicates that the
- # service has registered this pipeline.
- "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
- # WRITE access.
"outputParameters": [ # Output parameters of the pipeline.
{ # Parameters facilitate setting and delivering data into the
# pipeline's execution environment. They are defined at create time,
@@ -918,12 +397,6 @@
#
      # One restriction due to Docker limitations is that for outputs that are found
# on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
"localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
# `LocalCopy` indicates where on the VM the file should be. The value
# given to this parameter (either at runtime or using `defaultValue`)
@@ -936,9 +409,18 @@
# this input should be localized to and from, relative to the specified
          # disk's mount point. For example: file.txt.
},
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
},
],
+ "description": "A String", # User-specified description.
"docker": { # The Docker execuctor specification. # Specifies the docker run information.
+ "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
+ # Users that run pipelines must have READ access to the image.
"cmd": "A String", # Required. The command or newline delimited script to run. The command
# string will be executed within a bash shell.
#
@@ -947,201 +429,11 @@
# `error` field will be populated.
#
# Maximum command string length is 16384.
- "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
- # Users that run pipelines must have READ access to the image.
},
- "description": "A String", # User-specified description.
- "inputParameters": [ # Input parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
-      # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
-      # One restriction due to Docker limitations is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
-          # disk's mount point. For example: file.txt.
- },
- },
- ],
- "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
- # Required fields:
- #
-      # * minimumCpuCores
-      # * minimumRamGb
-      "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
-      "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. To use this, it
-          # must be true at both create time and run time; it cannot be true at run
-          # time if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
-      # creation will be restricted. If empty, any zone may be chosen.
- "A String",
- ],
- "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances:
- # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
- "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
- # feature that may go away. Defaults to false.
- # Corresponds to `--no_address` flag for [gcloud compute instances create]
- # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
-          # To use this, it must be true at both create time and run time; it cannot
-          # be true at run time if false at create time. If you need to SSH into a
-          # private-IP VM for debugging, you can SSH to a public VM and then SSH into
-          # the private VM's internal IP. If noAddress is set, this pipeline run may
- # only load docker images from Google Container Registry and not Docker Hub.
- # Before using this, you must
- # [configure access to Google services from internal
- # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
- "disks": [ # Disks to attach.
- { # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
- "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
- # Specifies the path in the docker container where files on
- # this disk should be located. For example, if `mountPoint`
- # is `/mnt/disk`, and the parameter has `localPath`
- # `inputs/file.txt`, the docker container can access the data at
- # `/mnt/disk/inputs/file.txt`.
-          "readOnly": True or False, # Specifies how a source-based persistent disk will be mounted. See
- # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
- # for more details.
- # Can only be set at create time.
- "source": "A String", # The full or partial URL of the persistent disk to attach. See
- # https://cloud.google.com/compute/docs/reference/latest/instances#resource
- # and
- # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
- # for more details.
- },
- ],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
- },
- }</pre>
-</div>
+    "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
+ # This name can be used for filtering Pipelines in ListPipelines.
+ }
-<div class="method">
- <code class="details" id="getControllerConfig">getControllerConfig(operationId=None, validationToken=None, x__xgafv=None)</code>
- <pre>Gets controller configuration information. Should only be called
-by VMs created by the Pipelines Service and not by end users.
-
-Args:
- operationId: string, The operation to retrieve controller configuration for.
- validationToken: string, A parameter
x__xgafv: string, V1 error format.
Allowed values
1 - v1 error format
@@ -1150,504 +442,28 @@
Returns:
An object of the form:
- { # Stores the information that the controller will fetch from the
- # server in order to run. Should only be used by VMs created by the
- # Pipelines Service and not by end users.
- "machineType": "A String",
- "cmd": "A String",
- "vars": {
- "a_key": "A String",
- },
- "image": "A String",
- "gcsLogPath": "A String",
- "gcsSources": {
- "a_key": {
- "values": [
- "A String",
- ],
- },
- },
- "gcsSinks": {
- "a_key": {
- "values": [
- "A String",
- ],
- },
- },
- "disks": {
- "a_key": "A String",
- },
- }</pre>
-</div>
-
-<div class="method">
- <code class="details" id="list">list(pageToken=None, pageSize=None, projectId=None, namePrefix=None, x__xgafv=None)</code>
- <pre>Lists pipelines.
-
-Caller must have READ permission to the project.
-
-Args:
- pageToken: string, Token to use to indicate where to start getting results.
-If unspecified, returns the first page of results.
- pageSize: integer, Number of pipelines to return at once. Defaults to 256, and max
-is 2048.
- projectId: string, Required. The name of the project to search for pipelines. Caller
-must have READ access to this project.
- namePrefix: string, Pipelines with names that match this prefix should be
-returned. If unspecified, all pipelines in the project, up to
-`pageSize`, will be returned.
- x__xgafv: string, V1 error format.
- Allowed values
- 1 - v1 error format
- 2 - v2 error format
-
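The `pageToken`/`nextPageToken` contract described below pairs with the generated `list_next` helper from the method table at the top of this file. A sketch of draining all pages, with a hypothetical project ID:

```python
from googleapiclient.discovery import build

service = build('genomics', 'v1alpha2')

request = service.pipelines().list(projectId='my-project', pageSize=256)
while request is not None:
    response = request.execute()
    for p in response.get('pipelines', []):
        print(p['pipelineId'], p['name'])
    # list_next returns None once the response carries no nextPageToken.
    request = service.pipelines().list_next(request, response)
```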
-Returns:
- An object of the form:
-
- { # The response of ListPipelines. Contains at most `pageSize`
- # pipelines. If it contains `pageSize` pipelines, and more pipelines
- # exist, then `nextPageToken` will be populated and should be
- # used as the `pageToken` argument to a subsequent ListPipelines
- # request.
- "nextPageToken": "A String", # The token to use to get the next page of results.
- "pipelines": [ # The matched pipelines.
- { # The pipeline object. Represents a transformation from a set of input
- # parameters to a set of output parameters. The transformation is defined
- # as a docker image and command to run within that image. Each pipeline
- # is run on a Google Compute Engine VM. A pipeline can be created with the
- # `create` method and then later run with the `run` method, or a pipeline can
- # be defined and run all at once with the `run` method.
-        "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
- # This name can be used for filtering Pipelines in ListPipelines.
- "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
- # is called. Cannot be specified in the Pipeline used in the
- # CreatePipelineRequest, and will be populated in the response to
- # CreatePipeline and all subsequent Get and List calls. Indicates that the
- # service has registered this pipeline.
- "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
- # WRITE access.
- "outputParameters": [ # Output parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
-          # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
-          # One restriction due to Docker limitations is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
-              # disk's mount point. For example: file.txt.
- },
- },
- ],
-        "docker": { # The Docker executor specification. # Specifies the docker run information.
- "cmd": "A String", # Required. The command or newline delimited script to run. The command
- # string will be executed within a bash shell.
- #
- # If the command exits with a non-zero exit code, output parameter
- # de-localization will be skipped and the pipeline operation's
- # `error` field will be populated.
- #
- # Maximum command string length is 16384.
- "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
- # Users that run pipelines must have READ access to the image.
- },
- "description": "A String", # User-specified description.
- "inputParameters": [ # Input parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Google Cloud storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
- # One restriction due to docker limitations, is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
- # disk's mount point. For example: file.txt,
- },
- },
- ],
- "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
- # Required fields:
- #
- # *
- # minimumCpuCores
- #
- # *
- # minimumRamGb
- "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
- "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
- # must be true for both create time and run time. Cannot be true at run time
- # if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
- # creation will restricted. If empty, any zone may be chosen.
- "A String",
- ],
- "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances:
- # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
- "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
- # feature that may go away. Defaults to false.
- # Corresponds to `--no_address` flag for [gcloud compute instances create]
- # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
- # In order to use this, must be true for both create time and run time.
- # Cannot be true at run time if false at create time. If you need to ssh into
- # a private IP VM for debugging, you can ssh to a public VM and then ssh into
- # the private VM's Internal IP. If noAddress is set, this pipeline run may
- # only load docker images from Google Container Registry and not Docker Hub.
- # Before using this, you must
- # [configure access to Google services from internal
- # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
- "disks": [ # Disks to attach.
- { # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
- "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
- # Specifies the path in the docker container where files on
- # this disk should be located. For example, if `mountPoint`
- # is `/mnt/disk`, and the parameter has `localPath`
- # `inputs/file.txt`, the docker container can access the data at
- # `/mnt/disk/inputs/file.txt`.
- "readOnly": True or False, # Specifies how a sourced-base persistent disk will be mounted. See
- # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
- # for more details.
- # Can only be set at create time.
- "source": "A String", # The full or partial URL of the persistent disk to attach. See
- # https://cloud.google.com/compute/docs/reference/latest/instances#resource
- # and
- # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
- # for more details.
- },
- ],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
- },
- },
- ],
- }</pre>
-</div>
-
-<div class="method">
- <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
- <pre>Retrieves the next page of results.
-
-Args:
- previous_request: The request for the previous page. (required)
- previous_response: The response from the request for the previous page. (required)
-
-Returns:
- A request object that you can call 'execute()' on to request the next
- page. Returns None if there are no more items in the collection.
- </pre>
-</div>
-
-<div class="method">
- <code class="details" id="run">run(body=None, x__xgafv=None)</code>
- <pre>Runs a pipeline. If `pipelineId` is specified in the request, then
-run a saved pipeline. If `ephemeralPipeline` is specified, then run
-that pipeline once without saving a copy.
-
-The caller must have READ permission to the project where the pipeline
-is stored and WRITE permission to the project where the pipeline will be
-run, as VMs will be created and storage will be used.
-
-If a pipeline operation is still running after 6 days, it will be canceled.
-
-Args:
- body: object, The request body.
- The object takes the form of:
-
-{ # The request to run a pipeline. If `pipelineId` is specified, it
- # refers to a saved pipeline created with CreatePipeline and set as
- # the `pipelineId` of the returned Pipeline object. If
- # `ephemeralPipeline` is specified, that pipeline is run once
- # with the given args and not saved. It is an error to specify both
- # `pipelineId` and `ephemeralPipeline`. `pipelineArgs`
- # must be specified.
- "pipelineArgs": { # The pipeline run arguments. # The arguments to use when running this pipeline.
- "projectId": "A String", # Required. The project in which to run the pipeline. The caller must have
- # WRITER access to all Google Cloud services and resources (e.g. Google
- # Compute Engine) will be used.
- "clientId": "A String", # This field is deprecated. Use `labels` instead. Client-specified pipeline
- # operation identifier.
- "serviceAccount": { # A Google Cloud Service Account. # The Google Cloud Service Account that will be used to access data and
- # services. By default, the compute service account associated with
- # `projectId` is used.
- "email": "A String", # Email address of the service account. Defaults to `default`,
- # which uses the compute service account associated with the project.
- "scopes": [ # List of scopes to be enabled for this service account on the VM.
- # The following scopes are automatically included:
- #
- # * https://www.googleapis.com/auth/compute
- # * https://www.googleapis.com/auth/devstorage.full_control
- # * https://www.googleapis.com/auth/genomics
- # * https://www.googleapis.com/auth/logging.write
- # * https://www.googleapis.com/auth/monitoring.write
- "A String",
- ],
- },
- "inputs": { # Pipeline input arguments; keys are defined in the pipeline documentation.
- # All input parameters that do not have default values must be specified.
- # If parameters with defaults are specified here, the defaults will be
- # overridden.
- "a_key": "A String",
- },
- "labels": { # Labels to apply to this pipeline run. Labels will also be applied to
- # compute resources (VM, disks) created by this pipeline run. When listing
- # operations, operations can filtered by labels.
- # Label keys may not be empty; label values may be empty. Non-empty labels
- # must be 1-63 characters long, and comply with [RFC1035]
- # (https://www.ietf.org/rfc/rfc1035.txt).
- # Specifically, the name must be 1-63 characters long and match the regular
- # expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
- # character must be a lowercase letter, and all following characters must be
- # a dash, lowercase letter, or digit, except the last character, which cannot
- # be a dash.
- "a_key": "A String",
- },
- "logging": { # The logging options for the pipeline run. # Required. Logging options. Used by the service to communicate results
- # to the user.
- "gcsPath": "A String", # The location in Google Cloud Storage to which the pipeline logs
- # will be copied. Can be specified as a fully qualified directory
- # path, in which case logs will be output with a unique identifier
- # as the filename in that directory, or as a fully specified path,
- # which must end in `.log`, in which case that path will be
- # used, and the user must ensure that logs are not
- # overwritten. Stdout and stderr logs from the run are also
- # generated and output as `-stdout.log` and `-stderr.log`.
- },
- "keepVmAliveOnFailureDuration": "A String", # How long to keep the VM up after a failure (for example docker command
- # failed, copying input or output files failed, etc). While the VM is up, one
- # can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
- "resources": { # The system resources for the pipeline run. # Specifies resource requirements/overrides for the pipeline run.
- "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+ { # The pipeline object. Represents a transformation from a set of input
+ # parameters to a set of output parameters. The transformation is defined
+ # as a docker image and command to run within that image. Each pipeline
+ # is run on a Google Compute Engine VM. A pipeline can be created with the
+ # `create` method and then later run with the `run` method, or a pipeline can
+ # be defined and run all at once with the `run` method.
+ "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+ # Required fields:
+ #
+      # * minimumCpuCores
+      # * minimumRamGb
"preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
            # it must be true at both create time and run time. It cannot be true at run time
# if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
- # creation will restricted. If empty, any zone may be chosen.
- "A String",
- ],
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
"acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
# By specifying this parameter, you will download and install the following
# third-party software onto your managed Compute Engine instances:
# NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
"noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
# feature that may go away. Defaults to false.
# Corresponds to `--no_address` flag for [gcloud compute instances create]
@@ -1660,183 +476,53 @@
# Before using this, you must
# [configure access to Google services from internal
# IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+          # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+        "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
"disks": [ # Disks to attach.
{ # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
+ "source": "A String", # The full or partial URL of the persistent disk to attach. See
+ # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+ # and
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+ # for more details.
"mountPoint": "A String", # Required at create time and cannot be overridden at run time.
# Specifies the path in the docker container where files on
# this disk should be located. For example, if `mountPoint`
# is `/mnt/disk`, and the parameter has `localPath`
# `inputs/file.txt`, the docker container can access the data at
# `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
"readOnly": True or False, # Specifies how a sourced-base persistent disk will be mounted. See
# https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
# for more details.
# Can only be set at create time.
- "source": "A String", # The full or partial URL of the persistent disk to attach. See
- # https://cloud.google.com/compute/docs/reference/latest/instances#resource
- # and
- # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
- # for more details.
},
],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
},
- "outputs": { # Pipeline output arguments; keys are defined in the pipeline
- # documentation. All output parameters of without default values
- # must be specified. If parameters with defaults are specified
- # here, the defaults will be overridden.
- "a_key": "A String",
- },
- },
- "pipelineId": "A String", # The already created pipeline to run.
- "ephemeralPipeline": { # The pipeline object. Represents a transformation from a set of input # A new pipeline object to run once and then delete.
- # parameters to a set of output parameters. The transformation is defined
- # as a docker image and command to run within that image. Each pipeline
- # is run on a Google Compute Engine VM. A pipeline can be created with the
- # `create` method and then later run with the `run` method, or a pipeline can
- # be defined and run all at once with the `run` method.
- "name": "A String", # Required. A user specified pipeline name that does not have to be unique.
- # This name can be used for filtering Pipelines in ListPipelines.
+ "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
+ # WRITE access.
"pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
# is called. Cannot be specified in the Pipeline used in the
# CreatePipelineRequest, and will be populated in the response to
# CreatePipeline and all subsequent Get and List calls. Indicates that the
# service has registered this pipeline.
- "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
- # WRITE access.
- "outputParameters": [ # Output parameters of the pipeline.
- { # Parameters facilitate setting and delivering data into the
- # pipeline's execution environment. They are defined at create time,
- # with optional defaults, and can be overridden at run time.
- #
- # If `localCopy` is unset, then the parameter specifies a string that
- # is passed as-is into the pipeline, as the value of the environment
- # variable with the given name. A default value can be optionally
- # specified at create time. The default can be overridden at run time
- # using the inputs map. If no default is given, a value must be
- # supplied at runtime.
- #
- # If `localCopy` is defined, then the parameter specifies a data
- # source or sink, both in Google Cloud Storage and on the Docker container
- # where the pipeline computation is run. The service account associated with
- # the Pipeline (by
- # default the project's Compute Engine service account) must have access to the
- # Google Cloud Storage paths.
- #
- # At run time, the Google Cloud Storage paths can be overridden if a default
- # was provided at create time, or must be set otherwise. The pipeline runner
- # should add a key/value pair to either the inputs or outputs map. The
- # indicated data copies will be carried out before/after pipeline execution,
- # just as if the corresponding arguments were provided to `gsutil cp`.
- #
- # For example: Given the following `PipelineParameter`, specified
- # in the `inputParameters` list:
- #
- # ```
- # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
- # ```
- #
- # where `disk` is defined in the `PipelineResources` object as:
- #
- # ```
- # {name: "pd1", mountPoint: "/mnt/disk/"}
- # ```
- #
- # We create a disk named `pd1`, mount it on the host VM, and map
- # `/mnt/pd1` to `/mnt/disk` in the docker container. At
- # runtime, an entry for `input_file` would be required in the inputs
- # map, such as:
- #
- # ```
- # inputs["input_file"] = "gs://my-bucket/bar.txt"
- # ```
- #
- # This would generate the following gsutil call:
- #
- # ```
- # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
- # ```
- #
- # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
- # Docker container. Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Google Cloud storage path</th><th>Local path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr><td>glob</td><td>directory</td></tr>
- # </tbody>
- # </table>
- #
- # For outputs, the direction of the copy is reversed:
- #
- # ```
- # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
- # ```
- #
- # Acceptable paths are:
- #
- # <table>
- # <thead>
- # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
- # </thead>
- # <tbody>
- # <tr><td>file</td><td>file</td></tr>
- # <tr>
- # <td>file</td>
- # <td>directory - directory must already exist</td>
- # </tr>
- # <tr>
- # <td>glob</td>
- # <td>directory - directory will be created if it doesn't exist</td></tr>
- # </tbody>
- # </table>
- #
- # One restriction due to docker limitations, is that for outputs that are found
- # on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
- "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
- # `LocalCopy` indicates where on the VM the file should be. The value
- # given to this parameter (either at runtime or using `defaultValue`)
- # must be the remote path where the file should be.
- "disk": "A String", # Required. The name of the disk where this parameter is
- # located. Can be the name of one of the disks specified in the
- # Resources field, or "boot", which represents the Docker
- # instance's boot disk and has a mount point of `/`.
- "path": "A String", # Required. The path within the user's docker container where
- # this input should be localized to and from, relative to the specified
- # disk's mount point. For example: file.txt,
- },
- },
- ],
- "docker": { # The Docker execuctor specification. # Specifies the docker run information.
- "cmd": "A String", # Required. The command or newline delimited script to run. The command
- # string will be executed within a bash shell.
- #
- # If the command exits with a non-zero exit code, output parameter
- # de-localization will be skipped and the pipeline operation's
- # `error` field will be populated.
- #
- # Maximum command string length is 16384.
- "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
- # Users that run pipelines must have READ access to the image.
- },
- "description": "A String", # User-specified description.
"inputParameters": [ # Input parameters of the pipeline.
{ # Parameters facilitate setting and delivering data into the
# pipeline's execution environment. They are defined at create time,
@@ -1929,12 +615,6 @@
#
          # One restriction, due to Docker limitations, is that for outputs that are found
# on the boot disk, the local path cannot be a glob and must be a file.
- "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
- # If `localCopy` is present, then this must be a Google Cloud Storage path
- # beginning with `gs://`.
- "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
- # as the key to the input and output maps in RunPipeline.
- "description": "A String", # Human-readable description.
"localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
# `LocalCopy` indicates where on the VM the file should be. The value
# given to this parameter (either at runtime or using `defaultValue`)
@@ -1947,8 +627,196 @@
# this input should be localized to and from, relative to the specified
            # disk's mount point. For example: `file.txt`.
},
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
},
],
+ "outputParameters": [ # Output parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+          #     <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+          # One restriction, due to Docker limitations, is that for outputs that are found
+ # on the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+            # disk's mount point. For example: `file.txt`.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "description": "A String", # User-specified description.
+      "docker": { # The Docker executor specification. # Specifies the docker run information.
+ "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
+ # Users that run pipelines must have READ access to the image.
+ "cmd": "A String", # Required. The command or newline delimited script to run. The command
+ # string will be executed within a bash shell.
+ #
+ # If the command exits with a non-zero exit code, output parameter
+ # de-localization will be skipped and the pipeline operation's
+ # `error` field will be populated.
+ #
+ # Maximum command string length is 16384.
+ },
+      "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
+ # This name can be used for filtering Pipelines in ListPipelines.
+ }</pre>
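+
+<p>Example (editorial sketch, not part of the generated reference): creating a
+minimal pipeline with the google-api-python-client. The project ID, image name,
+and command below are placeholders.</p>
+<pre>
+from googleapiclient.discovery import build
+
+# Build the client; uses Application Default Credentials.
+service = build('genomics', 'v1alpha2')
+
+# A minimal pipeline body, setting only the fields marked Required above.
+pipeline = {
+    'name': 'hello-pipeline',      # placeholder name
+    'projectId': 'my-project',     # placeholder project ID
+    'docker': {
+        'imageName': 'ubuntu',     # placeholder image; callers need READ access
+        'cmd': 'echo hello',
+    },
+    'resources': {
+        'minimumCpuCores': 1,
+        'minimumRamGb': 3.75,
+    },
+}
+
+created = service.pipelines().create(body=pipeline).execute()
+print(created['pipelineId'])  # generated by the service on creation
+</pre>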
+</div>
+
+<div class="method">
+ <code class="details" id="delete">delete(pipelineId, x__xgafv=None)</code>
+ <pre>Deletes a pipeline based on ID.
+
+Caller must have WRITE permission to the project.
+
+Args:
+ pipelineId: string, Caller must have WRITE access to the project in which this pipeline
+is defined. (required)
+ x__xgafv: string, V1 error format.
+ Allowed values
+ 1 - v1 error format
+ 2 - v2 error format
+
+Returns:
+ An object of the form:
+
+ { # A generic empty message that you can re-use to avoid defining duplicated
+ # empty messages in your APIs. A typical example is to use it as the request
+ # or the response type of an API method. For instance:
+ #
+ # service Foo {
+ # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ # }
+ #
+      # The JSON representation for `Empty` is an empty JSON object `{}`.
+ }</pre>
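+
+<p>Example (editorial sketch, not part of the generated reference): deleting a
+pipeline by ID, where <code>service</code> is a client built as in the
+<code>create</code> example above and the pipeline ID is a placeholder.</p>
+<pre>
+# Returns the Empty message described above ({}) on success.
+service.pipelines().delete(pipelineId='my-pipeline-id').execute()
+</pre>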
+</div>
+
+<div class="method">
+ <code class="details" id="get">get(pipelineId, x__xgafv=None)</code>
+ <pre>Retrieves a pipeline based on ID.
+
+Caller must have READ permission to the project.
+
+Args:
+ pipelineId: string, Caller must have READ access to the project in which this pipeline
+is defined. (required)
+ x__xgafv: string, V1 error format.
+ Allowed values
+ 1 - v1 error format
+ 2 - v2 error format
+
+Returns:
+ An object of the form:
+
+ { # The pipeline object. Represents a transformation from a set of input
+ # parameters to a set of output parameters. The transformation is defined
+ # as a docker image and command to run within that image. Each pipeline
+ # is run on a Google Compute Engine VM. A pipeline can be created with the
+ # `create` method and then later run with the `run` method, or a pipeline can
+ # be defined and run all at once with the `run` method.
"resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
# Required fields:
#
@@ -1957,25 +825,14 @@
      #
      # * minimumRamGb
- "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
"preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
          # it must be true at both create time and run time. It cannot be true at run time
# if false at create time.
- "zones": [ # List of Google Compute Engine availability zones to which resource
- # creation will restricted. If empty, any zone may be chosen.
- "A String",
- ],
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
"acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
# By specifying this parameter, you will download and install the following
# third-party software onto your managed Compute Engine instances:
# NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
- "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
- # By specifying this parameter, you will download and install the following
- # third-party software onto your managed Compute Engine instances: NVIDIA®
- # Tesla® drivers and NVIDIA® CUDA toolkit.
- # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
- # available accelerator types.
- "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
"noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
# feature that may go away. Defaults to false.
# Corresponds to `--no_address` flag for [gcloud compute instances create]
@@ -1988,35 +845,1178 @@
# Before using this, you must
# [configure access to Google services from internal
# IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+        # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+      "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
"disks": [ # Disks to attach.
{ # A Google Compute Engine disk resource specification.
- "name": "A String", # Required. The name of the disk that can be used in the pipeline
- # parameters. Must be 1 - 63 characters.
- # The name "boot" is reserved for system use.
- "type": "A String", # Required. The type of the disk to create.
- "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
- # of the pipeline run, regardless of what this field is set to.
- "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
- # This field is not applicable for local SSD.
+ "source": "A String", # The full or partial URL of the persistent disk to attach. See
+ # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+ # and
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+ # for more details.
"mountPoint": "A String", # Required at create time and cannot be overridden at run time.
# Specifies the path in the docker container where files on
# this disk should be located. For example, if `mountPoint`
# is `/mnt/disk`, and the parameter has `localPath`
# `inputs/file.txt`, the docker container can access the data at
# `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
"readOnly": True or False, # Specifies how a sourced-base persistent disk will be mounted. See
# https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
# for more details.
# Can only be set at create time.
+ },
+ ],
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
+ },
+ "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
+ # WRITE access.
+ "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
+ # is called. Cannot be specified in the Pipeline used in the
+ # CreatePipelineRequest, and will be populated in the response to
+ # CreatePipeline and all subsequent Get and List calls. Indicates that the
+ # service has registered this pipeline.
+ "inputParameters": [ # Input parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+          #     <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+          # One restriction, due to Docker limitations, is that for outputs that are found
+ # on the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+            # disk's mount point. For example: `file.txt`.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "outputParameters": [ # Output parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+          #     <tr><th>Google Cloud storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+          # One restriction, due to Docker limitations, is that for outputs that are found
+ # on the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+            # disk's mount point. For example: `file.txt`.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "description": "A String", # User-specified description.
+    "docker": { # The Docker executor specification. # Specifies the docker run information.
+ "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
+ # Users that run pipelines must have READ access to the image.
+ "cmd": "A String", # Required. The command or newline delimited script to run. The command
+ # string will be executed within a bash shell.
+ #
+ # If the command exits with a non-zero exit code, output parameter
+ # de-localization will be skipped and the pipeline operation's
+ # `error` field will be populated.
+ #
+ # Maximum command string length is 16384.
+ },
+    "name": "A String", # Required. A user-specified pipeline name that does not have to be unique.
+ # This name can be used for filtering Pipelines in ListPipelines.
+ }</pre>
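+
+<p>Example (editorial sketch, not part of the generated reference): retrieving a
+pipeline by ID and reading a few fields of the returned object;
+<code>service</code> and the pipeline ID are placeholders as above.</p>
+<pre>
+pipeline = service.pipelines().get(pipelineId='my-pipeline-id').execute()
+print(pipeline['name'])
+print(pipeline['docker']['imageName'])
+</pre>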
+</div>
+
+<div class="method">
+ <code class="details" id="getControllerConfig">getControllerConfig(validationToken=None, operationId=None, x__xgafv=None)</code>
+ <pre>Gets controller configuration information. Should only be called
+by VMs created by the Pipelines Service and not by end users.
+
+Args:
+ validationToken: string, A parameter
+ operationId: string, The operation to retrieve controller configuration for.
+ x__xgafv: string, V1 error format.
+ Allowed values
+ 1 - v1 error format
+ 2 - v2 error format
+
+Returns:
+ An object of the form:
+
+ { # Stores the information that the controller will fetch from the
+ # server in order to run. Should only be used by VMs created by the
+ # Pipelines Service and not by end users.
+ "cmd": "A String",
+ "gcsSinks": {
+ "a_key": {
+ "values": [
+ "A String",
+ ],
+ },
+ },
+ "gcsLogPath": "A String",
+ "machineType": "A String",
+ "disks": {
+ "a_key": "A String",
+ },
+ "vars": {
+ "a_key": "A String",
+ },
+ "image": "A String",
+ "gcsSources": {
+ "a_key": {
+ "values": [
+ "A String",
+ ],
+ },
+ },
+ }</pre>
+</div>
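+
+<p>Example (editorial sketch, not part of the generated reference) for the
+<code>list</code> method documented below: paging through all matching
+pipelines with <code>list</code> and <code>list_next</code>; the project ID
+and name prefix are placeholders.</p>
+<pre>
+request = service.pipelines().list(projectId='my-project', namePrefix='hello')
+while request is not None:
+    response = request.execute()
+    for p in response.get('pipelines', []):
+        print(p['pipelineId'], p['name'])
+    # list_next returns None when there are no more pages.
+    request = service.pipelines().list_next(previous_request=request,
+                                            previous_response=response)
+</pre>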
+
+<div class="method">
+ <code class="details" id="list">list(namePrefix=None, pageSize=None, projectId=None, pageToken=None, x__xgafv=None)</code>
+ <pre>Lists pipelines.
+
+Caller must have READ permission to the project.
+
+Args:
+ namePrefix: string, Pipelines with names that match this prefix should be
+returned. If unspecified, all pipelines in the project, up to
+`pageSize`, will be returned.
+ pageSize: integer, Number of pipelines to return at once. Defaults to 256, and max
+is 2048.
+ projectId: string, Required. The name of the project to search for pipelines. Caller
+must have READ access to this project.
+ pageToken: string, Token to use to indicate where to start getting results.
+If unspecified, returns the first page of results.
+ x__xgafv: string, V1 error format.
+ Allowed values
+ 1 - v1 error format
+ 2 - v2 error format
+
+Returns:
+ An object of the form:
+
+ { # The response of ListPipelines. Contains at most `pageSize`
+ # pipelines. If it contains `pageSize` pipelines, and more pipelines
+ # exist, then `nextPageToken` will be populated and should be
+ # used as the `pageToken` argument to a subsequent ListPipelines
+ # request.
+ "nextPageToken": "A String", # The token to use to get the next page of results.
+ "pipelines": [ # The matched pipelines.
+ { # The pipeline object. Represents a transformation from a set of input
+ # parameters to a set of output parameters. The transformation is defined
+ # as a docker image and command to run within that image. Each pipeline
+ # is run on a Google Compute Engine VM. A pipeline can be created with the
+ # `create` method and then later run with the `run` method, or a pipeline can
+ # be defined and run all at once with the `run` method.
+ "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+ # Required fields:
+ #
+            # * minimumCpuCores
+            # * minimumRamGb
+ "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+              # it must be true at both create time and run time. It cannot be true at run time
+ # if false at create time.
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances:
+ # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+          "noAddress": True or False, # Whether to run the VM without an external IP address. This is an experimental
+ # feature that may go away. Defaults to false.
+ # Corresponds to `--no_address` flag for [gcloud compute instances create]
+ # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+              # In order to use this, it must be true at both create time and run time.
+              # It cannot be true at run time if false at create time. If you need to SSH
+              # into a private IP VM for debugging, you can SSH to a public VM and then
+              # SSH into the private VM's internal IP. If noAddress is set, this pipeline run may
+ # only load docker images from Google Container Registry and not Docker Hub.
+ # Before using this, you must
+ # [configure access to Google services from internal
+ # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+              # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+          "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB).
+ "disks": [ # Disks to attach.
+ { # A Google Compute Engine disk resource specification.
+ "source": "A String", # The full or partial URL of the persistent disk to attach. See
+ # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+ # and
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+ # for more details.
+ "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
+ # Specifies the path in the docker container where files on
+ # this disk should be located. For example, if `mountPoint`
+ # is `/mnt/disk`, and the parameter has `localPath`
+ # `inputs/file.txt`, the docker container can access the data at
+ # `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
+              "readOnly": True or False, # Specifies how a source-based persistent disk will be mounted. See
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+ # for more details.
+ # Can only be set at create time.
+ },
+ ],
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
+ },
+ "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
+ # WRITE access.
+ "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
+ # is called. Cannot be specified in the Pipeline used in the
+ # CreatePipelineRequest, and will be populated in the response to
+ # CreatePipeline and all subsequent Get and List calls. Indicates that the
+ # service has registered this pipeline.
+ "inputParameters": [ # Input parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+              #     <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+              # One restriction, due to Docker limitations, is that for outputs that are found
+ # on the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+ # disk's mount point. For example: file.txt.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "outputParameters": [ # Output parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # One restriction, due to Docker limitations, is that for outputs found on
+ # the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+ # disk's mount point. For example: file.txt.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "description": "A String", # User-specified description.
+ "docker": { # The Docker execuctor specification. # Specifies the docker run information.
+ "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
+ # Users that run pipelines must have READ access to the image.
+ "cmd": "A String", # Required. The command or newline delimited script to run. The command
+ # string will be executed within a bash shell.
+ #
+ # If the command exits with a non-zero exit code, output parameter
+ # de-localization will be skipped and the pipeline operation's
+ # `error` field will be populated.
+ #
+ # Maximum command string length is 16384.
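+ #
+ # For example (an illustrative script; the path assumes a disk mounted at
+ # `/mnt/disk`):
+ #
+ # ```
+ # wc -l /mnt/disk/file.txt > /mnt/disk/line_count.txt
+ # ```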
+ },
+ "name": "A String", # Required. A user specified pipeline name that does not have to be unique.
+ # This name can be used for filtering Pipelines in ListPipelines.
+ },
+ ],
+ }</pre>
+</div>
+
+<div class="method">
+ <code class="details" id="list_next">list_next(previous_request, previous_response)</code>
+ <pre>Retrieves the next page of results.
+
+Args:
+ previous_request: The request for the previous page. (required)
+ previous_response: The response from the request for the previous page. (required)
+
+Returns:
+ A request object that you can call 'execute()' on to request the next
+ page. Returns None if there are no more items in the collection.
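+
+Example (a minimal sketch; assumes an authorized `service` client built with
+googleapiclient.discovery.build('genomics', 'v1alpha2'), a hypothetical
+project id, and that the response lists pipelines under the `pipelines` key):
+
+  request = service.pipelines().list(projectId='my-project')
+  while request is not None:
+    response = request.execute()
+    for pipeline in response.get('pipelines', []):
+      print(pipeline['name'])
+    request = service.pipelines().list_next(request, response)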
+ </pre>
+</div>
+
+<div class="method">
+ <code class="details" id="run">run(body=None, x__xgafv=None)</code>
+ <pre>Runs a pipeline. If `pipelineId` is specified in the request, then
+run a saved pipeline. If `ephemeralPipeline` is specified, then run
+that pipeline once without saving a copy.
+
+The caller must have READ permission to the project where the pipeline
+is stored and WRITE permission to the project where the pipeline will be
+run, as VMs will be created and storage will be used.
+
+If a pipeline operation is still running after 6 days, it will be canceled.
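+
+Example (a minimal sketch; assumes an authorized `service` client and
+hypothetical project, pipeline, and bucket names):
+
+  operation = service.pipelines().run(body={
+      'pipelineId': 'my-pipeline-id',
+      'pipelineArgs': {
+          'projectId': 'my-project',
+          'logging': {'gcsPath': 'gs://my-bucket/logs/'},
+          'inputs': {'input_file': 'gs://my-bucket/bar.txt'},
+      },
+  }).execute()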
+
+Args:
+ body: object, The request body.
+ The object takes the form of:
+
+{ # The request to run a pipeline. If `pipelineId` is specified, it
+ # refers to a saved pipeline created with CreatePipeline and set as
+ # the `pipelineId` of the returned Pipeline object. If
+ # `ephemeralPipeline` is specified, that pipeline is run once
+ # with the given args and not saved. It is an error to specify both
+ # `pipelineId` and `ephemeralPipeline`. `pipelineArgs`
+ # must be specified.
+ "pipelineId": "A String", # The already created pipeline to run.
+ "ephemeralPipeline": { # The pipeline object. Represents a transformation from a set of input # A new pipeline object to run once and then delete.
+ # parameters to a set of output parameters. The transformation is defined
+ # as a docker image and command to run within that image. Each pipeline
+ # is run on a Google Compute Engine VM. A pipeline can be created with the
+ # `create` method and then later run with the `run` method, or a pipeline can
+ # be defined and run all at once with the `run` method.
+ "resources": { # The system resources for the pipeline run. # Required. Specifies resource requirements for the pipeline run.
+ # Required fields:
+ #
+ # * minimumCpuCores
+ # * minimumRamGb
+ "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+ # must be true for both create time and run time. Cannot be true at run time
+ # if false at create time.
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances:
+ # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+ "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
+ # feature that may go away. Defaults to false.
+ # Corresponds to `--no_address` flag for [gcloud compute instances create]
+ # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+ # In order to use this, must be true for both create time and run time.
+ # Cannot be true at run time if false at create time. If you need to ssh into
+ # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+ # the private VM's Internal IP. If noAddress is set, this pipeline run may
+ # only load docker images from Google Container Registry and not Docker Hub.
+ # Before using this, you must
+ # [configure access to Google services from internal
+ # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+ # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+ "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+ "disks": [ # Disks to attach.
+ { # A Google Compute Engine disk resource specification.
+ "source": "A String", # The full or partial URL of the persistent disk to attach. See
+ # https://cloud.google.com/compute/docs/reference/latest/instances#resource
+ # and
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
+ # for more details.
+ "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
+ # Specifies the path in the docker container where files on
+ # this disk should be located. For example, if `mountPoint`
+ # is `/mnt/disk`, and the parameter has `localPath`
+ # `inputs/file.txt`, the docker container can access the data at
+ # `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
+ "readOnly": True or False, # Specifies how a sourced-base persistent disk will be mounted. See
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+ # for more details.
+ # Can only be set at create time.
+ },
+ ],
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
+ },
+ "projectId": "A String", # Required. The project in which to create the pipeline. The caller must have
+ # WRITE access.
+ "pipelineId": "A String", # Unique pipeline id that is generated by the service when CreatePipeline
+ # is called. Cannot be specified in the Pipeline used in the
+ # CreatePipelineRequest, and will be populated in the response to
+ # CreatePipeline and all subsequent Get and List calls. Indicates that the
+ # service has registered this pipeline.
+ "inputParameters": [ # Input parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # One restriction, due to Docker limitations, is that for outputs found on
+ # the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+ # disk's mount point. For example: file.txt.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "outputParameters": [ # Output parameters of the pipeline.
+ { # Parameters facilitate setting and delivering data into the
+ # pipeline's execution environment. They are defined at create time,
+ # with optional defaults, and can be overridden at run time.
+ #
+ # If `localCopy` is unset, then the parameter specifies a string that
+ # is passed as-is into the pipeline, as the value of the environment
+ # variable with the given name. A default value can be optionally
+ # specified at create time. The default can be overridden at run time
+ # using the inputs map. If no default is given, a value must be
+ # supplied at runtime.
+ #
+ # If `localCopy` is defined, then the parameter specifies a data
+ # source or sink, both in Google Cloud Storage and on the Docker container
+ # where the pipeline computation is run. The service account associated with
+ # the Pipeline (by
+ # default the project's Compute Engine service account) must have access to the
+ # Google Cloud Storage paths.
+ #
+ # At run time, the Google Cloud Storage paths can be overridden if a default
+ # was provided at create time, or must be set otherwise. The pipeline runner
+ # should add a key/value pair to either the inputs or outputs map. The
+ # indicated data copies will be carried out before/after pipeline execution,
+ # just as if the corresponding arguments were provided to `gsutil cp`.
+ #
+ # For example: Given the following `PipelineParameter`, specified
+ # in the `inputParameters` list:
+ #
+ # ```
+ # {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}}
+ # ```
+ #
+ # where `disk` is defined in the `PipelineResources` object as:
+ #
+ # ```
+ # {name: "pd1", mountPoint: "/mnt/disk/"}
+ # ```
+ #
+ # We create a disk named `pd1`, mount it on the host VM, and map
+ # `/mnt/pd1` to `/mnt/disk` in the docker container. At
+ # runtime, an entry for `input_file` would be required in the inputs
+ # map, such as:
+ #
+ # ```
+ # inputs["input_file"] = "gs://my-bucket/bar.txt"
+ # ```
+ #
+ # This would generate the following gsutil call:
+ #
+ # ```
+ # gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt
+ # ```
+ #
+ # The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the
+ # Docker container. Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Google Cloud Storage path</th><th>Local path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr><td>glob</td><td>directory</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # For outputs, the direction of the copy is reversed:
+ #
+ # ```
+ # gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+ # ```
+ #
+ # Acceptable paths are:
+ #
+ # <table>
+ # <thead>
+ # <tr><th>Local path</th><th>Google Cloud Storage path</th></tr>
+ # </thead>
+ # <tbody>
+ # <tr><td>file</td><td>file</td></tr>
+ # <tr>
+ # <td>file</td>
+ # <td>directory - directory must already exist</td>
+ # </tr>
+ # <tr>
+ # <td>glob</td>
+ # <td>directory - directory will be created if it doesn't exist</td></tr>
+ # </tbody>
+ # </table>
+ #
+ # One restriction, due to Docker limitations, is that for outputs found on
+ # the boot disk, the local path cannot be a glob and must be a file.
+ "localCopy": { # LocalCopy defines how a remote file should be copied to and from the VM. # If present, this parameter is marked for copying to and from the VM.
+ # `LocalCopy` indicates where on the VM the file should be. The value
+ # given to this parameter (either at runtime or using `defaultValue`)
+ # must be the remote path where the file should be.
+ "disk": "A String", # Required. The name of the disk where this parameter is
+ # located. Can be the name of one of the disks specified in the
+ # Resources field, or "boot", which represents the Docker
+ # instance's boot disk and has a mount point of `/`.
+ "path": "A String", # Required. The path within the user's docker container where
+ # this input should be localized to and from, relative to the specified
+ # disk's mount point. For example: file.txt.
+ },
+ "description": "A String", # Human-readable description.
+ "defaultValue": "A String", # The default value for this parameter. Can be overridden at runtime.
+ # If `localCopy` is present, then this must be a Google Cloud Storage path
+ # beginning with `gs://`.
+ "name": "A String", # Required. Name of the parameter - the pipeline runner uses this string
+ # as the key to the input and output maps in RunPipeline.
+ },
+ ],
+ "description": "A String", # User-specified description.
+ "docker": { # The Docker execuctor specification. # Specifies the docker run information.
+ "imageName": "A String", # Required. Image name from either Docker Hub or Google Container Registry.
+ # Users that run pipelines must have READ access to the image.
+ "cmd": "A String", # Required. The command or newline delimited script to run. The command
+ # string will be executed within a bash shell.
+ #
+ # If the command exits with a non-zero exit code, output parameter
+ # de-localization will be skipped and the pipeline operation's
+ # `error` field will be populated.
+ #
+ # Maximum command string length is 16384.
+ },
+ "name": "A String", # Required. A user specified pipeline name that does not have to be unique.
+ # This name can be used for filtering Pipelines in ListPipelines.
+ },
+ "pipelineArgs": { # The pipeline run arguments. # The arguments to use when running this pipeline.
+ "logging": { # The logging options for the pipeline run. # Required. Logging options. Used by the service to communicate results
+ # to the user.
+ "gcsPath": "A String", # The location in Google Cloud Storage to which the pipeline logs
+ # will be copied. Can be specified as a fully qualified directory
+ # path, in which case logs will be output with a unique identifier
+ # as the filename in that directory, or as a fully specified path,
+ # which must end in `.log`, in which case that path will be
+ # used, and the user must ensure that logs are not
+ # overwritten. Stdout and stderr logs from the run are also
+ # generated and output as `-stdout.log` and `-stderr.log`.
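+ # For example (illustrative paths): `gs://my-bucket/logs/` writes logs with
+ # generated filenames under that directory, while `gs://my-bucket/logs/run.log`
+ # writes the main log to that exact object.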
+ },
+ "inputs": { # Pipeline input arguments; keys are defined in the pipeline documentation.
+ # All input parameters that do not have default values must be specified.
+ # If parameters with defaults are specified here, the defaults will be
+ # overridden.
+ "a_key": "A String",
+ },
+ "resources": { # The system resources for the pipeline run. # Specifies resource requirements/overrides for the pipeline run.
+ "preemptible": True or False, # Whether to use preemptible VMs. Defaults to `false`. In order to use this,
+ # must be true for both create time and run time. Cannot be true at run time
+ # if false at create time.
+ "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorCount": "A String", # Optional. The number of accelerators of the specified type to attach.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances:
+ # NVIDIA® Tesla® drivers and NVIDIA® CUDA toolkit.
+ "noAddress": True or False, # Whether to assign an external IP to the instance. This is an experimental
+ # feature that may go away. Defaults to false.
+ # Corresponds to `--no_address` flag for [gcloud compute instances create]
+ # (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create).
+ # In order to use this, must be true for both create time and run time.
+ # Cannot be true at run time if false at create time. If you need to ssh into
+ # a private IP VM for debugging, you can ssh to a public VM and then ssh into
+ # the private VM's Internal IP. If noAddress is set, this pipeline run may
+ # only load docker images from Google Container Registry and not Docker Hub.
+ # Before using this, you must
+ # [configure access to Google services from internal
+ # IPs](https://cloud.google.com/compute/docs/configure-private-google-access#configuring_access_to_google_services_from_internal_ips).
+ "zones": [ # List of Google Compute Engine availability zones to which resource
+ # creation will be restricted. If empty, any zone may be chosen.
+ "A String",
+ ],
+ "minimumRamGb": 3.14, # The minimum amount of RAM to use. Defaults to 3.75 (GB)
+ "disks": [ # Disks to attach.
+ { # A Google Compute Engine disk resource specification.
"source": "A String", # The full or partial URL of the persistent disk to attach. See
# https://cloud.google.com/compute/docs/reference/latest/instances#resource
# and
# https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots
# for more details.
+ "mountPoint": "A String", # Required at create time and cannot be overridden at run time.
+ # Specifies the path in the docker container where files on
+ # this disk should be located. For example, if `mountPoint`
+ # is `/mnt/disk`, and the parameter has `localPath`
+ # `inputs/file.txt`, the docker container can access the data at
+ # `/mnt/disk/inputs/file.txt`.
+ "autoDelete": True or False, # Deprecated. Disks created by the Pipelines API will be deleted at the end
+ # of the pipeline run, regardless of what this field is set to.
+ "name": "A String", # Required. The name of the disk that can be used in the pipeline
+ # parameters. Must be 1 - 63 characters.
+ # The name "boot" is reserved for system use.
+ "type": "A String", # Required. The type of the disk to create.
+ "sizeGb": 42, # The size of the disk. Defaults to 500 (GB).
+ # This field is not applicable for local SSD.
+ "readOnly": True or False, # Specifies how a sourced-base persistent disk will be mounted. See
+ # https://cloud.google.com/compute/docs/disks/persistent-disks#use_multi_instances
+ # for more details.
+ # Can only be set at create time.
},
],
- "bootDiskSizeGb": 42, # The size of the boot disk. Defaults to 10 (GB).
+ "acceleratorType": "A String", # Optional. The Compute Engine defined accelerator type.
+ # By specifying this parameter, you will download and install the following
+ # third-party software onto your managed Compute Engine instances: NVIDIA®
+ # Tesla® drivers and NVIDIA® CUDA toolkit.
+ # Please see https://cloud.google.com/compute/docs/gpus/ for a list of
+ # available accelerator types.
+ "minimumCpuCores": 42, # The minimum number of cores to use. Defaults to 1.
},
+ "labels": { # Labels to apply to this pipeline run. Labels will also be applied to
+ # compute resources (VM, disks) created by this pipeline run. When listing
+ # operations, operations can be filtered by labels.
+ # Label keys may not be empty; label values may be empty. Non-empty labels
+ # must be 1-63 characters long, and comply with [RFC1035]
+ # (https://www.ietf.org/rfc/rfc1035.txt).
+ # Specifically, the name must be 1-63 characters long and match the regular
+ # expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
+ # character must be a lowercase letter, and all following characters must be
+ # a dash, lowercase letter, or digit, except the last character, which cannot
+ # be a dash.
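+ # For example (illustrative labels): {"env": "test", "run-stage": "qc"}.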
+ "a_key": "A String",
+ },
+ "serviceAccount": { # A Google Cloud Service Account. # The Google Cloud Service Account that will be used to access data and
+ # services. By default, the compute service account associated with
+ # `projectId` is used.
+ "scopes": [ # List of scopes to be enabled for this service account on the VM.
+ # The following scopes are automatically included:
+ #
+ # * https://www.googleapis.com/auth/compute
+ # * https://www.googleapis.com/auth/devstorage.full_control
+ # * https://www.googleapis.com/auth/genomics
+ # * https://www.googleapis.com/auth/logging.write
+ # * https://www.googleapis.com/auth/monitoring.write
+ "A String",
+ ],
+ "email": "A String", # Email address of the service account. Defaults to `default`,
+ # which uses the compute service account associated with the project.
+ },
+ "clientId": "A String", # This field is deprecated. Use `labels` instead. Client-specified pipeline
+ # operation identifier.
+ "projectId": "A String", # Required. The project in which to run the pipeline. The caller must have
+ # WRITE access to all Google Cloud services and resources (e.g. Google
+ # Compute Engine) that will be used.
+ "outputs": { # Pipeline output arguments; keys are defined in the pipeline
+ # documentation. All output parameters without default values
+ # must be specified. If parameters with defaults are specified
+ # here, the defaults will be overridden.
+ "a_key": "A String",
+ },
+ "keepVmAliveOnFailureDuration": "A String", # How long to keep the VM up after a failure (for example docker command
+ # failed, copying input or output files failed, etc). While the VM is up, one
+ # can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
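+ # The value is a duration string; assuming the standard protobuf Duration
+ # JSON encoding, it is decimal seconds with an `s` suffix, for example
+ # "86400s" for 1 day.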
},
}
@@ -2030,7 +2030,6 @@
{ # This resource represents a long-running operation that is the result of a
# network API call.
- "name": "A String", # The server-assigned name, which is only unique within the same service that originally returns it. For example&#58; `operations/CJHU7Oi_ChDrveSpBRjfuL-qzoWAgEw`
"error": { # The `Status` type defines a logical error model that is suitable for # The error result of the operation in case of failure or cancellation.
# different programming environments, including REST APIs and RPC APIs. It is
# used by [gRPC](https://github.com/grpc). Each `Status` message contains
@@ -2039,19 +2038,20 @@
# You can find out more about this error model and how to work with it in the
# [API Design Guide](https://cloud.google.com/apis/design/errors).
"code": 42, # The status code, which should be an enum value of google.rpc.Code.
- "message": "A String", # A developer-facing error message, which should be in English. Any
- # user-facing error message should be localized and sent in the
- # google.rpc.Status.details field, or localized by the client.
"details": [ # A list of messages that carry the error details. There is a common set of
# message types for APIs to use.
{
"a_key": "", # Properties of the object. Contains field @type with type URL.
},
],
+ "message": "A String", # A developer-facing error message, which should be in English. Any
+ # user-facing error message should be localized and sent in the
+ # google.rpc.Status.details field, or localized by the client.
},
"metadata": { # An OperationMetadata or Metadata object. This will always be returned with the Operation.
"a_key": "", # Properties of the object. Contains field @type with type URL.
},
+ "name": "A String", # The server-assigned name, which is only unique within the same service that originally returns it. For example&#58; `operations/CJHU7Oi_ChDrveSpBRjfuL-qzoWAgEw`
"done": True or False, # If the value is `false`, it means the operation is still in progress.
# If `true`, the operation is completed, and either `error` or `response` is
# available.
@@ -2073,9 +2073,8 @@
{ # Request to set operation status. Should only be used by VMs
# created by the Pipelines Service and not by end users.
- "errorMessage": "A String",
- "validationToken": "A String",
"errorCode": "A String",
+ "operationId": "A String",
"timestampEvents": [
{ # Stores the list of events and times they occurred for major events in job
# execution.
@@ -2083,7 +2082,8 @@
"description": "A String", # String indicating the type of event
},
],
- "operationId": "A String",
+ "errorMessage": "A String",
+ "validationToken": "A String",
}
x__xgafv: string, V1 error format.