docs: update generated docs (#1053)
Updates both the discovery docs and the epydoc API documentation.
Fixes: #1049
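
For context, a minimal sketch of how the regenerated `sqladmin_v1beta4.databases` surface can be exercised with google-api-python-client, including the newly documented `close()` helper. The project, instance, and database names are placeholders, and application-default credentials are assumed; this is illustrative only, not part of the generated change.

```python
# Minimal usage sketch (assumes google-api-python-client is installed and
# application-default credentials are configured in the environment).
from googleapiclient import discovery

# Build the Cloud SQL Admin API client from the discovery document.
service = discovery.build("sqladmin", "v1beta4")

# databases().get() returns the Database resource documented below.
db = service.databases().get(
    project="my-project",      # placeholder
    instance="my-instance",    # placeholder
    database="my-database",    # placeholder
).execute()
print(db.get("charset"), db.get("collation"))

# close() is the newly documented method that closes httplib2 connections.
service.close()
```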
diff --git a/docs/dyn/sqladmin_v1beta4.databases.html b/docs/dyn/sqladmin_v1beta4.databases.html
index 73f9adb..d2ac5ac 100644
--- a/docs/dyn/sqladmin_v1beta4.databases.html
+++ b/docs/dyn/sqladmin_v1beta4.databases.html
@@ -75,25 +75,33 @@
<h1><a href="sqladmin_v1beta4.html">Cloud SQL Admin API</a> . <a href="sqladmin_v1beta4.databases.html">databases</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
+ <code><a href="#close">close()</a></code></p>
+<p class="firstline">Close httplib2 connections.</p>
+<p class="toc_element">
<code><a href="#delete">delete(project, instance, database, x__xgafv=None)</a></code></p>
<p class="firstline">Deletes a database from a Cloud SQL instance.</p>
<p class="toc_element">
<code><a href="#get">get(project, instance, database, x__xgafv=None)</a></code></p>
-<p class="firstline">Retrieves a resource containing information about a database inside a Cloud</p>
+<p class="firstline">Retrieves a resource containing information about a database inside a Cloud SQL instance.</p>
<p class="toc_element">
<code><a href="#insert">insert(project, instance, body=None, x__xgafv=None)</a></code></p>
-<p class="firstline">Inserts a resource containing information about a database inside a Cloud</p>
+<p class="firstline">Inserts a resource containing information about a database inside a Cloud SQL instance.</p>
<p class="toc_element">
<code><a href="#list">list(project, instance, x__xgafv=None)</a></code></p>
<p class="firstline">Lists databases in the specified Cloud SQL instance.</p>
<p class="toc_element">
<code><a href="#patch">patch(project, instance, database, body=None, x__xgafv=None)</a></code></p>
-<p class="firstline">Partially updates a resource containing information about a database inside</p>
+<p class="firstline">Partially updates a resource containing information about a database inside a Cloud SQL instance. This method supports patch semantics.</p>
<p class="toc_element">
<code><a href="#update">update(project, instance, database, body=None, x__xgafv=None)</a></code></p>
-<p class="firstline">Updates a resource containing information about a database inside a Cloud</p>
+<p class="firstline">Updates a resource containing information about a database inside a Cloud SQL instance.</p>
<h3>Method Details</h3>
<div class="method">
+ <code class="details" id="close">close()</code>
+ <pre>Close httplib2 connections.</pre>
+</div>
+
+<div class="method">
<code class="details" id="delete">delete(project, instance, database, x__xgafv=None)</code>
<pre>Deletes a database from a Cloud SQL instance.
@@ -109,130 +117,76 @@
Returns:
An object of the form:
- { # An Operation resource.&nbsp;For successful operations that return an
- # Operation resource, only the fields relevant to the operation are populated
- # in the resource.
- "targetLink": "A String",
- "operationType": "A String", # The type of the operation. Valid values are <code>CREATE</code>,
- # <code>DELETE</code>, <code>UPDATE</code>, <code>RESTART</code>,
- # <code>IMPORT</code>, <code>EXPORT</code>, <code>BACKUP_VOLUME</code>,
- # <code>RESTORE_VOLUME</code>, <code>CREATE_USER</code>,
- # <code>DELETE_USER</code>, <code>CREATE_DATABASE</code>,
- # <code>DELETE_DATABASE</code> .
- "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be
- # populated.
+ { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
+ "kind": "A String", # This is always *sql#operation*.
+ "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
+ "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
"errors": [ # The list of errors encountered while processing this operation.
{ # Database instance operation error.
- "kind": "A String", # This is always <code>sql#operationError</code>.
"code": "A String", # Identifies the specific error that occurred.
+ "kind": "A String", # This is always *sql#operationError*.
"message": "A String", # Additional information about the error encountered.
},
],
- "kind": "A String", # This is always <code>sql#operationErrors</code>.
+ "kind": "A String", # This is always *sql#operationErrors*.
},
- "kind": "A String", # This is always <code>sql#operation</code>.
- "importContext": { # Database instance import context. # The context for import operation, if applicable.
- "database": "A String", # The target database for the import. If <code>fileType</code> is
- # <code>SQL</code>, this field is required only if the import file does not
- # specify a database, and is overridden by any database specification in the
- # import file. If <code>fileType</code> is <code>CSV</code>, one database
- # must be specified.
- "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
- "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
- "encryptionOptions": {
- "pvkPassword": "A String", # Password that encrypts the private key
- "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form
- # <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the
- # form <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- },
- },
- "uri": "A String", # Path to the import file in Cloud Storage, in the form
- # <code>gs:
- # //bucketName/fileName</code>. Compressed gzip files (.gz) are supported
- # // when <code>fileType</code> is <code>SQL</code>. The instance must have
- # // write permissions to the bucket and read access to the file.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "kind": "A String", # This is always <code>sql#importContext</code>.
- "csvImportOptions": { # Options for importing data as CSV.
- "table": "A String", # The table to which CSV data is imported.
- "columns": [ # The columns to which CSV data is imported. If not specified, all columns
- # of the database table are loaded with CSV data.
- "A String",
- ],
- },
- },
- "status": "A String", # The status of an operation. Valid values are <code>PENDING</code>,
- # <code>RUNNING</code>, <code>DONE</code>,
- # <code>SQL_OPERATION_STATUS_UNSPECIFIED</code>.
- "name": "A String", # An identifier that uniquely identifies the operation. You can use this
- # identifier to retrieve the Operations resource that has information about
- # the operation.
+ "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+ "selfLink": "A String", # The URI of this resource.
+ "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
"exportContext": { # Database instance export context. # The context for export operation, if applicable.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "uri": "A String", # The path to the file in Google Cloud Storage where the export will be
- # stored. The URI is in the form <code>gs:
- # //bucketName/fileName</code>. If the file already exists, the requests
- # // succeeds, but the operation fails. If <code>fileType</code> is
- # // <code>SQL</code> and the filename ends with .gz, the contents are
- # // compressed.
"csvExportOptions": { # Options for exporting data as CSV.
"selectQuery": "A String", # The select query used to extract the data.
},
- "kind": "A String", # This is always <code>sql#exportContext</code>.
- "databases": [ # Databases to be exported. <br /> <b>MySQL instances:</b> If
- # <code>fileType</code> is <code>SQL</code> and no database is specified, all
- # databases are exported, except for the <code>mysql</code> system database.
- # If <code>fileType</code> is <code>CSV</code>, you can specify one database,
- # either by using this property or by using the
- # <code>csvExportOptions.selectQuery</code> property, which takes precedence
- # over this property. <br /> <b>PostgreSQL instances:</b> You must specify
- # one database to be exported. If <code>fileType</code> is <code>CSV</code>,
- # this database must match the one specified in the
- # <code>csvExportOptions.selectQuery</code> property.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#exportContext*.
+ "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs: //bucketName/fileName*. If the file already exists, the requests // succeeds, but the operation fails. If *fileType* is // *SQL* and the filename ends with .gz, the contents are // compressed.
+ "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property.
"A String",
],
+ "offload": True or False, # Option for export offload.
"sqlExportOptions": { # Options for exporting data as SQL statements.
- "tables": [ # Tables to export, or that were exported, from the specified database. If
- # you specify tables, specify one and only one database. For PostgreSQL
- # instances, you can specify only one table.
+ "schemaOnly": True or False, # Export only schemas.
+ "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
"A String",
],
- "schemaOnly": True or False, # Export only schemas.
"mysqlExportOptions": { # Options for exporting from MySQL.
- "masterData": 42, # Option to include SQL statement required to set up replication.
- # If set to <code>1</code>, the dump file includes
- # a CHANGE MASTER TO statement with the binary log coordinates.
- # If set to <code>2</code>, the CHANGE MASTER TO statement is written as
- # a SQL comment, and has no effect.
- # All other values are ignored.
+ "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment, and has no effect. All other values are ignored.
},
},
},
- "targetId": "A String", # Name of the database instance related to this operation.
- "endTime": "A String", # The time this operation finished in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "startTime": "A String", # The time this operation actually started in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "insertTime": "A String", # The time this operation was enqueued in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
"user": "A String", # The email address of the user who initiated this operation.
+ "targetId": "A String", # Name of the database instance related to this operation.
+ "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
"targetProject": "A String", # The project ID of the target instance related to this operation.
- "selfLink": "A String", # The URI of this resource.
+ "targetLink": "A String",
+ "importContext": { # Database instance import context. # The context for import operation, if applicable.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#importContext*.
+ "csvImportOptions": { # Options for importing data as CSV.
+ "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data.
+ "A String",
+ ],
+ "table": "A String", # The table to which CSV data is imported.
+ },
+ "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
+ "encryptionOptions": {
+ "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "pvkPassword": "A String", # Password that encrypts the private key
+ },
+ },
+ "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
+ "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs: //bucketName/fileName*. Compressed gzip files (.gz) are supported // when *fileType* is *SQL*. The instance must have // write permissions to the bucket and read access to the file.
+ "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
+ },
}</pre>
</div>
<div class="method">
<code class="details" id="get">get(project, instance, database, x__xgafv=None)</code>
- <pre>Retrieves a resource containing information about a database inside a Cloud
-SQL instance.
+ <pre>Retrieves a resource containing information about a database inside a Cloud SQL instance.
Args:
project: string, Project ID of the project that contains the instance. (required)
@@ -247,28 +201,24 @@
An object of the form:
{ # Represents a SQL database on the Cloud SQL instance.
- "collation": "A String", # The MySQL collation value.
- "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
- "name": "A String", # The name of the database in the Cloud SQL instance. This does not include
- # the project ID or instance name.
- "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google
- # apps domain is prefixed if applicable.
- "selfLink": "A String", # The URI of this resource.
+ "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
"sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance.
- "recoveryModel": "A String", # The recovery model of a SQL Server database
"compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible
+ "recoveryModel": "A String", # The recovery model of a SQL Server database
},
- "charset": "A String", # The MySQL charset value.
- "kind": "A String", # This is always <code>sql#database</code>.
- "etag": "A String", # This field is deprecated and will be removed from a future version of the
- # API.
+ "charset": "A String", # The Cloud SQL charset value.
+ "kind": "A String", # This is always *sql#database*.
+ "collation": "A String", # The Cloud SQL collation value.
+ "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
+ "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
+ "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
+ "selfLink": "A String", # The URI of this resource.
}</pre>
</div>
<div class="method">
<code class="details" id="insert">insert(project, instance, body=None, x__xgafv=None)</code>
- <pre>Inserts a resource containing information about a database inside a Cloud
-SQL instance.
+ <pre>Inserts a resource containing information about a database inside a Cloud SQL instance.
Args:
project: string, Project ID of the project that contains the instance. (required)
@@ -277,21 +227,18 @@
The object takes the form of:
{ # Represents a SQL database on the Cloud SQL instance.
- "collation": "A String", # The MySQL collation value.
- "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
- "name": "A String", # The name of the database in the Cloud SQL instance. This does not include
- # the project ID or instance name.
- "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google
- # apps domain is prefixed if applicable.
- "selfLink": "A String", # The URI of this resource.
+ "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
"sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance.
- "recoveryModel": "A String", # The recovery model of a SQL Server database
"compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible
+ "recoveryModel": "A String", # The recovery model of a SQL Server database
},
- "charset": "A String", # The MySQL charset value.
- "kind": "A String", # This is always <code>sql#database</code>.
- "etag": "A String", # This field is deprecated and will be removed from a future version of the
- # API.
+ "charset": "A String", # The Cloud SQL charset value.
+ "kind": "A String", # This is always *sql#database*.
+ "collation": "A String", # The Cloud SQL collation value.
+ "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
+ "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
+ "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
+ "selfLink": "A String", # The URI of this resource.
}
x__xgafv: string, V1 error format.
@@ -302,123 +249,70 @@
Returns:
An object of the form:
- { # An Operation resource.&nbsp;For successful operations that return an
- # Operation resource, only the fields relevant to the operation are populated
- # in the resource.
- "targetLink": "A String",
- "operationType": "A String", # The type of the operation. Valid values are <code>CREATE</code>,
- # <code>DELETE</code>, <code>UPDATE</code>, <code>RESTART</code>,
- # <code>IMPORT</code>, <code>EXPORT</code>, <code>BACKUP_VOLUME</code>,
- # <code>RESTORE_VOLUME</code>, <code>CREATE_USER</code>,
- # <code>DELETE_USER</code>, <code>CREATE_DATABASE</code>,
- # <code>DELETE_DATABASE</code> .
- "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be
- # populated.
+ { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
+ "kind": "A String", # This is always *sql#operation*.
+ "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
+ "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
"errors": [ # The list of errors encountered while processing this operation.
{ # Database instance operation error.
- "kind": "A String", # This is always <code>sql#operationError</code>.
"code": "A String", # Identifies the specific error that occurred.
+ "kind": "A String", # This is always *sql#operationError*.
"message": "A String", # Additional information about the error encountered.
},
],
- "kind": "A String", # This is always <code>sql#operationErrors</code>.
+ "kind": "A String", # This is always *sql#operationErrors*.
},
- "kind": "A String", # This is always <code>sql#operation</code>.
- "importContext": { # Database instance import context. # The context for import operation, if applicable.
- "database": "A String", # The target database for the import. If <code>fileType</code> is
- # <code>SQL</code>, this field is required only if the import file does not
- # specify a database, and is overridden by any database specification in the
- # import file. If <code>fileType</code> is <code>CSV</code>, one database
- # must be specified.
- "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
- "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
- "encryptionOptions": {
- "pvkPassword": "A String", # Password that encrypts the private key
- "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form
- # <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the
- # form <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- },
- },
- "uri": "A String", # Path to the import file in Cloud Storage, in the form
- # <code>gs:
- # //bucketName/fileName</code>. Compressed gzip files (.gz) are supported
- # // when <code>fileType</code> is <code>SQL</code>. The instance must have
- # // write permissions to the bucket and read access to the file.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "kind": "A String", # This is always <code>sql#importContext</code>.
- "csvImportOptions": { # Options for importing data as CSV.
- "table": "A String", # The table to which CSV data is imported.
- "columns": [ # The columns to which CSV data is imported. If not specified, all columns
- # of the database table are loaded with CSV data.
- "A String",
- ],
- },
- },
- "status": "A String", # The status of an operation. Valid values are <code>PENDING</code>,
- # <code>RUNNING</code>, <code>DONE</code>,
- # <code>SQL_OPERATION_STATUS_UNSPECIFIED</code>.
- "name": "A String", # An identifier that uniquely identifies the operation. You can use this
- # identifier to retrieve the Operations resource that has information about
- # the operation.
+ "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+ "selfLink": "A String", # The URI of this resource.
+ "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
"exportContext": { # Database instance export context. # The context for export operation, if applicable.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "uri": "A String", # The path to the file in Google Cloud Storage where the export will be
- # stored. The URI is in the form <code>gs:
- # //bucketName/fileName</code>. If the file already exists, the requests
- # // succeeds, but the operation fails. If <code>fileType</code> is
- # // <code>SQL</code> and the filename ends with .gz, the contents are
- # // compressed.
"csvExportOptions": { # Options for exporting data as CSV.
"selectQuery": "A String", # The select query used to extract the data.
},
- "kind": "A String", # This is always <code>sql#exportContext</code>.
- "databases": [ # Databases to be exported. <br /> <b>MySQL instances:</b> If
- # <code>fileType</code> is <code>SQL</code> and no database is specified, all
- # databases are exported, except for the <code>mysql</code> system database.
- # If <code>fileType</code> is <code>CSV</code>, you can specify one database,
- # either by using this property or by using the
- # <code>csvExportOptions.selectQuery</code> property, which takes precedence
- # over this property. <br /> <b>PostgreSQL instances:</b> You must specify
- # one database to be exported. If <code>fileType</code> is <code>CSV</code>,
- # this database must match the one specified in the
- # <code>csvExportOptions.selectQuery</code> property.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#exportContext*.
+ "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs: //bucketName/fileName*. If the file already exists, the requests // succeeds, but the operation fails. If *fileType* is // *SQL* and the filename ends with .gz, the contents are // compressed.
+ "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property.
"A String",
],
+ "offload": True or False, # Option for export offload.
"sqlExportOptions": { # Options for exporting data as SQL statements.
- "tables": [ # Tables to export, or that were exported, from the specified database. If
- # you specify tables, specify one and only one database. For PostgreSQL
- # instances, you can specify only one table.
+ "schemaOnly": True or False, # Export only schemas.
+ "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
"A String",
],
- "schemaOnly": True or False, # Export only schemas.
"mysqlExportOptions": { # Options for exporting from MySQL.
- "masterData": 42, # Option to include SQL statement required to set up replication.
- # If set to <code>1</code>, the dump file includes
- # a CHANGE MASTER TO statement with the binary log coordinates.
- # If set to <code>2</code>, the CHANGE MASTER TO statement is written as
- # a SQL comment, and has no effect.
- # All other values are ignored.
+ "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment, and has no effect. All other values are ignored.
},
},
},
- "targetId": "A String", # Name of the database instance related to this operation.
- "endTime": "A String", # The time this operation finished in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "startTime": "A String", # The time this operation actually started in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "insertTime": "A String", # The time this operation was enqueued in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
"user": "A String", # The email address of the user who initiated this operation.
+ "targetId": "A String", # Name of the database instance related to this operation.
+ "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
"targetProject": "A String", # The project ID of the target instance related to this operation.
- "selfLink": "A String", # The URI of this resource.
+ "targetLink": "A String",
+ "importContext": { # Database instance import context. # The context for import operation, if applicable.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#importContext*.
+ "csvImportOptions": { # Options for importing data as CSV.
+ "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data.
+ "A String",
+ ],
+ "table": "A String", # The table to which CSV data is imported.
+ },
+ "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
+ "encryptionOptions": {
+ "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "pvkPassword": "A String", # Password that encrypts the private key
+ },
+ },
+ "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
+ "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs: //bucketName/fileName*. Compressed gzip files (.gz) are supported // when *fileType* is *SQL*. The instance must have // write permissions to the bucket and read access to the file.
+ "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
+ },
}</pre>
</div>
@@ -438,24 +332,21 @@
An object of the form:
{ # Database list response.
- "kind": "A String", # This is always <code>sql#databasesList</code>.
+ "kind": "A String", # This is always *sql#databasesList*.
"items": [ # List of database resources in the instance.
{ # Represents a SQL database on the Cloud SQL instance.
- "collation": "A String", # The MySQL collation value.
- "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
- "name": "A String", # The name of the database in the Cloud SQL instance. This does not include
- # the project ID or instance name.
- "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google
- # apps domain is prefixed if applicable.
- "selfLink": "A String", # The URI of this resource.
+ "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
"sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance.
- "recoveryModel": "A String", # The recovery model of a SQL Server database
"compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible
+ "recoveryModel": "A String", # The recovery model of a SQL Server database
},
- "charset": "A String", # The MySQL charset value.
- "kind": "A String", # This is always <code>sql#database</code>.
- "etag": "A String", # This field is deprecated and will be removed from a future version of the
- # API.
+ "charset": "A String", # The Cloud SQL charset value.
+ "kind": "A String", # This is always *sql#database*.
+ "collation": "A String", # The Cloud SQL collation value.
+ "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
+ "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
+ "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
+ "selfLink": "A String", # The URI of this resource.
},
],
}</pre>
@@ -463,8 +354,7 @@
<div class="method">
<code class="details" id="patch">patch(project, instance, database, body=None, x__xgafv=None)</code>
- <pre>Partially updates a resource containing information about a database inside
-a Cloud SQL instance. This method supports patch semantics.
+ <pre>Partially updates a resource containing information about a database inside a Cloud SQL instance. This method supports patch semantics.
Args:
project: string, Project ID of the project that contains the instance. (required)
@@ -474,21 +364,18 @@
The object takes the form of:
{ # Represents a SQL database on the Cloud SQL instance.
- "collation": "A String", # The MySQL collation value.
- "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
- "name": "A String", # The name of the database in the Cloud SQL instance. This does not include
- # the project ID or instance name.
- "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google
- # apps domain is prefixed if applicable.
- "selfLink": "A String", # The URI of this resource.
+ "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
"sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance.
- "recoveryModel": "A String", # The recovery model of a SQL Server database
"compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible
+ "recoveryModel": "A String", # The recovery model of a SQL Server database
},
- "charset": "A String", # The MySQL charset value.
- "kind": "A String", # This is always <code>sql#database</code>.
- "etag": "A String", # This field is deprecated and will be removed from a future version of the
- # API.
+ "charset": "A String", # The Cloud SQL charset value.
+ "kind": "A String", # This is always *sql#database*.
+ "collation": "A String", # The Cloud SQL collation value.
+ "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
+ "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
+ "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
+ "selfLink": "A String", # The URI of this resource.
}
x__xgafv: string, V1 error format.
@@ -499,130 +386,76 @@
Returns:
An object of the form:
- { # An Operation resource.&nbsp;For successful operations that return an
- # Operation resource, only the fields relevant to the operation are populated
- # in the resource.
- "targetLink": "A String",
- "operationType": "A String", # The type of the operation. Valid values are <code>CREATE</code>,
- # <code>DELETE</code>, <code>UPDATE</code>, <code>RESTART</code>,
- # <code>IMPORT</code>, <code>EXPORT</code>, <code>BACKUP_VOLUME</code>,
- # <code>RESTORE_VOLUME</code>, <code>CREATE_USER</code>,
- # <code>DELETE_USER</code>, <code>CREATE_DATABASE</code>,
- # <code>DELETE_DATABASE</code> .
- "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be
- # populated.
+ { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
+ "kind": "A String", # This is always *sql#operation*.
+ "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
+ "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
"errors": [ # The list of errors encountered while processing this operation.
{ # Database instance operation error.
- "kind": "A String", # This is always <code>sql#operationError</code>.
"code": "A String", # Identifies the specific error that occurred.
+ "kind": "A String", # This is always *sql#operationError*.
"message": "A String", # Additional information about the error encountered.
},
],
- "kind": "A String", # This is always <code>sql#operationErrors</code>.
+ "kind": "A String", # This is always *sql#operationErrors*.
},
- "kind": "A String", # This is always <code>sql#operation</code>.
- "importContext": { # Database instance import context. # The context for import operation, if applicable.
- "database": "A String", # The target database for the import. If <code>fileType</code> is
- # <code>SQL</code>, this field is required only if the import file does not
- # specify a database, and is overridden by any database specification in the
- # import file. If <code>fileType</code> is <code>CSV</code>, one database
- # must be specified.
- "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
- "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
- "encryptionOptions": {
- "pvkPassword": "A String", # Password that encrypts the private key
- "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form
- # <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the
- # form <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- },
- },
- "uri": "A String", # Path to the import file in Cloud Storage, in the form
- # <code>gs:
- # //bucketName/fileName</code>. Compressed gzip files (.gz) are supported
- # // when <code>fileType</code> is <code>SQL</code>. The instance must have
- # // write permissions to the bucket and read access to the file.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "kind": "A String", # This is always <code>sql#importContext</code>.
- "csvImportOptions": { # Options for importing data as CSV.
- "table": "A String", # The table to which CSV data is imported.
- "columns": [ # The columns to which CSV data is imported. If not specified, all columns
- # of the database table are loaded with CSV data.
- "A String",
- ],
- },
- },
- "status": "A String", # The status of an operation. Valid values are <code>PENDING</code>,
- # <code>RUNNING</code>, <code>DONE</code>,
- # <code>SQL_OPERATION_STATUS_UNSPECIFIED</code>.
- "name": "A String", # An identifier that uniquely identifies the operation. You can use this
- # identifier to retrieve the Operations resource that has information about
- # the operation.
+ "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+ "selfLink": "A String", # The URI of this resource.
+ "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
"exportContext": { # Database instance export context. # The context for export operation, if applicable.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "uri": "A String", # The path to the file in Google Cloud Storage where the export will be
- # stored. The URI is in the form <code>gs:
- # //bucketName/fileName</code>. If the file already exists, the requests
- # // succeeds, but the operation fails. If <code>fileType</code> is
- # // <code>SQL</code> and the filename ends with .gz, the contents are
- # // compressed.
"csvExportOptions": { # Options for exporting data as CSV.
"selectQuery": "A String", # The select query used to extract the data.
},
- "kind": "A String", # This is always <code>sql#exportContext</code>.
- "databases": [ # Databases to be exported. <br /> <b>MySQL instances:</b> If
- # <code>fileType</code> is <code>SQL</code> and no database is specified, all
- # databases are exported, except for the <code>mysql</code> system database.
- # If <code>fileType</code> is <code>CSV</code>, you can specify one database,
- # either by using this property or by using the
- # <code>csvExportOptions.selectQuery</code> property, which takes precedence
- # over this property. <br /> <b>PostgreSQL instances:</b> You must specify
- # one database to be exported. If <code>fileType</code> is <code>CSV</code>,
- # this database must match the one specified in the
- # <code>csvExportOptions.selectQuery</code> property.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#exportContext*.
+ "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs: //bucketName/fileName*. If the file already exists, the requests // succeeds, but the operation fails. If *fileType* is // *SQL* and the filename ends with .gz, the contents are // compressed.
+ "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property.
"A String",
],
+ "offload": True or False, # Option for export offload.
"sqlExportOptions": { # Options for exporting data as SQL statements.
- "tables": [ # Tables to export, or that were exported, from the specified database. If
- # you specify tables, specify one and only one database. For PostgreSQL
- # instances, you can specify only one table.
+ "schemaOnly": True or False, # Export only schemas.
+ "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
"A String",
],
- "schemaOnly": True or False, # Export only schemas.
"mysqlExportOptions": { # Options for exporting from MySQL.
- "masterData": 42, # Option to include SQL statement required to set up replication.
- # If set to <code>1</code>, the dump file includes
- # a CHANGE MASTER TO statement with the binary log coordinates.
- # If set to <code>2</code>, the CHANGE MASTER TO statement is written as
- # a SQL comment, and has no effect.
- # All other values are ignored.
+ "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment, and has no effect. All other values are ignored.
},
},
},
- "targetId": "A String", # Name of the database instance related to this operation.
- "endTime": "A String", # The time this operation finished in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "startTime": "A String", # The time this operation actually started in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "insertTime": "A String", # The time this operation was enqueued in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
"user": "A String", # The email address of the user who initiated this operation.
+ "targetId": "A String", # Name of the database instance related to this operation.
+ "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
"targetProject": "A String", # The project ID of the target instance related to this operation.
- "selfLink": "A String", # The URI of this resource.
+ "targetLink": "A String",
+ "importContext": { # Database instance import context. # The context for import operation, if applicable.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#importContext*.
+ "csvImportOptions": { # Options for importing data as CSV.
+ "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data.
+ "A String",
+ ],
+ "table": "A String", # The table to which CSV data is imported.
+ },
+ "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
+ "encryptionOptions": {
+ "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "pvkPassword": "A String", # Password that encrypts the private key
+ },
+ },
+ "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
+ "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs: //bucketName/fileName*. Compressed gzip files (.gz) are supported // when *fileType* is *SQL*. The instance must have // write permissions to the bucket and read access to the file.
+ "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
+ },
}</pre>
</div>
<div class="method">
<code class="details" id="update">update(project, instance, database, body=None, x__xgafv=None)</code>
- <pre>Updates a resource containing information about a database inside a Cloud
-SQL instance.
+ <pre>Updates a resource containing information about a database inside a Cloud SQL instance.
Args:
project: string, Project ID of the project that contains the instance. (required)
@@ -632,21 +465,18 @@
The object takes the form of:
{ # Represents a SQL database on the Cloud SQL instance.
- "collation": "A String", # The MySQL collation value.
- "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
- "name": "A String", # The name of the database in the Cloud SQL instance. This does not include
- # the project ID or instance name.
- "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google
- # apps domain is prefixed if applicable.
- "selfLink": "A String", # The URI of this resource.
+ "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
"sqlserverDatabaseDetails": { # Represents a Sql Server database on the Cloud SQL instance.
- "recoveryModel": "A String", # The recovery model of a SQL Server database
"compatibilityLevel": 42, # The version of SQL Server with which the database is to be made compatible
+ "recoveryModel": "A String", # The recovery model of a SQL Server database
},
- "charset": "A String", # The MySQL charset value.
- "kind": "A String", # This is always <code>sql#database</code>.
- "etag": "A String", # This field is deprecated and will be removed from a future version of the
- # API.
+ "charset": "A String", # The Cloud SQL charset value.
+ "kind": "A String", # This is always *sql#database*.
+ "collation": "A String", # The Cloud SQL collation value.
+ "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
+ "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
+ "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
+ "selfLink": "A String", # The URI of this resource.
}
x__xgafv: string, V1 error format.
@@ -657,123 +487,70 @@
Returns:
An object of the form:
- { # An Operation resource.&nbsp;For successful operations that return an
- # Operation resource, only the fields relevant to the operation are populated
- # in the resource.
- "targetLink": "A String",
- "operationType": "A String", # The type of the operation. Valid values are <code>CREATE</code>,
- # <code>DELETE</code>, <code>UPDATE</code>, <code>RESTART</code>,
- # <code>IMPORT</code>, <code>EXPORT</code>, <code>BACKUP_VOLUME</code>,
- # <code>RESTORE_VOLUME</code>, <code>CREATE_USER</code>,
- # <code>DELETE_USER</code>, <code>CREATE_DATABASE</code>,
- # <code>DELETE_DATABASE</code> .
- "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be
- # populated.
+ { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
+ "kind": "A String", # This is always *sql#operation*.
+ "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
+ "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
"errors": [ # The list of errors encountered while processing this operation.
{ # Database instance operation error.
- "kind": "A String", # This is always <code>sql#operationError</code>.
"code": "A String", # Identifies the specific error that occurred.
+ "kind": "A String", # This is always *sql#operationError*.
"message": "A String", # Additional information about the error encountered.
},
],
- "kind": "A String", # This is always <code>sql#operationErrors</code>.
+ "kind": "A String", # This is always *sql#operationErrors*.
},
- "kind": "A String", # This is always <code>sql#operation</code>.
- "importContext": { # Database instance import context. # The context for import operation, if applicable.
- "database": "A String", # The target database for the import. If <code>fileType</code> is
- # <code>SQL</code>, this field is required only if the import file does not
- # specify a database, and is overridden by any database specification in the
- # import file. If <code>fileType</code> is <code>CSV</code>, one database
- # must be specified.
- "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
- "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
- "encryptionOptions": {
- "pvkPassword": "A String", # Password that encrypts the private key
- "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form
- # <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the
- # form <code>gs://bucketName/fileName</code>. The instance must have
- # write permissions to the bucket and read access to the file.
- },
- },
- "uri": "A String", # Path to the import file in Cloud Storage, in the form
- # <code>gs:
- # //bucketName/fileName</code>. Compressed gzip files (.gz) are supported
- # // when <code>fileType</code> is <code>SQL</code>. The instance must have
- # // write permissions to the bucket and read access to the file.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "kind": "A String", # This is always <code>sql#importContext</code>.
- "csvImportOptions": { # Options for importing data as CSV.
- "table": "A String", # The table to which CSV data is imported.
- "columns": [ # The columns to which CSV data is imported. If not specified, all columns
- # of the database table are loaded with CSV data.
- "A String",
- ],
- },
- },
- "status": "A String", # The status of an operation. Valid values are <code>PENDING</code>,
- # <code>RUNNING</code>, <code>DONE</code>,
- # <code>SQL_OPERATION_STATUS_UNSPECIFIED</code>.
- "name": "A String", # An identifier that uniquely identifies the operation. You can use this
- # identifier to retrieve the Operations resource that has information about
- # the operation.
+ "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+ "selfLink": "A String", # The URI of this resource.
+ "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
"exportContext": { # Database instance export context. # The context for export operation, if applicable.
- "fileType": "A String", # The file type for the specified uri. <br><code>SQL</code>: The file
- # contains SQL statements. <br><code>CSV</code>: The file contains CSV data.
- "uri": "A String", # The path to the file in Google Cloud Storage where the export will be
- # stored. The URI is in the form <code>gs:
- # //bucketName/fileName</code>. If the file already exists, the requests
- # // succeeds, but the operation fails. If <code>fileType</code> is
- # // <code>SQL</code> and the filename ends with .gz, the contents are
- # // compressed.
"csvExportOptions": { # Options for exporting data as CSV.
"selectQuery": "A String", # The select query used to extract the data.
},
- "kind": "A String", # This is always <code>sql#exportContext</code>.
- "databases": [ # Databases to be exported. <br /> <b>MySQL instances:</b> If
- # <code>fileType</code> is <code>SQL</code> and no database is specified, all
- # databases are exported, except for the <code>mysql</code> system database.
- # If <code>fileType</code> is <code>CSV</code>, you can specify one database,
- # either by using this property or by using the
- # <code>csvExportOptions.selectQuery</code> property, which takes precedence
- # over this property. <br /> <b>PostgreSQL instances:</b> You must specify
- # one database to be exported. If <code>fileType</code> is <code>CSV</code>,
- # this database must match the one specified in the
- # <code>csvExportOptions.selectQuery</code> property.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#exportContext*.
+ "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs: //bucketName/fileName*. If the file already exists, the requests // succeeds, but the operation fails. If *fileType* is // *SQL* and the filename ends with .gz, the contents are // compressed.
+ "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property.
"A String",
],
+ "offload": True or False, # Option for export offload.
"sqlExportOptions": { # Options for exporting data as SQL statements.
- "tables": [ # Tables to export, or that were exported, from the specified database. If
- # you specify tables, specify one and only one database. For PostgreSQL
- # instances, you can specify only one table.
+ "schemaOnly": True or False, # Export only schemas.
+ "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
"A String",
],
- "schemaOnly": True or False, # Export only schemas.
"mysqlExportOptions": { # Options for exporting from MySQL.
- "masterData": 42, # Option to include SQL statement required to set up replication.
- # If set to <code>1</code>, the dump file includes
- # a CHANGE MASTER TO statement with the binary log coordinates.
- # If set to <code>2</code>, the CHANGE MASTER TO statement is written as
- # a SQL comment, and has no effect.
- # All other values are ignored.
+ "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment, and has no effect. All other values are ignored.
},
},
},
- "targetId": "A String", # Name of the database instance related to this operation.
- "endTime": "A String", # The time this operation finished in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "startTime": "A String", # The time this operation actually started in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
- "insertTime": "A String", # The time this operation was enqueued in UTC timezone in <a
- # href="https://tools.ietf.org/html/rfc3339">RFC 3339</a> format, for example
- # <code>2012-11-15T16:19:00.094Z</code>.
"user": "A String", # The email address of the user who initiated this operation.
+ "targetId": "A String", # Name of the database instance related to this operation.
+ "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+ "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
"targetProject": "A String", # The project ID of the target instance related to this operation.
- "selfLink": "A String", # The URI of this resource.
+ "targetLink": "A String",
+ "importContext": { # Database instance import context. # The context for import operation, if applicable.
+ "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+ "kind": "A String", # This is always *sql#importContext*.
+ "csvImportOptions": { # Options for importing data as CSV.
+ "columns": [ # The columns to which CSV data is imported. If not specified, all columns of the database table are loaded with CSV data.
+ "A String",
+ ],
+ "table": "A String", # The table to which CSV data is imported.
+ },
+ "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
+ "encryptionOptions": {
+ "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+ "pvkPassword": "A String", # Password that encrypts the private key
+ },
+ },
+ "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
+ "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs: //bucketName/fileName*. Compressed gzip files (.gz) are supported // when *fileType* is *SQL*. The instance must have // write permissions to the bucket and read access to the file.
+ "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
+ },
}</pre>
</div>