chore: Update discovery artifacts (#1531)

## Deleted keys were detected in the following stable discovery artifacts:
storage v1 https://github.com/googleapis/google-api-python-client/commit/6117646c93e672eb34816b6db4d2b84c3c046071

## Discovery Artifact Change Summary:
feat(androidmanagement): update the api https://github.com/googleapis/google-api-python-client/commit/493de7636af575bec7e3d646c77d81a4278891e7
feat(composer): update the api https://github.com/googleapis/google-api-python-client/commit/827a98a27eb06dee06080e01edc1b9d1304bae67
feat(compute): update the api https://github.com/googleapis/google-api-python-client/commit/967d539cb9dcccfe2eea8fd81e05989f1bd92975
feat(contactcenterinsights): update the api https://github.com/googleapis/google-api-python-client/commit/fd55971dcc7913faa7c90614e1b44122da9f3c1d
feat(containeranalysis): update the api https://github.com/googleapis/google-api-python-client/commit/be52e3f77f0900ea3369a3f1145702832ea2167a
feat(content): update the api https://github.com/googleapis/google-api-python-client/commit/c422dda8dc607554e34899c964c36b32c554bb61
feat(dataflow): update the api https://github.com/googleapis/google-api-python-client/commit/9357bc2b4b507ba98fd17988eb93e0c08da00bc3
feat(datastore): update the api https://github.com/googleapis/google-api-python-client/commit/ee1091a834aaf37e6b2a279f901543d43152da74
feat(documentai): update the api https://github.com/googleapis/google-api-python-client/commit/02e062eb95ebadf2f8002c34424a7442d327c765
feat(healthcare): update the api https://github.com/googleapis/google-api-python-client/commit/29bd379b11ee39b49d7452f0e9d7aada1536a22f
feat(notebooks): update the api https://github.com/googleapis/google-api-python-client/commit/438b148616d638783b17bf7fe060cdb57a8bc473
feat(ondemandscanning): update the api https://github.com/googleapis/google-api-python-client/commit/8f732ecf65df8e7aa8ad58258ed5d5a0dfed62ea
feat(osconfig): update the api https://github.com/googleapis/google-api-python-client/commit/655a50711fb06b94a3b33a173611cc39cfb2553f
feat(pubsublite): update the api https://github.com/googleapis/google-api-python-client/commit/fc27fe7319f659032e2c3e9fe7be24224dca9fb6
feat(run): update the api https://github.com/googleapis/google-api-python-client/commit/de851d225affb67ba9819e9d4c81dc14bc95dcd1
feat(sasportal): update the api https://github.com/googleapis/google-api-python-client/commit/9e472d5f1b8f31708fd535a3a8575f0510dad5a7
feat(storage): update the api https://github.com/googleapis/google-api-python-client/commit/6117646c93e672eb34816b6db4d2b84c3c046071
feat(sts): update the api https://github.com/googleapis/google-api-python-client/commit/9e0f476952df90e2fb9b6df287c2ceb2a5417c84
feat(youtube): update the api https://github.com/googleapis/google-api-python-client/commit/2624f80fe82466181d853c35138e04064b1edcef
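
For context, the regenerated discovery artifacts above are consumed through the regular client in the usual way; nothing in this PR changes how a service object is built. A minimal sketch (assuming Application Default Credentials are configured, and using placeholder `my-project` / `my-instance` values that are not part of this PR) against sqladmin v1beta4, whose regenerated docs are diffed below:

```python
# Minimal usage sketch against one of the regenerated discovery documents.
# Assumes Application Default Credentials; "my-project" and "my-instance"
# are placeholders, not values taken from this PR.
from googleapiclient import discovery

# Build a client from the sqladmin v1beta4 discovery document.
service = discovery.build("sqladmin", "v1beta4")

# List databases on a Cloud SQL instance; the returned Database resources
# carry the fields (kind, collation, etag, name, ...) documented in the
# generated docs diffed below.
request = service.databases().list(project="my-project", instance="my-instance")
response = request.execute()

for database in response.get("items", []):
    print(database["name"], database.get("kind"))
```

The diff that follows is the generated-documentation change for `docs/dyn/sqladmin_v1beta4.databases.html`; unchanged context lines are shown as they exist in the file.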
diff --git a/docs/dyn/sqladmin_v1beta4.databases.html b/docs/dyn/sqladmin_v1beta4.databases.html
index b7d9e3b..8e979b5 100644
--- a/docs/dyn/sqladmin_v1beta4.databases.html
+++ b/docs/dyn/sqladmin_v1beta4.databases.html
@@ -120,48 +120,48 @@
     { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
   "backupContext": { # Backup context. # The context for backup operation, if applicable.
     "backupId": "A String", # The identifier of the backup.
-    "kind": "A String", # This is always *sql#backupContext*.
+    "kind": "A String", # This is always **sql#backupContext**.
   },
-  "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+  "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
   "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
     "errors": [ # The list of errors encountered while processing this operation.
       { # Database instance operation error.
         "code": "A String", # Identifies the specific error that occurred.
-        "kind": "A String", # This is always *sql#operationError*.
+        "kind": "A String", # This is always **sql#operationError**.
         "message": "A String", # Additional information about the error encountered.
       },
     ],
-    "kind": "A String", # This is always *sql#operationErrors*.
+    "kind": "A String", # This is always **sql#operationErrors**.
   },
   "exportContext": { # Database instance export context. # The context for export operation, if applicable.
-    "csvExportOptions": { # Options for exporting data as CSV. *MySQL* and *PostgreSQL* instances only.
+    "csvExportOptions": { # Options for exporting data as CSV. **MySQL** and **PostgreSQL** instances only.
       "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped.
       "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file.
       "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values.
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "selectQuery": "A String", # The select query used to extract the data.
     },
-    "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property. *SQL Server instances:* You must specify one database to be exported, and the *fileType* must be *BAK*.
+    "databases": [ # Databases to be exported. * **MySQL instances:** If **fileType** is **SQL** and no database is specified, all databases are exported, except for the **mysql** system database. If **fileType** is **CSV**, you can specify one database, either by using this property or by using the **csvExportOptions.selectQuery** property, which takes precedence over this property. * **PostgreSQL instances:** You must specify one database to be exported. If **fileType** is **CSV**, this database must match the one specified in the **csvExportOptions.selectQuery** property. * **SQL Server instances:** You must specify one database to be exported, and the **fileType** must be **BAK**.
       "A String",
     ],
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data. *BAK*: The file contains backup data for a SQL Server instance.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "kind": "A String", # This is always *sql#exportContext*.
     "offload": True or False, # Option for export offload.
     "sqlExportOptions": { # Options for exporting data as SQL statements.
       "mysqlExportOptions": { # Options for exporting from MySQL.
-        "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than *1*, --set-gtid-purged is set to OFF.
+        "masterData": 42, # Option to include SQL statement required to set up replication. * If set to **1**, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. * If set to **2**, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. * If set to any value other than **1**, --set-gtid-purged is set to OFF.
       },
       "schemaOnly": True or False, # Export only schemas.
       "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
         "A String",
       ],
     },
-    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs://bucketName/fileName*. If the file already exists, the request succeeds, but the operation fails. If *fileType* is *SQL* and the filename ends with .gz, the contents are compressed.
+    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form **gs://bucketName/fileName**. If the file already exists, the request succeeds, but the operation fails. If **fileType** is **SQL** and the filename ends with .gz, the contents are compressed.
   },
   "importContext": { # Database instance import context. # The context for import operation, if applicable.
     "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
       "encryptionOptions": {
-        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form **gs://bucketName/fileName**. The instance must have write permissions to the bucket and read access to the file.
         "pvkPassword": "A String", # Password that encrypts the private key
         "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
       },
@@ -176,19 +176,19 @@
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "table": "A String", # The table to which CSV data is imported.
     },
-    "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+    "database": "A String", # The target database for the import. If **fileType** is **SQL**, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If **fileType** is **CSV**, one database must be specified.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
-    "kind": "A String", # This is always *sql#importContext*.
-    "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs://bucketName/fileName*. Compressed gzip files (.gz) are supported when *fileType* is *SQL*. The instance must have write permissions to the bucket and read access to the file.
+    "kind": "A String", # This is always **sql#importContext**.
+    "uri": "A String", # Path to the import file in Cloud Storage, in the form **gs://bucketName/fileName**. Compressed gzip files (.gz) are supported when **fileType** is **SQL**. The instance must have write permissions to the bucket and read access to the file.
   },
-  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "kind": "A String", # This is always *sql#operation*.
+  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "kind": "A String", # This is always **sql#operation**.
   "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
-  "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
+  "operationType": "A String", # The type of the operation. Valid values are: * **CREATE** * **DELETE** * **UPDATE** * **RESTART** * **IMPORT** * **EXPORT** * **BACKUP_VOLUME** * **RESTORE_VOLUME** * **CREATE_USER** * **DELETE_USER** * **CREATE_DATABASE** * **DELETE_DATABASE**
   "selfLink": "A String", # The URI of this resource.
-  "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+  "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "status": "A String", # The status of an operation. Valid values are: * **PENDING** * **RUNNING** * **DONE** * **SQL_OPERATION_STATUS_UNSPECIFIED**
   "targetId": "A String", # Name of the database instance related to this operation.
   "targetLink": "A String",
   "targetProject": "A String", # The project ID of the target instance related to this operation.
@@ -217,7 +217,7 @@
   "collation": "A String", # The Cloud SQL collation value.
   "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
   "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
-  "kind": "A String", # This is always *sql#database*.
+  "kind": "A String", # This is always **sql#database**.
   "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
   "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
   "selfLink": "A String", # The URI of this resource.
@@ -243,7 +243,7 @@
   "collation": "A String", # The Cloud SQL collation value.
   "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
   "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
-  "kind": "A String", # This is always *sql#database*.
+  "kind": "A String", # This is always **sql#database**.
   "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
   "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
   "selfLink": "A String", # The URI of this resource.
@@ -264,48 +264,48 @@
     { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
   "backupContext": { # Backup context. # The context for backup operation, if applicable.
     "backupId": "A String", # The identifier of the backup.
-    "kind": "A String", # This is always *sql#backupContext*.
+    "kind": "A String", # This is always **sql#backupContext**.
   },
-  "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+  "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
   "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
     "errors": [ # The list of errors encountered while processing this operation.
       { # Database instance operation error.
         "code": "A String", # Identifies the specific error that occurred.
-        "kind": "A String", # This is always *sql#operationError*.
+        "kind": "A String", # This is always **sql#operationError**.
         "message": "A String", # Additional information about the error encountered.
       },
     ],
-    "kind": "A String", # This is always *sql#operationErrors*.
+    "kind": "A String", # This is always **sql#operationErrors**.
   },
   "exportContext": { # Database instance export context. # The context for export operation, if applicable.
-    "csvExportOptions": { # Options for exporting data as CSV. *MySQL* and *PostgreSQL* instances only.
+    "csvExportOptions": { # Options for exporting data as CSV. **MySQL** and **PostgreSQL** instances only.
       "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped.
       "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file.
       "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values.
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "selectQuery": "A String", # The select query used to extract the data.
     },
-    "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property. *SQL Server instances:* You must specify one database to be exported, and the *fileType* must be *BAK*.
+    "databases": [ # Databases to be exported. * **MySQL instances:** If **fileType** is **SQL** and no database is specified, all databases are exported, except for the **mysql** system database. If **fileType** is **CSV**, you can specify one database, either by using this property or by using the **csvExportOptions.selectQuery** property, which takes precedence over this property. * **PostgreSQL instances:** You must specify one database to be exported. If **fileType** is **CSV**, this database must match the one specified in the **csvExportOptions.selectQuery** property. * **SQL Server instances:** You must specify one database to be exported, and the **fileType** must be **BAK**.
       "A String",
     ],
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data. *BAK*: The file contains backup data for a SQL Server instance.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "kind": "A String", # This is always *sql#exportContext*.
     "offload": True or False, # Option for export offload.
     "sqlExportOptions": { # Options for exporting data as SQL statements.
       "mysqlExportOptions": { # Options for exporting from MySQL.
-        "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than *1*, --set-gtid-purged is set to OFF.
+        "masterData": 42, # Option to include SQL statement required to set up replication. * If set to **1**, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. * If set to **2**, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. * If set to any value other than **1**, --set-gtid-purged is set to OFF.
       },
       "schemaOnly": True or False, # Export only schemas.
       "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
         "A String",
       ],
     },
-    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs://bucketName/fileName*. If the file already exists, the request succeeds, but the operation fails. If *fileType* is *SQL* and the filename ends with .gz, the contents are compressed.
+    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form **gs://bucketName/fileName**. If the file already exists, the request succeeds, but the operation fails. If **fileType** is **SQL** and the filename ends with .gz, the contents are compressed.
   },
   "importContext": { # Database instance import context. # The context for import operation, if applicable.
     "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
       "encryptionOptions": {
-        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form **gs://bucketName/fileName**. The instance must have write permissions to the bucket and read access to the file.
         "pvkPassword": "A String", # Password that encrypts the private key
         "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
       },
@@ -320,19 +320,19 @@
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "table": "A String", # The table to which CSV data is imported.
     },
-    "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+    "database": "A String", # The target database for the import. If **fileType** is **SQL**, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If **fileType** is **CSV**, one database must be specified.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
-    "kind": "A String", # This is always *sql#importContext*.
-    "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs://bucketName/fileName*. Compressed gzip files (.gz) are supported when *fileType* is *SQL*. The instance must have write permissions to the bucket and read access to the file.
+    "kind": "A String", # This is always **sql#importContext**.
+    "uri": "A String", # Path to the import file in Cloud Storage, in the form **gs://bucketName/fileName**. Compressed gzip files (.gz) are supported when **fileType** is **SQL**. The instance must have write permissions to the bucket and read access to the file.
   },
-  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "kind": "A String", # This is always *sql#operation*.
+  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "kind": "A String", # This is always **sql#operation**.
   "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
-  "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
+  "operationType": "A String", # The type of the operation. Valid values are: * **CREATE** * **DELETE** * **UPDATE** * **RESTART** * **IMPORT** * **EXPORT** * **BACKUP_VOLUME** * **RESTORE_VOLUME** * **CREATE_USER** * **DELETE_USER** * **CREATE_DATABASE** * **DELETE_DATABASE**
   "selfLink": "A String", # The URI of this resource.
-  "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+  "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "status": "A String", # The status of an operation. Valid values are: * **PENDING** * **RUNNING** * **DONE** * **SQL_OPERATION_STATUS_UNSPECIFIED**
   "targetId": "A String", # Name of the database instance related to this operation.
   "targetLink": "A String",
   "targetProject": "A String", # The project ID of the target instance related to this operation.
@@ -362,7 +362,7 @@
       "collation": "A String", # The Cloud SQL collation value.
       "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
       "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
-      "kind": "A String", # This is always *sql#database*.
+      "kind": "A String", # This is always **sql#database**.
       "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
       "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
       "selfLink": "A String", # The URI of this resource.
@@ -392,7 +392,7 @@
   "collation": "A String", # The Cloud SQL collation value.
   "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
   "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
-  "kind": "A String", # This is always *sql#database*.
+  "kind": "A String", # This is always **sql#database**.
   "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
   "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
   "selfLink": "A String", # The URI of this resource.
@@ -413,48 +413,48 @@
     { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
   "backupContext": { # Backup context. # The context for backup operation, if applicable.
     "backupId": "A String", # The identifier of the backup.
-    "kind": "A String", # This is always *sql#backupContext*.
+    "kind": "A String", # This is always **sql#backupContext**.
   },
-  "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+  "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
   "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
     "errors": [ # The list of errors encountered while processing this operation.
       { # Database instance operation error.
         "code": "A String", # Identifies the specific error that occurred.
-        "kind": "A String", # This is always *sql#operationError*.
+        "kind": "A String", # This is always **sql#operationError**.
         "message": "A String", # Additional information about the error encountered.
       },
     ],
-    "kind": "A String", # This is always *sql#operationErrors*.
+    "kind": "A String", # This is always **sql#operationErrors**.
   },
   "exportContext": { # Database instance export context. # The context for export operation, if applicable.
-    "csvExportOptions": { # Options for exporting data as CSV. *MySQL* and *PostgreSQL* instances only.
+    "csvExportOptions": { # Options for exporting data as CSV. **MySQL** and **PostgreSQL** instances only.
       "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped.
       "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file.
       "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values.
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "selectQuery": "A String", # The select query used to extract the data.
     },
-    "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property. *SQL Server instances:* You must specify one database to be exported, and the *fileType* must be *BAK*.
+    "databases": [ # Databases to be exported. * **MySQL instances:** If **fileType** is **SQL** and no database is specified, all databases are exported, except for the **mysql** system database. If **fileType** is **CSV**, you can specify one database, either by using this property or by using the **csvExportOptions.selectQuery** property, which takes precedence over this property. * **PostgreSQL instances:** You must specify one database to be exported. If **fileType** is **CSV**, this database must match the one specified in the **csvExportOptions.selectQuery** property. * **SQL Server instances:** You must specify one database to be exported, and the **fileType** must be **BAK**.
       "A String",
     ],
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data. *BAK*: The file contains backup data for a SQL Server instance.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "kind": "A String", # This is always *sql#exportContext*.
     "offload": True or False, # Option for export offload.
     "sqlExportOptions": { # Options for exporting data as SQL statements.
       "mysqlExportOptions": { # Options for exporting from MySQL.
-        "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than *1*, --set-gtid-purged is set to OFF.
+        "masterData": 42, # Option to include SQL statement required to set up replication. * If set to **1**, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. * If set to **2**, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. * If set to any value other than **1**, --set-gtid-purged is set to OFF.
       },
       "schemaOnly": True or False, # Export only schemas.
       "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
         "A String",
       ],
     },
-    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs://bucketName/fileName*. If the file already exists, the request succeeds, but the operation fails. If *fileType* is *SQL* and the filename ends with .gz, the contents are compressed.
+    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form **gs://bucketName/fileName**. If the file already exists, the request succeeds, but the operation fails. If **fileType** is **SQL** and the filename ends with .gz, the contents are compressed.
   },
   "importContext": { # Database instance import context. # The context for import operation, if applicable.
     "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
       "encryptionOptions": {
-        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form **gs://bucketName/fileName**. The instance must have write permissions to the bucket and read access to the file.
         "pvkPassword": "A String", # Password that encrypts the private key
         "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
       },
@@ -469,19 +469,19 @@
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "table": "A String", # The table to which CSV data is imported.
     },
-    "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+    "database": "A String", # The target database for the import. If **fileType** is **SQL**, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If **fileType** is **CSV**, one database must be specified.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
-    "kind": "A String", # This is always *sql#importContext*.
-    "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs://bucketName/fileName*. Compressed gzip files (.gz) are supported when *fileType* is *SQL*. The instance must have write permissions to the bucket and read access to the file.
+    "kind": "A String", # This is always **sql#importContext**.
+    "uri": "A String", # Path to the import file in Cloud Storage, in the form **gs://bucketName/fileName**. Compressed gzip files (.gz) are supported when **fileType** is **SQL**. The instance must have write permissions to the bucket and read access to the file.
   },
-  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "kind": "A String", # This is always *sql#operation*.
+  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "kind": "A String", # This is always **sql#operation**.
   "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
-  "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
+  "operationType": "A String", # The type of the operation. Valid values are: * **CREATE** * **DELETE** * **UPDATE** * **RESTART** * **IMPORT** * **EXPORT** * **BACKUP_VOLUME** * **RESTORE_VOLUME** * **CREATE_USER** * **DELETE_USER** * **CREATE_DATABASE** * **DELETE_DATABASE**
   "selfLink": "A String", # The URI of this resource.
-  "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+  "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "status": "A String", # The status of an operation. Valid values are: * **PENDING** * **RUNNING** * **DONE** * **SQL_OPERATION_STATUS_UNSPECIFIED**
   "targetId": "A String", # Name of the database instance related to this operation.
   "targetLink": "A String",
   "targetProject": "A String", # The project ID of the target instance related to this operation.
@@ -505,7 +505,7 @@
   "collation": "A String", # The Cloud SQL collation value.
   "etag": "A String", # This field is deprecated and will be removed from a future version of the API.
   "instance": "A String", # The name of the Cloud SQL instance. This does not include the project ID.
-  "kind": "A String", # This is always *sql#database*.
+  "kind": "A String", # This is always **sql#database**.
   "name": "A String", # The name of the database in the Cloud SQL instance. This does not include the project ID or instance name.
   "project": "A String", # The project ID of the project containing the Cloud SQL database. The Google apps domain is prefixed if applicable.
   "selfLink": "A String", # The URI of this resource.
@@ -526,48 +526,48 @@
     { # An Operation resource. For successful operations that return an Operation resource, only the fields relevant to the operation are populated in the resource.
   "backupContext": { # Backup context. # The context for backup operation, if applicable.
     "backupId": "A String", # The identifier of the backup.
-    "kind": "A String", # This is always *sql#backupContext*.
+    "kind": "A String", # This is always **sql#backupContext**.
   },
-  "endTime": "A String", # The time this operation finished in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
+  "endTime": "A String", # The time this operation finished in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
   "error": { # Database instance operation errors list wrapper. # If errors occurred during processing of this operation, this field will be populated.
     "errors": [ # The list of errors encountered while processing this operation.
       { # Database instance operation error.
         "code": "A String", # Identifies the specific error that occurred.
-        "kind": "A String", # This is always *sql#operationError*.
+        "kind": "A String", # This is always **sql#operationError**.
         "message": "A String", # Additional information about the error encountered.
       },
     ],
-    "kind": "A String", # This is always *sql#operationErrors*.
+    "kind": "A String", # This is always **sql#operationErrors**.
   },
   "exportContext": { # Database instance export context. # The context for export operation, if applicable.
-    "csvExportOptions": { # Options for exporting data as CSV. *MySQL* and *PostgreSQL* instances only.
+    "csvExportOptions": { # Options for exporting data as CSV. **MySQL** and **PostgreSQL** instances only.
       "escapeCharacter": "A String", # Specifies the character that should appear before a data character that needs to be escaped.
       "fieldsTerminatedBy": "A String", # Specifies the character that separates columns within each row (line) of the file.
       "linesTerminatedBy": "A String", # This is used to separate lines. If a line does not contain all fields, the rest of the columns are set to their default values.
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "selectQuery": "A String", # The select query used to extract the data.
     },
-    "databases": [ # Databases to be exported. *MySQL instances:* If *fileType* is *SQL* and no database is specified, all databases are exported, except for the *mysql* system database. If *fileType* is *CSV*, you can specify one database, either by using this property or by using the *csvExportOptions.selectQuery* property, which takes precedence over this property. *PostgreSQL instances:* You must specify one database to be exported. If *fileType* is *CSV*, this database must match the one specified in the *csvExportOptions.selectQuery* property. *SQL Server instances:* You must specify one database to be exported, and the *fileType* must be *BAK*.
+    "databases": [ # Databases to be exported. * **MySQL instances:** If **fileType** is **SQL** and no database is specified, all databases are exported, except for the **mysql** system database. If **fileType** is **CSV**, you can specify one database, either by using this property or by using the **csvExportOptions.selectQuery** property, which takes precedence over this property. * **PostgreSQL instances:** You must specify one database to be exported. If **fileType** is **CSV**, this database must match the one specified in the **csvExportOptions.selectQuery** property. * **SQL Server instances:** You must specify one database to be exported, and the **fileType** must be **BAK**.
       "A String",
     ],
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data. *BAK*: The file contains backup data for a SQL Server instance.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "kind": "A String", # This is always *sql#exportContext*.
     "offload": True or False, # Option for export offload.
     "sqlExportOptions": { # Options for exporting data as SQL statements.
       "mysqlExportOptions": { # Options for exporting from MySQL.
-        "masterData": 42, # Option to include SQL statement required to set up replication. If set to *1*, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. If set to *2*, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. If set to any value other than *1*, --set-gtid-purged is set to OFF.
+        "masterData": 42, # Option to include SQL statement required to set up replication. * If set to **1**, the dump file includes a CHANGE MASTER TO statement with the binary log coordinates, and --set-gtid-purged is set to ON. * If set to **2**, the CHANGE MASTER TO statement is written as a SQL comment and has no effect. * If set to any value other than **1**, --set-gtid-purged is set to OFF.
       },
       "schemaOnly": True or False, # Export only schemas.
       "tables": [ # Tables to export, or that were exported, from the specified database. If you specify tables, specify one and only one database. For PostgreSQL instances, you can specify only one table.
         "A String",
       ],
     },
-    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form *gs://bucketName/fileName*. If the file already exists, the request succeeds, but the operation fails. If *fileType* is *SQL* and the filename ends with .gz, the contents are compressed.
+    "uri": "A String", # The path to the file in Google Cloud Storage where the export will be stored. The URI is in the form **gs://bucketName/fileName**. If the file already exists, the request succeeds, but the operation fails. If **fileType** is **SQL** and the filename ends with .gz, the contents are compressed.
   },
   "importContext": { # Database instance import context. # The context for import operation, if applicable.
     "bakImportOptions": { # Import parameters specific to SQL Server .BAK files
       "encryptionOptions": {
-        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
+        "certPath": "A String", # Path to the Certificate (.cer) in Cloud Storage, in the form **gs://bucketName/fileName**. The instance must have write permissions to the bucket and read access to the file.
         "pvkPassword": "A String", # Password that encrypts the private key
         "pvkPath": "A String", # Path to the Certificate Private Key (.pvk) in Cloud Storage, in the form *gs://bucketName/fileName*. The instance must have write permissions to the bucket and read access to the file.
       },
@@ -582,19 +582,19 @@
       "quoteCharacter": "A String", # Specifies the quoting character to be used when a data value is quoted.
       "table": "A String", # The table to which CSV data is imported.
     },
-    "database": "A String", # The target database for the import. If *fileType* is *SQL*, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If *fileType* is *CSV*, one database must be specified.
-    "fileType": "A String", # The file type for the specified uri. *SQL*: The file contains SQL statements. *CSV*: The file contains CSV data.
+    "database": "A String", # The target database for the import. If **fileType** is **SQL**, this field is required only if the import file does not specify a database, and is overridden by any database specification in the import file. If **fileType** is **CSV**, one database must be specified.
+    "fileType": "A String", # The file type for the specified uri. * **SQL**: The file contains SQL statements. * **CSV**: The file contains CSV data. * **BAK**: The file contains backup data for a SQL Server instance.
     "importUser": "A String", # The PostgreSQL user for this import operation. PostgreSQL instances only.
-    "kind": "A String", # This is always *sql#importContext*.
-    "uri": "A String", # Path to the import file in Cloud Storage, in the form *gs://bucketName/fileName*. Compressed gzip files (.gz) are supported when *fileType* is *SQL*. The instance must have write permissions to the bucket and read access to the file.
+    "kind": "A String", # This is always **sql#importContext**.
+    "uri": "A String", # Path to the import file in Cloud Storage, in the form **gs://bucketName/fileName**. Compressed gzip files (.gz) are supported when **fileType** is **SQL**. The instance must have write permissions to the bucket and read access to the file.
   },
-  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "kind": "A String", # This is always *sql#operation*.
+  "insertTime": "A String", # The time this operation was enqueued in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "kind": "A String", # This is always **sql#operation**.
   "name": "A String", # An identifier that uniquely identifies the operation. You can use this identifier to retrieve the Operations resource that has information about the operation.
-  "operationType": "A String", # The type of the operation. Valid values are: *CREATE* *DELETE* *UPDATE* *RESTART* *IMPORT* *EXPORT* *BACKUP_VOLUME* *RESTORE_VOLUME* *CREATE_USER* *DELETE_USER* *CREATE_DATABASE* *DELETE_DATABASE*
+  "operationType": "A String", # The type of the operation. Valid values are: * **CREATE** * **DELETE** * **UPDATE** * **RESTART** * **IMPORT** * **EXPORT** * **BACKUP_VOLUME** * **RESTORE_VOLUME** * **CREATE_USER** * **DELETE_USER** * **CREATE_DATABASE** * **DELETE_DATABASE**
   "selfLink": "A String", # The URI of this resource.
-  "startTime": "A String", # The time this operation actually started in UTC timezone in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
-  "status": "A String", # The status of an operation. Valid values are: *PENDING* *RUNNING* *DONE* *SQL_OPERATION_STATUS_UNSPECIFIED*
+  "startTime": "A String", # The time this operation actually started in UTC timezone in [RFC 3339](https://tools.ietf.org/html/rfc3339) format, for example **2012-11-15T16:19:00.094Z**.
+  "status": "A String", # The status of an operation. Valid values are: * **PENDING** * **RUNNING** * **DONE** * **SQL_OPERATION_STATUS_UNSPECIFIED**
   "targetId": "A String", # Name of the database instance related to this operation.
   "targetLink": "A String",
   "targetProject": "A String", # The project ID of the target instance related to this operation.