Regen docs (#373)

diff --git a/docs/dyn/dataproc_v1beta1.projects.jobs.html b/docs/dyn/dataproc_v1beta1.projects.jobs.html
index fc05bfe..c9b5277 100644
--- a/docs/dyn/dataproc_v1beta1.projects.jobs.html
+++ b/docs/dyn/dataproc_v1beta1.projects.jobs.html
@@ -121,6 +121,7 @@
     "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
       "state": "A String", # Required A state message specifying the overall job state.
       "stateStartTime": "A String", # Output-only The time when this state was entered.
+      "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
       "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
     },
     "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -232,10 +233,10 @@
     "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
     "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
       "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -259,10 +260,10 @@
     "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
     "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
     "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -290,6 +291,7 @@
       { # Cloud Dataproc job status.
         "state": "A String", # Required A state message specifying the overall job state.
         "stateStartTime": "A String", # Output-only The time when this state was entered.
+        "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
         "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
       },
     ],
@@ -366,6 +368,7 @@
     "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
       "state": "A String", # Required A state message specifying the overall job state.
       "stateStartTime": "A String", # Output-only The time when this state was entered.
+      "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
       "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
     },
     "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -477,10 +480,10 @@
     "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
     "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
       "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -504,10 +507,10 @@
     "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
     "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
     "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -535,6 +538,7 @@
       { # Cloud Dataproc job status.
         "state": "A String", # Required A state message specifying the overall job state.
         "stateStartTime": "A String", # Output-only The time when this state was entered.
+        "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
         "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
       },
     ],
@@ -595,6 +599,7 @@
         "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
           "state": "A String", # Required A state message specifying the overall job state.
           "stateStartTime": "A String", # Output-only The time when this state was entered.
+          "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
           "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
         },
         "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -706,10 +711,10 @@
         "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
         "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
           "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-          "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+          "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
             "A String",
           ],
-          "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+          "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
             "A String",
           ],
           "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -733,10 +738,10 @@
         "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
         "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
         "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-          "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+          "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
             "A String",
           ],
-          "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+          "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
             "A String",
           ],
           "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -764,6 +769,7 @@
           { # Cloud Dataproc job status.
             "state": "A String", # Required A state message specifying the overall job state.
             "stateStartTime": "A String", # Output-only The time when this state was entered.
+            "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
             "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
           },
         ],
@@ -828,6 +834,7 @@
   "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
     "state": "A String", # Required A state message specifying the overall job state.
     "stateStartTime": "A String", # Output-only The time when this state was entered.
+    "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
     "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
   },
   "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -939,10 +946,10 @@
   "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
   "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
     "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-    "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+    "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
       "A String",
     ],
-    "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+    "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
       "A String",
     ],
     "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -966,10 +973,10 @@
   "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
   "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
   "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-    "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+    "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
       "A String",
     ],
-    "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+    "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
       "A String",
     ],
     "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -997,6 +1004,7 @@
     { # Cloud Dataproc job status.
       "state": "A String", # Required A state message specifying the overall job state.
       "stateStartTime": "A String", # Output-only The time when this state was entered.
+      "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
       "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
     },
   ],
@@ -1043,6 +1051,7 @@
     "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
       "state": "A String", # Required A state message specifying the overall job state.
       "stateStartTime": "A String", # Output-only The time when this state was entered.
+      "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
       "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
     },
     "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -1154,10 +1163,10 @@
     "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
     "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
       "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1181,10 +1190,10 @@
     "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
     "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
     "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1212,6 +1221,7 @@
       { # Cloud Dataproc job status.
         "state": "A String", # Required A state message specifying the overall job state.
         "stateStartTime": "A String", # Output-only The time when this state was entered.
+        "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
         "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
       },
     ],
@@ -1260,6 +1270,7 @@
       "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
         "state": "A String", # Required A state message specifying the overall job state.
         "stateStartTime": "A String", # Output-only The time when this state was entered.
+        "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
         "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
       },
       "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -1371,10 +1382,10 @@
       "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
       "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
         "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-        "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+        "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
           "A String",
         ],
-        "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+        "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
           "A String",
         ],
         "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1398,10 +1409,10 @@
       "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
       "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
       "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-        "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+        "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
           "A String",
         ],
-        "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+        "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
           "A String",
         ],
         "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1429,6 +1440,7 @@
         { # Cloud Dataproc job status.
           "state": "A String", # Required A state message specifying the overall job state.
           "stateStartTime": "A String", # Output-only The time when this state was entered.
+          "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
           "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
         },
       ],
@@ -1475,6 +1487,7 @@
     "status": { # Cloud Dataproc job status. # Output-only The job status. Additional application-specific status information may be contained in the <code>type_job</code> and <code>yarn_applications</code> fields.
       "state": "A String", # Required A state message specifying the overall job state.
       "stateStartTime": "A String", # Output-only The time when this state was entered.
+      "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
       "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
     },
     "hadoopJob": { # A Cloud Dataproc job for running Hadoop MapReduce jobs on YARN. # Job is a Hadoop job.
@@ -1586,10 +1599,10 @@
     "driverOutputResourceUri": "A String", # Output-only A URI pointing to the location of the stdout of the job's driver program.
     "pysparkJob": { # A Cloud Dataproc job for running PySpark applications on YARN. # Job is a Pyspark job.
       "mainPythonFileUri": "A String", # Required The Hadoop Compatible Filesystem (HCFS) URI of the main Python file to use as the driver. Must be a .py file.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1613,10 +1626,10 @@
     "driverControlFilesUri": "A String", # Output-only If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
     "driverInputResourceUri": "A String", # Output-only A URI pointing to the location of the stdin of the job's driver program, only set if the job is interactive.
     "sparkJob": { # A Cloud Dataproc job for running Spark applications on YARN. # Job is a Spark job.
-      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
+      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
         "A String",
       ],
-      "args": [ # Optional The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+      "jarFileUris": [ # Optional HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
         "A String",
       ],
       "loggingConfiguration": { # The runtime logging configuration of the job. # Optional The runtime log configuration for job execution.
@@ -1644,6 +1657,7 @@
       { # Cloud Dataproc job status.
         "state": "A String", # Required A state message specifying the overall job state.
         "stateStartTime": "A String", # Output-only The time when this state was entered.
+        "substate": "A String", # Output-only Additional state information, which includes status reported by the agent.
         "details": "A String", # Optional Job state details, such as an error description if the state is <code>ERROR</code>.
       },
     ],
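Since every status and statusHistory entry now carries substate, a polling loop can surface agent-reported status directly. A sketch reusing the get() call from earlier; the terminal-state set (DONE, ERROR, CANCELLED) is an assumption about the job lifecycle, not something stated in this diff:

    import time

    def wait_for_job(dataproc, project_id, job_id, poll_secs=10):
        # Poll until the job reaches a terminal state, printing substate as we go.
        terminal = {'DONE', 'ERROR', 'CANCELLED'}  # assumed terminal states
        while True:
            job = dataproc.projects().jobs().get(projectId=project_id,
                                                 jobId=job_id).execute()
            status = job['status']
            print(status['state'], status.get('substate'))
            if status['state'] in terminal:
                return job
            time.sleep(poll_secs)

    # statusHistory entries now record substate transitions as well:
    # for entry in job.get('statusHistory', []):
    #     print(entry['stateStartTime'], entry['state'], entry.get('substate'))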