beam-commits mailing list archives

From al...@apache.org
Subject [4/6] beam git commit: Moving dataflow runner related code to vendor directory
Date Fri, 17 Feb 2017 07:39:21 GMT
http://git-wip-us.apache.org/repos/asf/beam/blob/4337c3ec/sdks/python/apache_beam/internal/clients/dataflow/dataflow_v1b3_messages.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/internal/clients/dataflow/dataflow_v1b3_messages.py
deleted file mode 100644
index a42154e..0000000
--- a/sdks/python/apache_beam/internal/clients/dataflow/dataflow_v1b3_messages.py
+++ /dev/null
@@ -1,4173 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""Generated message classes for dataflow version v1b3.
-
-Develops and executes data processing patterns like ETL, batch computation,
-and continuous computation.
-"""
-# NOTE: This file is autogenerated and should not be edited by hand.
-
-from apitools.base.protorpclite import messages as _messages
-from apitools.base.py import encoding
-from apitools.base.py import extra_types
-
-
-package = 'dataflow'
-
-
-class ApproximateProgress(_messages.Message):
-  """Obsolete in favor of ApproximateReportedProgress and
-  ApproximateSplitRequest.
-
-  Fields:
-    percentComplete: Obsolete.
-    position: Obsolete.
-    remainingTime: Obsolete.
-  """
-
-  percentComplete = _messages.FloatField(1, variant=_messages.Variant.FLOAT)
-  position = _messages.MessageField('Position', 2)
-  remainingTime = _messages.StringField(3)
-
-
-class ApproximateReportedProgress(_messages.Message):
-  """A progress measurement of a WorkItem by a worker.
-
-  Fields:
-    consumedParallelism: Total amount of parallelism in the portion of input
-      of this task that has already been consumed and is no longer active. In
-      the first two examples above (see remaining_parallelism), the value
-      should be 29 or 2 respectively.  The sum of remaining_parallelism and
-      consumed_parallelism should equal the total amount of parallelism in
-      this work item.  If specified, must be finite.
-    fractionConsumed: Completion as fraction of the input consumed, from 0.0
-      (beginning, nothing consumed), to 1.0 (end of the input, entire input
-      consumed).
-    position: A Position within the work to represent progress.
-    remainingParallelism: Total amount of parallelism in the input of this
-      task that remains (i.e. can be delegated to this task and any new tasks
-      via dynamic splitting). Always at least 1 for non-finished work items
-      and 0 for finished.  "Amount of parallelism" refers to how many non-
-      empty parts of the input can be read in parallel. This does not
-      necessarily equal number of records. An input that can be read in
-      parallel down to the individual records is called "perfectly
-      splittable". An example of non-perfectly parallelizable input is a
-      block-compressed file format where a block of records has to be read as
-      a whole, but different blocks can be read in parallel.  Examples:
-      * If we are processing record #30 (starting at 1) out of 50 in a
-        perfectly splittable 50-record input, this value should be 21
-        (20 remaining + 1 current).
-      * If we are reading through block 3 in a block-compressed file
-        consisting of 5 blocks, this value should be 3 (since blocks 4 and 5
-        can be processed in parallel by new tasks via dynamic splitting and
-        the current task remains processing block 3).
-      * If we are reading through the last block in a block-compressed file,
-        or reading or processing the last record in a perfectly splittable
-        input, this value should be 1, because apart from the current task,
-        no additional remainder can be split off.
-  """
-
-  consumedParallelism = _messages.MessageField('ReportedParallelism', 1)
-  fractionConsumed = _messages.FloatField(2)
-  position = _messages.MessageField('Position', 3)
-  remainingParallelism = _messages.MessageField('ReportedParallelism', 4)
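
For orientation, a minimal sketch of how a worker could encode the first
worked example above (record #30 of 50 in a perfectly splittable input). It
assumes the pre-move import path from this diff, and that the
ReportedParallelism message defined further down in this file exposes a float
`value` field:

    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    # 29 of 50 records are fully consumed; 21 remain (20 ahead + 1 current).
    progress = dataflow.ApproximateReportedProgress(
        fractionConsumed=29.0 / 50.0,
        consumedParallelism=dataflow.ReportedParallelism(value=29.0),
        remainingParallelism=dataflow.ReportedParallelism(value=21.0))
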
-
-
-class ApproximateSplitRequest(_messages.Message):
-  """A suggestion by the service to the worker to dynamically split the
-  WorkItem.
-
-  Fields:
-    fractionConsumed: A fraction at which to split the work item, from 0.0
-      (beginning of the input) to 1.0 (end of the input).
-    position: A Position at which to split the work item.
-  """
-
-  fractionConsumed = _messages.FloatField(1)
-  position = _messages.MessageField('Position', 2)
-
-
-class AutoscalingSettings(_messages.Message):
-  """Settings for WorkerPool autoscaling.
-
-  Enums:
-    AlgorithmValueValuesEnum: The algorithm to use for autoscaling.
-
-  Fields:
-    algorithm: The algorithm to use for autoscaling.
-    maxNumWorkers: The maximum number of workers to cap scaling at.
-  """
-
-  class AlgorithmValueValuesEnum(_messages.Enum):
-    """The algorithm to use for autoscaling.
-
-    Values:
-      AUTOSCALING_ALGORITHM_UNKNOWN: The algorithm is unknown, or unspecified.
-      AUTOSCALING_ALGORITHM_NONE: Disable autoscaling.
-      AUTOSCALING_ALGORITHM_BASIC: Increase worker count over time to reduce
-        job execution time.
-    """
-    AUTOSCALING_ALGORITHM_UNKNOWN = 0
-    AUTOSCALING_ALGORITHM_NONE = 1
-    AUTOSCALING_ALGORITHM_BASIC = 2
-
-  algorithm = _messages.EnumField('AlgorithmValueValuesEnum', 1)
-  maxNumWorkers = _messages.IntegerField(2, variant=_messages.Variant.INT32)
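
A hedged sketch of typical usage: enum-valued fields are set from the nested
enum class, and apitools' encoding module can serialize the message to its
JSON wire form (import path is the pre-move one from this diff):

    from apitools.base.py import encoding
    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    settings = dataflow.AutoscalingSettings(
        algorithm=(dataflow.AutoscalingSettings.AlgorithmValueValuesEnum
                   .AUTOSCALING_ALGORITHM_BASIC),
        maxNumWorkers=10)
    # Roughly: {"algorithm": "AUTOSCALING_ALGORITHM_BASIC", "maxNumWorkers": 10}
    print(encoding.MessageToJson(settings))
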
-
-
-class ComputationTopology(_messages.Message):
-  """All configuration data for a particular Computation.
-
-  Fields:
-    computationId: The ID of the computation.
-    inputs: The inputs to the computation.
-    keyRanges: The key ranges processed by the computation.
-    outputs: The outputs from the computation.
-    stateFamilies: The state family values.
-    systemStageName: The system stage name.
-    userStageName: The user stage name.
-  """
-
-  computationId = _messages.StringField(1)
-  inputs = _messages.MessageField('StreamLocation', 2, repeated=True)
-  keyRanges = _messages.MessageField('KeyRangeLocation', 3, repeated=True)
-  outputs = _messages.MessageField('StreamLocation', 4, repeated=True)
-  stateFamilies = _messages.MessageField('StateFamilyConfig', 5, repeated=True)
-  systemStageName = _messages.StringField(6)
-  userStageName = _messages.StringField(7)
-
-
-class ConcatPosition(_messages.Message):
-  """A position that encapsulates an inner position and an index for the inner
-  position. A ConcatPosition can be used by a reader of a source that
-  encapsulates a set of other sources.
-
-  Fields:
-    index: Index of the inner source.
-    position: Position within the inner source.
-  """
-
-  index = _messages.IntegerField(1, variant=_messages.Variant.INT32)
-  position = _messages.MessageField('Position', 2)
-
-
-class CounterMetadata(_messages.Message):
-  """CounterMetadata includes all static non-name non-value counter
-  attributes.
-
-  Enums:
-    KindValueValuesEnum: Counter aggregation kind.
-    StandardUnitsValueValuesEnum: System-defined units; see the enum below.
-
-  Fields:
-    description: Human-readable description of the counter semantics.
-    kind: Counter aggregation kind.
-    otherUnits: A string referring to the unit type.
-    standardUnits: System-defined units; see the enum below.
-  """
-
-  class KindValueValuesEnum(_messages.Enum):
-    """Counter aggregation kind.
-
-    Values:
-      INVALID: Counter aggregation kind was not set.
-      SUM: Aggregated value is the sum of all contributed values.
-      MAX: Aggregated value is the max of all contributed values.
-      MIN: Aggregated value is the min of all contributed values.
-      MEAN: Aggregated value is the mean of all contributed values.
-      OR: Aggregated value represents the logical 'or' of all contributed
-        values.
-      AND: Aggregated value represents the logical 'and' of all contributed
-        values.
-      SET: Aggregated value is a set of unique contributed values.
-      DISTRIBUTION: Aggregated value captures statistics about a distribution.
-    """
-    INVALID = 0
-    SUM = 1
-    MAX = 2
-    MIN = 3
-    MEAN = 4
-    OR = 5
-    AND = 6
-    SET = 7
-    DISTRIBUTION = 8
-
-  class StandardUnitsValueValuesEnum(_messages.Enum):
-    """System defined Units, see above enum.
-
-    Values:
-      BYTES: Counter returns a value in bytes.
-      BYTES_PER_SEC: Counter returns a value in bytes per second.
-      MILLISECONDS: Counter returns a value in milliseconds.
-      MICROSECONDS: Counter returns a value in microseconds.
-      NANOSECONDS: Counter returns a value in nanoseconds.
-      TIMESTAMP_MSEC: Counter returns a timestamp in milliseconds.
-      TIMESTAMP_USEC: Counter returns a timestamp in microseconds.
-      TIMESTAMP_NSEC: Counter returns a timestamp in nanoseconds.
-    """
-    BYTES = 0
-    BYTES_PER_SEC = 1
-    MILLISECONDS = 2
-    MICROSECONDS = 3
-    NANOSECONDS = 4
-    TIMESTAMP_MSEC = 5
-    TIMESTAMP_USEC = 6
-    TIMESTAMP_NSEC = 7
-
-  description = _messages.StringField(1)
-  kind = _messages.EnumField('KindValueValuesEnum', 2)
-  otherUnits = _messages.StringField(3)
-  standardUnits = _messages.EnumField('StandardUnitsValueValuesEnum', 4)
-
-
-class CounterStructuredName(_messages.Message):
-  """Identifies a counter within a per-job namespace. Counters whose
-  structured names are the same get merged into a single value for the job.
-
-  Enums:
-    PortionValueValuesEnum: Portion of this counter, either key or value.
-    StandardOriginValueValuesEnum: One of the standard Origins defined above.
-
-  Fields:
-    componentStepName: Name of the optimized step being executed by the
-      workers.
-    executionStepName: Name of the stage. An execution step contains multiple
-      component steps.
-    name: Counter name. Not necessarily globally-unique, but unique within the
-      context of the other fields. Required.
-    originalStepName: System generated name of the original step in the user's
-      graph, before optimization.
-    otherOrigin: A string containing the origin of the counter.
-    portion: Portion of this counter, either key or value.
-    standardOrigin: One of the standard Origins defined above.
-    workerId: ID of a particular worker.
-  """
-
-  class PortionValueValuesEnum(_messages.Enum):
-    """Portion of this counter, either key or value.
-
-    Values:
-      ALL: Counter portion has not been set.
-      KEY: Counter reports a key.
-      VALUE: Counter reports a value.
-    """
-    ALL = 0
-    KEY = 1
-    VALUE = 2
-
-  class StandardOriginValueValuesEnum(_messages.Enum):
-    """One of the standard Origins defined above.
-
-    Values:
-      DATAFLOW: Counter was created by the Dataflow system.
-      USER: Counter was created by the user.
-    """
-    DATAFLOW = 0
-    USER = 1
-
-  componentStepName = _messages.StringField(1)
-  executionStepName = _messages.StringField(2)
-  name = _messages.StringField(3)
-  originalStepName = _messages.StringField(4)
-  otherOrigin = _messages.StringField(5)
-  portion = _messages.EnumField('PortionValueValuesEnum', 6)
-  standardOrigin = _messages.EnumField('StandardOriginValueValuesEnum', 7)
-  workerId = _messages.StringField(8)
-
-
-class CounterStructuredNameAndMetadata(_messages.Message):
-  """A single message which encapsulates structured name and metadata for a
-  given counter.
-
-  Fields:
-    metadata: Metadata associated with a counter.
-    name: Structured name of the counter.
-  """
-
-  metadata = _messages.MessageField('CounterMetadata', 1)
-  name = _messages.MessageField('CounterStructuredName', 2)
-
-
-class CounterUpdate(_messages.Message):
-  """An update to a Counter sent from a worker.
-
-  Fields:
-    boolean: Boolean value for And, Or.
-    cumulative: True if this counter is reported as the total cumulative
-      aggregate value accumulated since the worker started working on this
-      WorkItem. By default this is false, indicating that this counter is
-      reported as a delta.
-    distribution: Distribution data
-    floatingPoint: Floating point value for Sum, Max, Min.
-    floatingPointList: List of floating point numbers, for Set.
-    floatingPointMean: Floating point mean aggregation value for Mean.
-    integer: Integer value for Sum, Max, Min.
-    integerList: List of integers, for Set.
-    integerMean: Integer mean aggregation value for Mean.
-    internal: Value for internally-defined counters used by the Dataflow
-      service.
-    nameAndKind: Counter name and aggregation type.
-    shortId: The service-generated short identifier for this counter. The
-      short_id -> (name, metadata) mapping is constant for the lifetime of a
-      job.
-    stringList: List of strings, for Set.
-    structuredNameAndMetadata: Counter structured name and metadata.
-  """
-
-  boolean = _messages.BooleanField(1)
-  cumulative = _messages.BooleanField(2)
-  distribution = _messages.MessageField('DistributionUpdate', 3)
-  floatingPoint = _messages.FloatField(4)
-  floatingPointList = _messages.MessageField('FloatingPointList', 5)
-  floatingPointMean = _messages.MessageField('FloatingPointMean', 6)
-  integer = _messages.MessageField('SplitInt64', 7)
-  integerList = _messages.MessageField('IntegerList', 8)
-  integerMean = _messages.MessageField('IntegerMean', 9)
-  internal = _messages.MessageField('extra_types.JsonValue', 10)
-  nameAndKind = _messages.MessageField('NameAndKind', 11)
-  shortId = _messages.IntegerField(12)
-  stringList = _messages.MessageField('StringList', 13)
-  structuredNameAndMetadata = _messages.MessageField('CounterStructuredNameAndMetadata', 14)
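
Tying the counter messages above together, a cumulative SUM counter in bytes,
reported with a structured name; a sketch using only classes shown in this
file, with purely illustrative step and counter names:

    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    name = dataflow.CounterStructuredName(
        name='bytes_written',  # illustrative counter name
        originalStepName='s2',  # illustrative step name
        standardOrigin=(dataflow.CounterStructuredName
                        .StandardOriginValueValuesEnum.USER))
    metadata = dataflow.CounterMetadata(
        kind=dataflow.CounterMetadata.KindValueValuesEnum.SUM,
        standardUnits=dataflow.CounterMetadata.StandardUnitsValueValuesEnum.BYTES)
    update = dataflow.CounterUpdate(
        cumulative=True,  # total since the worker started this WorkItem
        floatingPoint=1024.0,
        structuredNameAndMetadata=dataflow.CounterStructuredNameAndMetadata(
            name=name, metadata=metadata))
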
-
-
-class CreateJobFromTemplateRequest(_messages.Message):
-  """A request to create a Cloud Dataflow job from a template.
-
-  Messages:
-    ParametersValue: The runtime parameters to pass to the job.
-
-  Fields:
-    environment: The runtime environment for the job.
-    gcsPath: Required. A Cloud Storage path to the template from which to
-      create the job. Must be a valid Cloud Storage URL, beginning with
-      `gs://`.
-    jobName: Required. The job name to use for the created job.
-    parameters: The runtime parameters to pass to the job.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class ParametersValue(_messages.Message):
-    """The runtime parameters to pass to the job.
-
-    Messages:
-      AdditionalProperty: An additional property for a ParametersValue object.
-
-    Fields:
-      additionalProperties: Additional properties of type ParametersValue
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a ParametersValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: A string attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.StringField(2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  environment = _messages.MessageField('RuntimeEnvironment', 1)
-  gcsPath = _messages.StringField(2)
-  jobName = _messages.StringField(3)
-  parameters = _messages.MessageField('ParametersValue', 4)
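
The MapUnrecognizedFields/AdditionalProperty pattern above is how apitools
represents a string-to-string map. A sketch of building the runtime
parameters map, with a hypothetical bucket and parameter name:

    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    Params = dataflow.CreateJobFromTemplateRequest.ParametersValue
    request = dataflow.CreateJobFromTemplateRequest(
        gcsPath='gs://my-bucket/templates/wordcount',  # hypothetical path
        jobName='wordcount-from-template',
        parameters=Params(additionalProperties=[
            Params.AdditionalProperty(key='inputFile',
                                      value='gs://my-bucket/input.txt'),
        ]))
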
-
-
-class CustomSourceLocation(_messages.Message):
-  """Identifies the location of a custom souce.
-
-  Fields:
-    stateful: Whether this source is stateful.
-  """
-
-  stateful = _messages.BooleanField(1)
-
-
-class DataDiskAssignment(_messages.Message):
-  """Data disk assignment for a given VM instance.
-
-  Fields:
-    dataDisks: Mounted data disks. The order is important: a data disk's
-      0-based index in this list defines which persistent directory the disk
-      is mounted to, for example the list of {
-      "myproject-1014-104817-4c2-harness-0-disk-0" }, {
-      "myproject-1014-104817-4c2-harness-0-disk-1" }.
-    vmInstance: VM instance name the data disks are mounted to, for example
-      "myproject-1014-104817-4c2-harness-0".
-  """
-
-  dataDisks = _messages.StringField(1, repeated=True)
-  vmInstance = _messages.StringField(2)
-
-
-class DataflowProjectsJobsCreateRequest(_messages.Message):
-  """A DataflowProjectsJobsCreateRequest object.
-
-  Enums:
-    ViewValueValuesEnum: The level of information requested in response.
-
-  Fields:
-    job: A Job resource to be passed as the request body.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-    replaceJobId: Deprecated. This field is now in the Job message.
-    view: The level of information requested in response.
-  """
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  job = _messages.MessageField('Job', 1)
-  location = _messages.StringField(2)
-  projectId = _messages.StringField(3, required=True)
-  replaceJobId = _messages.StringField(4)
-  view = _messages.EnumField('ViewValueValuesEnum', 5)
-
-
-class DataflowProjectsJobsDebugGetConfigRequest(_messages.Message):
-  """A DataflowProjectsJobsDebugGetConfigRequest object.
-
-  Fields:
-    getDebugConfigRequest: A GetDebugConfigRequest resource to be passed as
-      the request body.
-    jobId: The job id.
-    projectId: The project id.
-  """
-
-  getDebugConfigRequest = _messages.MessageField('GetDebugConfigRequest', 1)
-  jobId = _messages.StringField(2, required=True)
-  projectId = _messages.StringField(3, required=True)
-
-
-class DataflowProjectsJobsDebugSendCaptureRequest(_messages.Message):
-  """A DataflowProjectsJobsDebugSendCaptureRequest object.
-
-  Fields:
-    jobId: The job id.
-    projectId: The project id.
-    sendDebugCaptureRequest: A SendDebugCaptureRequest resource to be passed
-      as the request body.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  projectId = _messages.StringField(2, required=True)
-  sendDebugCaptureRequest = _messages.MessageField('SendDebugCaptureRequest', 3)
-
-
-class DataflowProjectsJobsGetMetricsRequest(_messages.Message):
-  """A DataflowProjectsJobsGetMetricsRequest object.
-
-  Fields:
-    jobId: The job to get messages for.
-    location: The location which contains the job specified by job_id.
-    projectId: A project id.
-    startTime: Return only metric data that has changed since this time.
-      Default is to return all information about all metrics for the job.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  location = _messages.StringField(2)
-  projectId = _messages.StringField(3, required=True)
-  startTime = _messages.StringField(4)
-
-
-class DataflowProjectsJobsGetRequest(_messages.Message):
-  """A DataflowProjectsJobsGetRequest object.
-
-  Enums:
-    ViewValueValuesEnum: The level of information requested in response.
-
-  Fields:
-    jobId: The job ID.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-    view: The level of information requested in response.
-  """
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  jobId = _messages.StringField(1, required=True)
-  location = _messages.StringField(2)
-  projectId = _messages.StringField(3, required=True)
-  view = _messages.EnumField('ViewValueValuesEnum', 4)
-
-
-class DataflowProjectsJobsListRequest(_messages.Message):
-  """A DataflowProjectsJobsListRequest object.
-
-  Enums:
-    FilterValueValuesEnum: The kind of filter to use.
-    ViewValueValuesEnum: Level of information requested in response. Default
-      is `JOB_VIEW_SUMMARY`.
-
-  Fields:
-    filter: The kind of filter to use.
-    location: The location that contains this job.
-    pageSize: If there are many jobs, limit response to at most this many. The
-      actual number of jobs returned will be the lesser of max_responses and
-      an unspecified server-defined limit.
-    pageToken: Set this to the 'next_page_token' field of a previous response
-      to request additional results in a long list.
-    projectId: The project which owns the jobs.
-    view: Level of information requested in response. Default is
-      `JOB_VIEW_SUMMARY`.
-  """
-
-  class FilterValueValuesEnum(_messages.Enum):
-    """The kind of filter to use.
-
-    Values:
-      UNKNOWN: <no description>
-      ALL: <no description>
-      TERMINATED: <no description>
-      ACTIVE: <no description>
-    """
-    UNKNOWN = 0
-    ALL = 1
-    TERMINATED = 2
-    ACTIVE = 3
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """Level of information requested in response. Default is
-    `JOB_VIEW_SUMMARY`.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  filter = _messages.EnumField('FilterValueValuesEnum', 1)
-  location = _messages.StringField(2)
-  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
-  pageToken = _messages.StringField(4)
-  projectId = _messages.StringField(5, required=True)
-  view = _messages.EnumField('ViewValueValuesEnum', 6)
-
-
-class DataflowProjectsJobsMessagesListRequest(_messages.Message):
-  """A DataflowProjectsJobsMessagesListRequest object.
-
-  Enums:
-    MinimumImportanceValueValuesEnum: Filter to only get messages with
-      importance >= level
-
-  Fields:
-    endTime: Return only messages with timestamps < end_time. The default is
-      now (i.e. return up to the latest messages available).
-    jobId: The job to get messages about.
-    location: The location which contains the job specified by job_id.
-    minimumImportance: Filter to only get messages with importance >= level
-    pageSize: If specified, determines the maximum number of messages to
-      return.  If unspecified, the service may choose an appropriate default,
-      or may return an arbitrarily large number of results.
-    pageToken: If supplied, this should be the value of next_page_token
-      returned by an earlier call. This will cause the next page of results to
-      be returned.
-    projectId: A project id.
-    startTime: If specified, return only messages with timestamps >=
-      start_time. The default is the job creation time (i.e. beginning of
-      messages).
-  """
-
-  class MinimumImportanceValueValuesEnum(_messages.Enum):
-    """Filter to only get messages with importance >= level
-
-    Values:
-      JOB_MESSAGE_IMPORTANCE_UNKNOWN: <no description>
-      JOB_MESSAGE_DEBUG: <no description>
-      JOB_MESSAGE_DETAILED: <no description>
-      JOB_MESSAGE_BASIC: <no description>
-      JOB_MESSAGE_WARNING: <no description>
-      JOB_MESSAGE_ERROR: <no description>
-    """
-    JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0
-    JOB_MESSAGE_DEBUG = 1
-    JOB_MESSAGE_DETAILED = 2
-    JOB_MESSAGE_BASIC = 3
-    JOB_MESSAGE_WARNING = 4
-    JOB_MESSAGE_ERROR = 5
-
-  endTime = _messages.StringField(1)
-  jobId = _messages.StringField(2, required=True)
-  location = _messages.StringField(3)
-  minimumImportance = _messages.EnumField('MinimumImportanceValueValuesEnum', 4)
-  pageSize = _messages.IntegerField(5, variant=_messages.Variant.INT32)
-  pageToken = _messages.StringField(6)
-  projectId = _messages.StringField(7, required=True)
-  startTime = _messages.StringField(8)
-
-
-class DataflowProjectsJobsUpdateRequest(_messages.Message):
-  """A DataflowProjectsJobsUpdateRequest object.
-
-  Fields:
-    job: A Job resource to be passed as the request body.
-    jobId: The job ID.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-  """
-
-  job = _messages.MessageField('Job', 1)
-  jobId = _messages.StringField(2, required=True)
-  location = _messages.StringField(3)
-  projectId = _messages.StringField(4, required=True)
-
-
-class DataflowProjectsJobsWorkItemsLeaseRequest(_messages.Message):
-  """A DataflowProjectsJobsWorkItemsLeaseRequest object.
-
-  Fields:
-    jobId: Identifies the workflow job this worker belongs to.
-    leaseWorkItemRequest: A LeaseWorkItemRequest resource to be passed as the
-      request body.
-    projectId: Identifies the project this worker belongs to.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  leaseWorkItemRequest = _messages.MessageField('LeaseWorkItemRequest', 2)
-  projectId = _messages.StringField(3, required=True)
-
-
-class DataflowProjectsJobsWorkItemsReportStatusRequest(_messages.Message):
-  """A DataflowProjectsJobsWorkItemsReportStatusRequest object.
-
-  Fields:
-    jobId: The job which the WorkItem is part of.
-    projectId: The project which owns the WorkItem's job.
-    reportWorkItemStatusRequest: A ReportWorkItemStatusRequest resource to be
-      passed as the request body.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  projectId = _messages.StringField(2, required=True)
-  reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 3)
-
-
-class DataflowProjectsLocationsJobsCreateRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsCreateRequest object.
-
-  Enums:
-    ViewValueValuesEnum: The level of information requested in response.
-
-  Fields:
-    job: A Job resource to be passed as the request body.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-    replaceJobId: Deprecated. This field is now in the Job message.
-    view: The level of information requested in response.
-  """
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  job = _messages.MessageField('Job', 1)
-  location = _messages.StringField(2, required=True)
-  projectId = _messages.StringField(3, required=True)
-  replaceJobId = _messages.StringField(4)
-  view = _messages.EnumField('ViewValueValuesEnum', 5)
-
-
-class DataflowProjectsLocationsJobsGetMetricsRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsGetMetricsRequest object.
-
-  Fields:
-    jobId: The job to get messages for.
-    location: The location which contains the job specified by job_id.
-    projectId: A project id.
-    startTime: Return only metric data that has changed since this time.
-      Default is to return all information about all metrics for the job.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  location = _messages.StringField(2, required=True)
-  projectId = _messages.StringField(3, required=True)
-  startTime = _messages.StringField(4)
-
-
-class DataflowProjectsLocationsJobsGetRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsGetRequest object.
-
-  Enums:
-    ViewValueValuesEnum: The level of information requested in response.
-
-  Fields:
-    jobId: The job ID.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-    view: The level of information requested in response.
-  """
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  jobId = _messages.StringField(1, required=True)
-  location = _messages.StringField(2, required=True)
-  projectId = _messages.StringField(3, required=True)
-  view = _messages.EnumField('ViewValueValuesEnum', 4)
-
-
-class DataflowProjectsLocationsJobsListRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsListRequest object.
-
-  Enums:
-    FilterValueValuesEnum: The kind of filter to use.
-    ViewValueValuesEnum: Level of information requested in response. Default
-      is `JOB_VIEW_SUMMARY`.
-
-  Fields:
-    filter: The kind of filter to use.
-    location: The location that contains this job.
-    pageSize: If there are many jobs, limit response to at most this many. The
-      actual number of jobs returned will be the lesser of max_responses and
-      an unspecified server-defined limit.
-    pageToken: Set this to the 'next_page_token' field of a previous response
-      to request additional results in a long list.
-    projectId: The project which owns the jobs.
-    view: Level of information requested in response. Default is
-      `JOB_VIEW_SUMMARY`.
-  """
-
-  class FilterValueValuesEnum(_messages.Enum):
-    """The kind of filter to use.
-
-    Values:
-      UNKNOWN: <no description>
-      ALL: <no description>
-      TERMINATED: <no description>
-      ACTIVE: <no description>
-    """
-    UNKNOWN = 0
-    ALL = 1
-    TERMINATED = 2
-    ACTIVE = 3
-
-  class ViewValueValuesEnum(_messages.Enum):
-    """Level of information requested in response. Default is
-    `JOB_VIEW_SUMMARY`.
-
-    Values:
-      JOB_VIEW_UNKNOWN: <no description>
-      JOB_VIEW_SUMMARY: <no description>
-      JOB_VIEW_ALL: <no description>
-    """
-    JOB_VIEW_UNKNOWN = 0
-    JOB_VIEW_SUMMARY = 1
-    JOB_VIEW_ALL = 2
-
-  filter = _messages.EnumField('FilterValueValuesEnum', 1)
-  location = _messages.StringField(2, required=True)
-  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
-  pageToken = _messages.StringField(4)
-  projectId = _messages.StringField(5, required=True)
-  view = _messages.EnumField('ViewValueValuesEnum', 6)
-
-
-class DataflowProjectsLocationsJobsMessagesListRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsMessagesListRequest object.
-
-  Enums:
-    MinimumImportanceValueValuesEnum: Filter to only get messages with
-      importance >= level
-
-  Fields:
-    endTime: Return only messages with timestamps < end_time. The default is
-      now (i.e. return up to the latest messages available).
-    jobId: The job to get messages about.
-    location: The location which contains the job specified by job_id.
-    minimumImportance: Filter to only get messages with importance >= level
-    pageSize: If specified, determines the maximum number of messages to
-      return.  If unspecified, the service may choose an appropriate default,
-      or may return an arbitrarily large number of results.
-    pageToken: If supplied, this should be the value of next_page_token
-      returned by an earlier call. This will cause the next page of results to
-      be returned.
-    projectId: A project id.
-    startTime: If specified, return only messages with timestamps >=
-      start_time. The default is the job creation time (i.e. beginning of
-      messages).
-  """
-
-  class MinimumImportanceValueValuesEnum(_messages.Enum):
-    """Filter to only get messages with importance >= level
-
-    Values:
-      JOB_MESSAGE_IMPORTANCE_UNKNOWN: <no description>
-      JOB_MESSAGE_DEBUG: <no description>
-      JOB_MESSAGE_DETAILED: <no description>
-      JOB_MESSAGE_BASIC: <no description>
-      JOB_MESSAGE_WARNING: <no description>
-      JOB_MESSAGE_ERROR: <no description>
-    """
-    JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0
-    JOB_MESSAGE_DEBUG = 1
-    JOB_MESSAGE_DETAILED = 2
-    JOB_MESSAGE_BASIC = 3
-    JOB_MESSAGE_WARNING = 4
-    JOB_MESSAGE_ERROR = 5
-
-  endTime = _messages.StringField(1)
-  jobId = _messages.StringField(2, required=True)
-  location = _messages.StringField(3, required=True)
-  minimumImportance = _messages.EnumField('MinimumImportanceValueValuesEnum', 4)
-  pageSize = _messages.IntegerField(5, variant=_messages.Variant.INT32)
-  pageToken = _messages.StringField(6)
-  projectId = _messages.StringField(7, required=True)
-  startTime = _messages.StringField(8)
-
-
-class DataflowProjectsLocationsJobsUpdateRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsUpdateRequest object.
-
-  Fields:
-    job: A Job resource to be passed as the request body.
-    jobId: The job ID.
-    location: The location that contains this job.
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-  """
-
-  job = _messages.MessageField('Job', 1)
-  jobId = _messages.StringField(2, required=True)
-  location = _messages.StringField(3, required=True)
-  projectId = _messages.StringField(4, required=True)
-
-
-class DataflowProjectsLocationsJobsWorkItemsLeaseRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsWorkItemsLeaseRequest object.
-
-  Fields:
-    jobId: Identifies the workflow job this worker belongs to.
-    leaseWorkItemRequest: A LeaseWorkItemRequest resource to be passed as the
-      request body.
-    location: The location which contains the WorkItem's job.
-    projectId: Identifies the project this worker belongs to.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  leaseWorkItemRequest = _messages.MessageField('LeaseWorkItemRequest', 2)
-  location = _messages.StringField(3, required=True)
-  projectId = _messages.StringField(4, required=True)
-
-
-class DataflowProjectsLocationsJobsWorkItemsReportStatusRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsWorkItemsReportStatusRequest object.
-
-  Fields:
-    jobId: The job which the WorkItem is part of.
-    location: The location which contains the WorkItem's job.
-    projectId: The project which owns the WorkItem's job.
-    reportWorkItemStatusRequest: A ReportWorkItemStatusRequest resource to be
-      passed as the request body.
-  """
-
-  jobId = _messages.StringField(1, required=True)
-  location = _messages.StringField(2, required=True)
-  projectId = _messages.StringField(3, required=True)
-  reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 4)
-
-
-class DataflowProjectsTemplatesCreateRequest(_messages.Message):
-  """A DataflowProjectsTemplatesCreateRequest object.
-
-  Fields:
-    createJobFromTemplateRequest: A CreateJobFromTemplateRequest resource to
-      be passed as the request body.
-    projectId: Required. The ID of the Cloud Platform project that the job
-      belongs to.
-  """
-
-  createJobFromTemplateRequest = _messages.MessageField('CreateJobFromTemplateRequest', 1)
-  projectId = _messages.StringField(2, required=True)
-
-
-class DataflowProjectsWorkerMessagesRequest(_messages.Message):
-  """A DataflowProjectsWorkerMessagesRequest object.
-
-  Fields:
-    projectId: The project to send the WorkerMessages to.
-    sendWorkerMessagesRequest: A SendWorkerMessagesRequest resource to be
-      passed as the request body.
-  """
-
-  projectId = _messages.StringField(1, required=True)
-  sendWorkerMessagesRequest = _messages.MessageField('SendWorkerMessagesRequest', 2)
-
-
-class DerivedSource(_messages.Message):
-  """Specification of one of the bundles produced as a result of splitting a
-  Source (e.g. when executing a SourceSplitRequest, or when splitting an
-  active task using WorkItemStatus.dynamic_source_split), relative to the
-  source being split.
-
-  Enums:
-    DerivationModeValueValuesEnum: What source to base the produced source on
-      (if any).
-
-  Fields:
-    derivationMode: What source to base the produced source on (if any).
-    source: Specification of the source.
-  """
-
-  class DerivationModeValueValuesEnum(_messages.Enum):
-    """What source to base the produced source on (if any).
-
-    Values:
-      SOURCE_DERIVATION_MODE_UNKNOWN: The source derivation is unknown, or
-        unspecified.
-      SOURCE_DERIVATION_MODE_INDEPENDENT: Produce a completely independent
-        Source with no base.
-      SOURCE_DERIVATION_MODE_CHILD_OF_CURRENT: Produce a Source based on the
-        Source being split.
-      SOURCE_DERIVATION_MODE_SIBLING_OF_CURRENT: Produce a Source based on the
-        base of the Source being split.
-    """
-    SOURCE_DERIVATION_MODE_UNKNOWN = 0
-    SOURCE_DERIVATION_MODE_INDEPENDENT = 1
-    SOURCE_DERIVATION_MODE_CHILD_OF_CURRENT = 2
-    SOURCE_DERIVATION_MODE_SIBLING_OF_CURRENT = 3
-
-  derivationMode = _messages.EnumField('DerivationModeValueValuesEnum', 1)
-  source = _messages.MessageField('Source', 2)
-
-
-class Disk(_messages.Message):
-  """Describes the data disk used by a workflow job.
-
-  Fields:
-    diskType: Disk storage type, as defined by Google Compute Engine.  This
-      must be a disk type appropriate to the project and zone in which the
-      workers will run.  If unknown or unspecified, the service will attempt
-      to choose a reasonable default.  For example, the standard persistent
-      disk type is a resource name typically ending in "pd-standard".  If SSD
-      persistent disks are available, the resource name typically ends with
-      "pd-ssd".  The actual valid values are defined the Google Compute Engine
-      API, not by the Cloud Dataflow API; consult the Google Compute Engine
-      documentation for more information about determining the set of
-      available disk types for a particular project and zone.  Google Compute
-      Engine Disk types are local to a particular project in a particular
-      zone, and so the resource name will typically look something like this:
-      compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
-    mountPoint: Directory in a VM where disk is mounted.
-    sizeGb: Size of disk in GB.  If zero or unspecified, the service will
-      attempt to choose a reasonable default.
-  """
-
-  diskType = _messages.StringField(1)
-  mountPoint = _messages.StringField(2)
-  sizeGb = _messages.IntegerField(3, variant=_messages.Variant.INT32)
-
-
-class DistributionUpdate(_messages.Message):
-  """A metric value representing a distribution.
-
-  Fields:
-    count: The count of the number of elements present in the distribution.
-    max: The maximum value present in the distribution.
-    min: The minimum value present in the distribution.
-    sum: Use an int64 since we'd prefer the added precision. If overflow is a
-      common problem we can detect it and use an additional int64 or a double.
-    sumOfSquares: Use a double since the sum of squares is likely to overflow
-      int64.
-  """
-
-  count = _messages.MessageField('SplitInt64', 1)
-  max = _messages.MessageField('SplitInt64', 2)
-  min = _messages.MessageField('SplitInt64', 3)
-  sum = _messages.MessageField('SplitInt64', 4)
-  sumOfSquares = _messages.FloatField(5)
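
count/sum/sumOfSquares are sufficient statistics: mean and variance can be
recovered without retaining individual values. Plain-number arithmetic,
setting the SplitInt64 wrapping aside:

    count, total, sum_sq = 4, 20.0, 120.0  # e.g. the values [2, 4, 6, 8]
    mean = total / count                   # 5.0
    variance = sum_sq / count - mean ** 2  # 120/4 - 25 = 5.0
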
-
-
-class DynamicSourceSplit(_messages.Message):
-  """When a task splits using WorkItemStatus.dynamic_source_split, this
-  message describes the two parts of the split relative to the description of
-  the current task's input.
-
-  Fields:
-    primary: Primary part (continued to be processed by worker). Specified
-      relative to the previously-current source. Becomes current.
-    residual: Residual part (returned to the pool of work). Specified relative
-      to the previously-current source.
-  """
-
-  primary = _messages.MessageField('DerivedSource', 1)
-  residual = _messages.MessageField('DerivedSource', 2)
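
A sketch of a dynamic split report; CHILD_OF_CURRENT (a source based on the
Source being split) is used here purely for illustration, and the Source
payloads themselves are omitted:

    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    Mode = dataflow.DerivedSource.DerivationModeValueValuesEnum
    split = dataflow.DynamicSourceSplit(
        primary=dataflow.DerivedSource(
            derivationMode=Mode.SOURCE_DERIVATION_MODE_CHILD_OF_CURRENT),
        residual=dataflow.DerivedSource(
            derivationMode=Mode.SOURCE_DERIVATION_MODE_CHILD_OF_CURRENT))
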
-
-
-class Environment(_messages.Message):
-  """Describes the environment in which a Dataflow Job runs.
-
-  Messages:
-    InternalExperimentsValue: Experimental settings.
-    SdkPipelineOptionsValue: The Cloud Dataflow SDK pipeline options specified
-      by the user. These options are passed through the service and are used
-      to recreate the SDK pipeline options on the worker in a language
-      agnostic and platform independent way.
-    UserAgentValue: A description of the process that generated the request.
-    VersionValue: A structure describing which components and their versions
-      of the service are required in order to run the job.
-
-  Fields:
-    clusterManagerApiService: The type of cluster manager API to use.  If
-      unknown or unspecified, the service will attempt to choose a reasonable
-      default.  This should be in the form of the API service name, e.g.
-      "compute.googleapis.com".
-    dataset: The dataset for the current project where various workflow
-      related tables are stored.  The supported resource type is:  Google
-      BigQuery:   bigquery.googleapis.com/{dataset}
-    experiments: The list of experiments to enable.
-    internalExperiments: Experimental settings.
-    sdkPipelineOptions: The Cloud Dataflow SDK pipeline options specified by
-      the user. These options are passed through the service and are used to
-      recreate the SDK pipeline options on the worker in a language agnostic
-      and platform independent way.
-    serviceAccountEmail: Identity to run virtual machines as. Defaults to the
-      default account.
-    tempStoragePrefix: The prefix of the resources the system should use for
-      temporary storage.  The system will append the suffix "/temp-{JOBNAME}"
-      to this resource prefix, where {JOBNAME} is the value of the job_name
-      field.  The resulting bucket and object prefix is used as the prefix of
-      the resources used to store temporary data needed during the job
-      execution.  NOTE: This will override the value in taskrunner_settings.
-      The supported resource type is:  Google Cloud Storage:
-      storage.googleapis.com/{bucket}/{object}
-      bucket.storage.googleapis.com/{object}
-    userAgent: A description of the process that generated the request.
-    version: A structure describing which components and their versions of the
-      service are required in order to run the job.
-    workerPools: The worker pools. At least one "harness" worker pool must be
-      specified in order for the job to have workers.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class InternalExperimentsValue(_messages.Message):
-    """Experimental settings.
-
-    Messages:
-      AdditionalProperty: An additional property for an
-        InternalExperimentsValue object.
-
-    Fields:
-      additionalProperties: Properties of the object. Contains field @type
-        with type URL.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a InternalExperimentsValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class SdkPipelineOptionsValue(_messages.Message):
-    """The Cloud Dataflow SDK pipeline options specified by the user. These
-    options are passed through the service and are used to recreate the SDK
-    pipeline options on the worker in a language agnostic and platform
-    independent way.
-
-    Messages:
-      AdditionalProperty: An additional property for an SdkPipelineOptionsValue
-        object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a SdkPipelineOptionsValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class UserAgentValue(_messages.Message):
-    """A description of the process that generated the request.
-
-    Messages:
-      AdditionalProperty: An additional property for a UserAgentValue object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a UserAgentValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class VersionValue(_messages.Message):
-    """A structure describing which components and their versions of the
-    service are required in order to run the job.
-
-    Messages:
-      AdditionalProperty: An additional property for a VersionValue object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a VersionValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  clusterManagerApiService = _messages.StringField(1)
-  dataset = _messages.StringField(2)
-  experiments = _messages.StringField(3, repeated=True)
-  internalExperiments = _messages.MessageField('InternalExperimentsValue', 4)
-  sdkPipelineOptions = _messages.MessageField('SdkPipelineOptionsValue', 5)
-  serviceAccountEmail = _messages.StringField(6)
-  tempStoragePrefix = _messages.StringField(7)
-  userAgent = _messages.MessageField('UserAgentValue', 8)
-  version = _messages.MessageField('VersionValue', 9)
-  workerPools = _messages.MessageField('WorkerPool', 10, repeated=True)
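
A minimal Environment sketch; only scalar fields are shown, and a real job
also needs at least one "harness" WorkerPool, defined later in this module.
The bucket, experiment flag, and service account are hypothetical:

    from apache_beam.internal.clients.dataflow import dataflow_v1b3_messages as dataflow

    env = dataflow.Environment(
        tempStoragePrefix='storage.googleapis.com/my-bucket/temp',
        experiments=['some_experiment'],  # hypothetical experiment flag
        serviceAccountEmail='worker@my-project.iam.gserviceaccount.com')
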
-
-
-class FailedLocation(_messages.Message):
-  """Indicates which location failed to respond to a request for data.
-
-  Fields:
-    name: The name of the failed location.
-  """
-
-  name = _messages.StringField(1)
-
-
-class FlattenInstruction(_messages.Message):
-  """An instruction that copies its inputs (zero or more) to its (single)
-  output.
-
-  Fields:
-    inputs: Describes the inputs to the flatten instruction.
-  """
-
-  inputs = _messages.MessageField('InstructionInput', 1, repeated=True)
-
-
-class FloatingPointList(_messages.Message):
-  """A metric value representing a list of floating point numbers.
-
-  Fields:
-    elements: Elements of the list.
-  """
-
-  elements = _messages.FloatField(1, repeated=True)
-
-
-class FloatingPointMean(_messages.Message):
-  """A representation of a floating point mean metric contribution.
-
-  Fields:
-    count: The number of values being aggregated.
-    sum: The sum of all values being aggregated.
-  """
-
-  count = _messages.MessageField('SplitInt64', 1)
-  sum = _messages.FloatField(2)
-
-
-class GetDebugConfigRequest(_messages.Message):
-  """Request to get updated debug configuration for component.
-
-  Fields:
-    componentId: The internal component id for which debug configuration is
-      requested.
-    workerId: The worker id, i.e., VM hostname.
-  """
-
-  componentId = _messages.StringField(1)
-  workerId = _messages.StringField(2)
-
-
-class GetDebugConfigResponse(_messages.Message):
-  """Response to a get debug configuration request.
-
-  Fields:
-    config: The encoded debug configuration for the requested component.
-  """
-
-  config = _messages.StringField(1)
-
-
-class InstructionInput(_messages.Message):
-  """An input of an instruction, as a reference to an output of a producer
-  instruction.
-
-  Fields:
-    outputNum: The output index (origin zero) within the producer.
-    producerInstructionIndex: The index (origin zero) of the parallel
-      instruction that produces the output to be consumed by this input.  This
-      index is relative to the list of instructions in this input's
-      instruction's containing MapTask.
-  """
-
-  outputNum = _messages.IntegerField(1, variant=_messages.Variant.INT32)
-  producerInstructionIndex = _messages.IntegerField(2, variant=_messages.Variant.INT32)
-
-
-class InstructionOutput(_messages.Message):
-  """An output of an instruction.
-
-  Messages:
-    CodecValue: The codec to use to encode data being written via this output.
-
-  Fields:
-    codec: The codec to use to encode data being written via this output.
-    name: The user-provided name of this output.
-    onlyCountKeyBytes: For system-generated byte and mean byte metrics,
-      certain instructions should only report the key size.
-    onlyCountValueBytes: For system-generated byte and mean byte metrics,
-      certain instructions should only report the value size.
-    originalName: System-defined name for this output in the original workflow
-      graph. Outputs that do not contribute to an original instruction do not
-      set this.
-    systemName: System-defined name of this output. Unique across the
-      workflow.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class CodecValue(_messages.Message):
-    """The codec to use to encode data being written via this output.
-
-    Messages:
-      AdditionalProperty: An additional property for a CodecValue object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a CodecValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  codec = _messages.MessageField('CodecValue', 1)
-  name = _messages.StringField(2)
-  onlyCountKeyBytes = _messages.BooleanField(3)
-  onlyCountValueBytes = _messages.BooleanField(4)
-  originalName = _messages.StringField(5)
-  systemName = _messages.StringField(6)
-
-
-class IntegerList(_messages.Message):
-  """A metric value representing a list of integers.
-
-  Fields:
-    elements: Elements of the list.
-  """
-
-  elements = _messages.MessageField('SplitInt64', 1, repeated=True)
-
-
-class IntegerMean(_messages.Message):
-  """A representation of an integer mean metric contribution.
-
-  Fields:
-    count: The number of values being aggregated.
-    sum: The sum of all values being aggregated.
-  """
-
-  count = _messages.MessageField('SplitInt64', 1)
-  sum = _messages.MessageField('SplitInt64', 2)
-
-
-class Job(_messages.Message):
-  """Defines a job to be run by the Cloud Dataflow service.
-
-  Enums:
-    CurrentStateValueValuesEnum: The current state of the job.  Jobs are
-      created in the `JOB_STATE_STOPPED` state unless otherwise specified.  A
-      job in the `JOB_STATE_RUNNING` state may asynchronously enter a terminal
-      state. After a job has reached a terminal state, no further state
-      updates may be made.  This field may be mutated by the Cloud Dataflow
-      service; callers cannot mutate it.
-    RequestedStateValueValuesEnum: The job's requested state.  `UpdateJob` may
-      be used to switch between the `JOB_STATE_STOPPED` and
-      `JOB_STATE_RUNNING` states, by setting requested_state.  `UpdateJob` may
-      also be used to directly set a job's requested state to
-      `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the
-      job if it has not already reached a terminal state.
-    TypeValueValuesEnum: The type of Cloud Dataflow job.
-
-  Messages:
-    LabelsValue: User-defined labels for this job.  The labels map can contain
-      no more than 64 entries.  Entries of the labels map are UTF8 strings
-      that comply with the following restrictions:  * Keys must conform to
-      regexp:  \p{Ll}\p{Lo}{0,62} * Values must conform to regexp:
-      [\p{Ll}\p{Lo}\p{N}_-]{0,63} * Both keys and values are additionally
-      constrained to be <= 128 bytes in size.
-    TransformNameMappingValue: The map of transform name prefixes of the job
-      to be replaced to the corresponding name prefixes of the new job.
-
-  Fields:
-    clientRequestId: The client's unique identifier of the job, re-used across
-      retried attempts. If this field is set, the service will ensure its
-      uniqueness. The request to create a job will fail if the service has
-      knowledge of a previously submitted job with the same client's ID and
-      job name. The caller may use this field to ensure idempotence of job
-      creation across retried attempts to create a job. By default, the field
-      is empty and, in that case, the service ignores it.
-    createTime: The timestamp when the job was initially created. Immutable
-      and set by the Cloud Dataflow service.
-    currentState: The current state of the job.  Jobs are created in the
-      `JOB_STATE_STOPPED` state unless otherwise specified.  A job in the
-      `JOB_STATE_RUNNING` state may asynchronously enter a terminal state.
-      After a job has reached a terminal state, no further state updates may
-      be made.  This field may be mutated by the Cloud Dataflow service;
-      callers cannot mutate it.
-    currentStateTime: The timestamp associated with the current state.
-    environment: The environment for the job.
-    executionInfo: Information about how the Cloud Dataflow service will run
-      the job.
-    id: The unique ID of this job.  This field is set by the Cloud Dataflow
-      service when the Job is created, and is immutable for the life of the
-      job.
-    labels: User-defined labels for this job.  The labels map can contain no
-      more than 64 entries.  Entries of the labels map are UTF8 strings that
-      comply with the following restrictions:  * Keys must conform to regexp:
-      \p{Ll}\p{Lo}{0,62} * Values must conform to regexp:
-      [\p{Ll}\p{Lo}\p{N}_-]{0,63} * Both keys and values are additionally
-      constrained to be <= 128 bytes in size.
-    location: The location that contains this job.
-    name: The user-specified Cloud Dataflow job name.  Only one Job with a
-      given name may exist in a project at any given time. If a caller
-      attempts to create a Job with the same name as an already-existing Job,
-      the attempt returns the existing Job.  The name must match the regular
-      expression `[a-z]([-a-z0-9]{0,38}[a-z0-9])?`
-    projectId: The ID of the Cloud Platform project that the job belongs to.
-    replaceJobId: If this job is an update of an existing job, this field is
-      the job ID of the job it replaced.  When sending a `CreateJobRequest`,
-      you can update a job by specifying it here. The job named here is
-      stopped, and its intermediate state is transferred to this job.
-    replacedByJobId: If another job is an update of this job (and thus, this
-      job is in `JOB_STATE_UPDATED`), this field contains the ID of that job.
-    requestedState: The job's requested state.  `UpdateJob` may be used to
-      switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states,
-      by setting requested_state.  `UpdateJob` may also be used to directly
-      set a job's requested state to `JOB_STATE_CANCELLED` or
-      `JOB_STATE_DONE`, irrevocably terminating the job if it has not already
-      reached a terminal state.
-    steps: The top-level steps that constitute the entire job.
-    tempFiles: A set of files the system should be aware of that are used for
-      temporary storage. These temporary files will be removed on job
-      completion. No duplicates are allowed. No file patterns are supported.
-      The supported files are:  Google Cloud Storage:
-      storage.googleapis.com/{bucket}/{object}
-      bucket.storage.googleapis.com/{object}
-    transformNameMapping: The map from transform name prefixes of the job
-      being replaced to the corresponding name prefixes of the new job.
-    type: The type of Cloud Dataflow job.
-  """
-
-  class CurrentStateValueValuesEnum(_messages.Enum):
-    """The current state of the job.  Jobs are created in the
-    `JOB_STATE_STOPPED` state unless otherwise specified.  A job in the
-    `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After
-    a job has reached a terminal state, no further state updates may be made.
-    This field may be mutated by the Cloud Dataflow service; callers cannot
-    mutate it.
-
-    Values:
-      JOB_STATE_UNKNOWN: The job's run state isn't specified.
-      JOB_STATE_STOPPED: `JOB_STATE_STOPPED` indicates that the job has not
-        yet started to run.
-      JOB_STATE_RUNNING: `JOB_STATE_RUNNING` indicates that the job is
-        currently running.
-      JOB_STATE_DONE: `JOB_STATE_DONE` indicates that the job has successfully
-        completed. This is a terminal job state.  This state may be set by the
-        Cloud Dataflow service, as a transition from `JOB_STATE_RUNNING`. It
-        may also be set via a Cloud Dataflow `UpdateJob` call, if the job has
-        not yet reached a terminal state.
-      JOB_STATE_FAILED: `JOB_STATE_FAILED` indicates that the job has failed.
-        This is a terminal job state.  This state may only be set by the Cloud
-        Dataflow service, and only as a transition from `JOB_STATE_RUNNING`.
-      JOB_STATE_CANCELLED: `JOB_STATE_CANCELLED` indicates that the job has
-        been explicitly cancelled. This is a terminal job state. This state
-        may only be set via a Cloud Dataflow `UpdateJob` call, and only if the
-        job has not yet reached another terminal state.
-      JOB_STATE_UPDATED: `JOB_STATE_UPDATED` indicates that the job was
-        successfully updated, meaning that this job was stopped and another
-        job was started, inheriting state from this one. This is a terminal
-        job state. This state may only be set by the Cloud Dataflow service,
-        and only as a transition from `JOB_STATE_RUNNING`.
-      JOB_STATE_DRAINING: `JOB_STATE_DRAINING` indicates that the job is in
-        the process of draining. A draining job has stopped pulling from its
-        input sources and is processing any data that remains in-flight. This
-        state may be set via a Cloud Dataflow `UpdateJob` call, but only as a
-        transition from `JOB_STATE_RUNNING`. Jobs that are draining may only
-        transition to `JOB_STATE_DRAINED`, `JOB_STATE_CANCELLED`, or
-        `JOB_STATE_FAILED`.
-      JOB_STATE_DRAINED: `JOB_STATE_DRAINED` indicates that the job has been
-        drained. A drained job terminated after it stopped pulling from its
-        input sources and processed any data that remained in-flight when
-        draining was requested. This state is terminal, may only be set by
-        the Cloud Dataflow service, and only as a transition from
-        `JOB_STATE_DRAINING`.
-    """
-    JOB_STATE_UNKNOWN = 0
-    JOB_STATE_STOPPED = 1
-    JOB_STATE_RUNNING = 2
-    JOB_STATE_DONE = 3
-    JOB_STATE_FAILED = 4
-    JOB_STATE_CANCELLED = 5
-    JOB_STATE_UPDATED = 6
-    JOB_STATE_DRAINING = 7
-    JOB_STATE_DRAINED = 8
-
-  class RequestedStateValueValuesEnum(_messages.Enum):
-    """The job's requested state.  `UpdateJob` may be used to switch between
-    the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting
-    requested_state.  `UpdateJob` may also be used to directly set a job's
-    requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably
-    terminating the job if it has not already reached a terminal state.
-
-    Values:
-      JOB_STATE_UNKNOWN: The job's run state isn't specified.
-      JOB_STATE_STOPPED: `JOB_STATE_STOPPED` indicates that the job has not
-        yet started to run.
-      JOB_STATE_RUNNING: `JOB_STATE_RUNNING` indicates that the job is
-        currently running.
-      JOB_STATE_DONE: `JOB_STATE_DONE` indicates that the job has successfully
-        completed. This is a terminal job state.  This state may be set by the
-        Cloud Dataflow service, as a transition from `JOB_STATE_RUNNING`. It
-        may also be set via a Cloud Dataflow `UpdateJob` call, if the job has
-        not yet reached a terminal state.
-      JOB_STATE_FAILED: `JOB_STATE_FAILED` indicates that the job has failed.
-        This is a terminal job state.  This state may only be set by the Cloud
-        Dataflow service, and only as a transition from `JOB_STATE_RUNNING`.
-      JOB_STATE_CANCELLED: `JOB_STATE_CANCELLED` indicates that the job has
-        been explicitly cancelled. This is a terminal job state. This state
-        may only be set via a Cloud Dataflow `UpdateJob` call, and only if the
-        job has not yet reached another terminal state.
-      JOB_STATE_UPDATED: `JOB_STATE_UPDATED` indicates that the job was
-        successfully updated, meaning that this job was stopped and another
-        job was started, inheriting state from this one. This is a terminal
-        job state. This state may only be set by the Cloud Dataflow service,
-        and only as a transition from `JOB_STATE_RUNNING`.
-      JOB_STATE_DRAINING: `JOB_STATE_DRAINING` indicates that the job is in
-        the process of draining. A draining job has stopped pulling from its
-        input sources and is processing any data that remains in-flight. This
-        state may be set via a Cloud Dataflow `UpdateJob` call, but only as a
-        transition from `JOB_STATE_RUNNING`. Jobs that are draining may only
-        transition to `JOB_STATE_DRAINED`, `JOB_STATE_CANCELLED`, or
-        `JOB_STATE_FAILED`.
-      JOB_STATE_DRAINED: `JOB_STATE_DRAINED` indicates that the job has been
-        drained. A drained job terminated after it stopped pulling from its
-        input sources and processed any data that remained in-flight when
-        draining was requested. This state is terminal, may only be set by
-        the Cloud Dataflow service, and only as a transition from
-        `JOB_STATE_DRAINING`.
-    """
-    JOB_STATE_UNKNOWN = 0
-    JOB_STATE_STOPPED = 1
-    JOB_STATE_RUNNING = 2
-    JOB_STATE_DONE = 3
-    JOB_STATE_FAILED = 4
-    JOB_STATE_CANCELLED = 5
-    JOB_STATE_UPDATED = 6
-    JOB_STATE_DRAINING = 7
-    JOB_STATE_DRAINED = 8
-
-  class TypeValueValuesEnum(_messages.Enum):
-    """The type of Cloud Dataflow job.
-
-    Values:
-      JOB_TYPE_UNKNOWN: The type of the job is unspecified, or unknown.
-      JOB_TYPE_BATCH: A batch job with a well-defined end point: data is read,
-        data is processed, data is written, and the job is done.
-      JOB_TYPE_STREAMING: A continuously streaming job with no end: data is
-        read, processed, and written continuously.
-    """
-    JOB_TYPE_UNKNOWN = 0
-    JOB_TYPE_BATCH = 1
-    JOB_TYPE_STREAMING = 2
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class LabelsValue(_messages.Message):
-    """User-defined labels for this job.  The labels map can contain no more
-    than 64 entries.  Entries of the labels map are UTF8 strings that comply
-    with the following restrictions:  * Keys must conform to regexp:
-    \p{Ll}\p{Lo}{0,62} * Values must conform to regexp:
-    [\p{Ll}\p{Lo}\p{N}_-]{0,63} * Both keys and values are additionally
-    constrained to be <= 128 bytes in size.
-
-    Messages:
-      AdditionalProperty: An additional property for a LabelsValue object.
-
-    Fields:
-      additionalProperties: Additional properties of type LabelsValue
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a LabelsValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: A string attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.StringField(2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class TransformNameMappingValue(_messages.Message):
-    """The map of transform name prefixes of the job to be replaced to the
-    corresponding name prefixes of the new job.
-
-    Messages:
-      AdditionalProperty: An additional property for a
-        TransformNameMappingValue object.
-
-    Fields:
-      additionalProperties: Additional properties of type
-        TransformNameMappingValue
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a TransformNameMappingValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: A string attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.StringField(2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  clientRequestId = _messages.StringField(1)
-  createTime = _messages.StringField(2)
-  currentState = _messages.EnumField('CurrentStateValueValuesEnum', 3)
-  currentStateTime = _messages.StringField(4)
-  environment = _messages.MessageField('Environment', 5)
-  executionInfo = _messages.MessageField('JobExecutionInfo', 6)
-  id = _messages.StringField(7)
-  labels = _messages.MessageField('LabelsValue', 8)
-  location = _messages.StringField(9)
-  name = _messages.StringField(10)
-  projectId = _messages.StringField(11)
-  replaceJobId = _messages.StringField(12)
-  replacedByJobId = _messages.StringField(13)
-  requestedState = _messages.EnumField('RequestedStateValueValuesEnum', 14)
-  steps = _messages.MessageField('Step', 15, repeated=True)
-  tempFiles = _messages.StringField(16, repeated=True)
-  transformNameMapping = _messages.MessageField('TransformNameMappingValue', 17)
-  type = _messages.EnumField('TypeValueValuesEnum', 18)
-
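-# Editor's note: an illustrative sketch, not part of the generated API.
-# Map-typed fields such as `labels` above are protorpc messages rather than
-# plain dicts, so a caller builds them from AdditionalProperty entries and
-# lets apitools' encoding serialize them as JSON maps (the project ID, job
-# name, and label below are hypothetical):
-#
-#   job = Job(
-#       name='wordcount-nightly',
-#       projectId='my-project',
-#       type=Job.TypeValueValuesEnum.JOB_TYPE_BATCH,
-#       labels=Job.LabelsValue(additionalProperties=[
-#           Job.LabelsValue.AdditionalProperty(key='team', value='data-eng'),
-#       ]))
-#   payload = encoding.MessageToJson(job)  # labels serialize as a JSON map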
-
-class JobExecutionInfo(_messages.Message):
-  """Additional information about how a Cloud Dataflow job will be executed
-  that isn't contained in the submitted job.
-
-  Messages:
-    StagesValue: A mapping from each stage to the information about that
-      stage.
-
-  Fields:
-    stages: A mapping from each stage to the information about that stage.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class StagesValue(_messages.Message):
-    """A mapping from each stage to the information about that stage.
-
-    Messages:
-      AdditionalProperty: An additional property for a StagesValue object.
-
-    Fields:
-      additionalProperties: Additional properties of type StagesValue
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a StagesValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: A JobExecutionStageInfo attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('JobExecutionStageInfo', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  stages = _messages.MessageField('StagesValue', 1)
-
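-# Editor's note: an illustrative sketch, not part of the generated API.
-# Reading a message-valued map back out is the mirror of building one:
-# iterate over the AdditionalProperty entries to recover a plain dict (the
-# stage name 's01' is hypothetical):
-#
-#   info = JobExecutionInfo(stages=JobExecutionInfo.StagesValue(
-#       additionalProperties=[
-#           JobExecutionInfo.StagesValue.AdditionalProperty(
-#               key='s01', value=JobExecutionStageInfo(stepName=['read'])),
-#       ]))
-#   stages = {p.key: p.value for p in info.stages.additionalProperties}
-#   # `stages` now maps stage name -> JobExecutionStageInfo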
-
-class JobExecutionStageInfo(_messages.Message):
-  """Contains information about how a particular google.dataflow.v1beta3.Step
-  will be executed.
-
-  Fields:
-    stepName: The steps associated with the execution stage. Note that stages
-      may have several steps, and that a given step might be run by more than
-      one stage.
-  """
-
-  stepName = _messages.StringField(1, repeated=True)
-
-
-class JobMessage(_messages.Message):
-  """A particular message pertaining to a Dataflow job.
-
-  Enums:
-    MessageImportanceValueValuesEnum: Importance level of the message.
-
-  Fields:
-    id: Identifies the message.  This is automatically generated by the
-      service; the caller should treat it as an opaque string.
-    messageImportance: Importance level of the message.
-    messageText: The text of the message.
-    time: The timestamp of the message.
-  """
-
-  class MessageImportanceValueValuesEnum(_messages.Enum):
-    """Importance level of the message.
-
-    Values:
-      JOB_MESSAGE_IMPORTANCE_UNKNOWN: The message importance isn't specified,
-        or is unknown.
-      JOB_MESSAGE_DEBUG: The message is at the 'debug' level: typically only
-        useful for software engineers working on the code the job is running.
-        Typically, Dataflow pipeline runners do not display log messages at
-        this level by default.
-      JOB_MESSAGE_DETAILED: The message is at the 'detailed' level: somewhat
-        verbose, but potentially useful to users.  Typically, Dataflow
-        pipeline runners do not display log messages at this level by default.
-        These messages are displayed by default in the Dataflow monitoring UI.
-      JOB_MESSAGE_BASIC: The message is at the 'basic' level: useful for
-        keeping track of the execution of a Dataflow pipeline.  Typically,
-        Dataflow pipeline runners display log messages at this level by
-        default, and these messages are displayed by default in the Dataflow
-        monitoring UI.
-      JOB_MESSAGE_WARNING: The message is at the 'warning' level: indicating a
-        condition pertaining to a job which may require human intervention.
-        Typically, Dataflow pipeline runners display log messages at this
-        level by default, and these messages are displayed by default in the
-        Dataflow monitoring UI.
-      JOB_MESSAGE_ERROR: The message is at the 'error' level: indicating a
-        condition preventing a job from succeeding.  Typically, Dataflow
-        pipeline runners display log messages at this level by default, and
-        these messages are displayed by default in the Dataflow monitoring UI.
-    """
-    JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0
-    JOB_MESSAGE_DEBUG = 1
-    JOB_MESSAGE_DETAILED = 2
-    JOB_MESSAGE_BASIC = 3
-    JOB_MESSAGE_WARNING = 4
-    JOB_MESSAGE_ERROR = 5
-
-  id = _messages.StringField(1)
-  messageImportance = _messages.EnumField('MessageImportanceValueValuesEnum', 2)
-  messageText = _messages.StringField(3)
-  time = _messages.StringField(4)
-
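-# Editor's note: an illustrative sketch, not part of the generated API.
-# Importance levels are ordered from least to most severe, so a caller can
-# filter messages numerically; `response` is assumed to be a
-# ListJobMessagesResponse (defined later in this module):
-#
-#   threshold = JobMessage.MessageImportanceValueValuesEnum.JOB_MESSAGE_WARNING
-#   alerts = [m for m in response.jobMessages
-#             if m.messageImportance is not None
-#             and m.messageImportance.number >= threshold.number]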
-
-class JobMetrics(_messages.Message):
-  """JobMetrics contains a collection of metrics descibing the detailed
-  progress of a Dataflow job. Metrics correspond to user-defined and system-
-  defined metrics in the job.  This resource captures only the most recent
-  values of each metric; time-series data can be queried for them (under the
-  same metric names) from Cloud Monitoring.
-
-  Fields:
-    metricTime: Timestamp as of which metric values are current.
-    metrics: All metrics for this job.
-  """
-
-  metricTime = _messages.StringField(1)
-  metrics = _messages.MessageField('MetricUpdate', 2, repeated=True)
-
-
-class KeyRangeDataDiskAssignment(_messages.Message):
-  """Data disk assignment information for a specific key-range of a sharded
-  computation. Currently we only support UTF-8 character splits to simplify
-  encoding into JSON.
-
-  Fields:
-    dataDisk: The name of the data disk where data for this range is stored.
-      This name is local to the Google Cloud Platform project and uniquely
-      identifies the disk within that project, for example
-      "myproject-1014-104817-4c2-harness-0-disk-1".
-    end: The end (exclusive) of the key range.
-    start: The start (inclusive) of the key range.
-  """
-
-  dataDisk = _messages.StringField(1)
-  end = _messages.StringField(2)
-  start = _messages.StringField(3)
-
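-# Editor's note: an illustrative sketch, not part of the generated API.
-# Because `start` is inclusive and `end` is exclusive, key membership in an
-# assignment's range is a half-open interval test:
-#
-#   def contains(assignment, key):
-#       return assignment.start <= key < assignment.end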
-
-class KeyRangeLocation(_messages.Message):
-  """Location information for a specific key-range of a sharded computation.
-  Currently we only support UTF-8 character splits to simplify encoding into
-  JSON.
-
-  Fields:
-    dataDisk: The name of the data disk where data for this range is stored.
-      This name is local to the Google Cloud Platform project and uniquely
-      identifies the disk within that project, for example
-      "myproject-1014-104817-4c2-harness-0-disk-1".
-    deliveryEndpoint: The physical location of this range assignment to be
-      used for streaming computation cross-worker message delivery.
-    end: The end (exclusive) of the key range.
-    persistentDirectory: The location of the persistent state for this range,
-      as a persistent directory in the worker local filesystem.
-    start: The start (inclusive) of the key range.
-  """
-
-  dataDisk = _messages.StringField(1)
-  deliveryEndpoint = _messages.StringField(2)
-  end = _messages.StringField(3)
-  persistentDirectory = _messages.StringField(4)
-  start = _messages.StringField(5)
-
-
-class LeaseWorkItemRequest(_messages.Message):
-  """Request to lease WorkItems.
-
-  Fields:
-    currentWorkerTime: The current timestamp at the worker.
-    location: The location which contains the WorkItem's job.
-    requestedLeaseDuration: The initial lease period.
-    workItemTypes: Filter for WorkItem type.
-    workerCapabilities: Worker capabilities. WorkItems might be limited to
-      workers with specific capabilities.
-    workerId: Identifies the worker leasing work -- typically the ID of the
-      virtual machine running the worker.
-  """
-
-  currentWorkerTime = _messages.StringField(1)
-  location = _messages.StringField(2)
-  requestedLeaseDuration = _messages.StringField(3)
-  workItemTypes = _messages.StringField(4, repeated=True)
-  workerCapabilities = _messages.StringField(5, repeated=True)
-  workerId = _messages.StringField(6)
-
-
-class LeaseWorkItemResponse(_messages.Message):
-  """Response to a request to lease WorkItems.
-
-  Fields:
-    workItems: A list of the leased WorkItems.
-  """
-
-  workItems = _messages.MessageField('WorkItem', 1, repeated=True)
-
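-# Editor's note: an illustrative sketch, not part of the generated API.
-# A worker might assemble its lease request like this (the worker ID,
-# timestamps, and work item type shown are hypothetical):
-#
-#   request = LeaseWorkItemRequest(
-#       currentWorkerTime='2017-02-17T07:39:21Z',
-#       requestedLeaseDuration='300s',
-#       workItemTypes=['map_task'],
-#       workerId='wordcount-harness-0')
-#   # The matching LeaseWorkItemResponse carries zero or more WorkItems.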
-
-class ListJobMessagesResponse(_messages.Message):
-  """Response to a request to list job messages.
-
-  Fields:
-    jobMessages: Messages in ascending timestamp order.
-    nextPageToken: The token to obtain the next page of results if there are
-      more.
-  """
-
-  jobMessages = _messages.MessageField('JobMessage', 1, repeated=True)
-  nextPageToken = _messages.StringField(2)
-
-
-class ListJobsResponse(_messages.Message):
-  """Response to a request to list Cloud Dataflow jobs.  This may be a partial
-  response, depending on the page size in the ListJobsRequest.
-
-  Fields:
-    failedLocation: Zero or more messages describing locations that failed to
-      respond.
-    jobs: A subset of the requested job information.
-    nextPageToken: Set if there may be more results than fit in this response.
-  """
-
-  failedLocation = _messages.MessageField('FailedLocation', 1, repeated=True)
-  jobs = _messages.MessageField('Job', 2, repeated=True)
-  nextPageToken = _messages.StringField(3)
-
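-# Editor's note: an illustrative sketch, not part of the generated API.
-# `nextPageToken` drives the usual pagination loop; `list_jobs` below is a
-# hypothetical wrapper around the List RPC:
-#
-#   all_jobs, token = [], None
-#   while True:
-#       response = list_jobs(project_id, page_token=token)
-#       all_jobs.extend(response.jobs)
-#       token = response.nextPageToken
-#       if not token:
-#           break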
-
-class MapTask(_messages.Message):
-  """MapTask consists of an ordered set of instructions, each of which
-  describes one particular low-level operation for the worker to perform in
-  order to accomplish the MapTask's WorkItem.  Each instruction must appear in
-  the list before any instructions which depend on its output.
-
-  Fields:
-    instructions: The instructions in the MapTask.
-    stageName: System-defined name of the stage containing this MapTask.
-      Unique across the workflow.
-    systemName: System-defined name of this MapTask. Unique across the
-      workflow.
-  """
-
-  instructions = _messages.MessageField('ParallelInstruction', 1, repeated=True)
-  stageName = _messages.StringField(2)
-  systemName = _messages.StringField(3)
-
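-# Editor's note: an illustrative sketch, not part of the generated API.
-# The ordering constraint above (producers before consumers) can be checked
-# in one pass, assuming each InstructionInput's producerInstructionIndex
-# points back into `instructions`; only ParDo inputs are inspected here,
-# for brevity:
-#
-#   def is_topologically_ordered(map_task):
-#       for i, instruction in enumerate(map_task.instructions):
-#           par_do = instruction.parDo
-#           if par_do is not None and par_do.input is not None:
-#               if (par_do.input.producerInstructionIndex or 0) >= i:
-#                   return False
-#       return True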
-
-class MetricShortId(_messages.Message):
-  """The metric short id is returned to the user alongside an offset into
-  ReportWorkItemStatusRequest.
-
-  Fields:
-    metricIndex: The index of the corresponding metric in the
-      ReportWorkItemStatusRequest. Required.
-    shortId: The service-generated short identifier for the metric.
-  """
-
-  metricIndex = _messages.IntegerField(1, variant=_messages.Variant.INT32)
-  shortId = _messages.IntegerField(2)
-
-
-class MetricStructuredName(_messages.Message):
-  """Identifies a metric, by describing the source which generated the metric.
-
-  Messages:
-    ContextValue: Zero or more labeled fields which identify the part of the
-      job this metric is associated with, such as the name of a step or
-      collection.  For example, built-in counters associated with steps will
-      have context['step'] = <step-name>. Counters associated with
-      PCollections in the SDK will have context['pcollection'] = <pcollection-
-      name>.
-
-  Fields:
-    context: Zero or more labeled fields which identify the part of the job
-      this metric is associated with, such as the name of a step or
-      collection.  For example, built-in counters associated with steps will
-      have context['step'] = <step-name>. Counters associated with
-      PCollections in the SDK will have context['pcollection'] = <pcollection-
-      name>.
-    name: Worker-defined metric name.
-    origin: Origin (namespace) of metric name. May be blank for user-defined
-      metrics; will be "dataflow" for metrics defined by the Dataflow service
-      or SDK.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class ContextValue(_messages.Message):
-    """Zero or more labeled fields which identify the part of the job this
-    metric is associated with, such as the name of a step or collection.  For
-    example, built-in counters associated with steps will have context['step']
-    = <step-name>. Counters associated with PCollections in the SDK will have
-    context['pcollection'] = <pcollection-name>.
-
-    Messages:
-      AdditionalProperty: An additional property for a ContextValue object.
-
-    Fields:
-      additionalProperties: Additional properties of type ContextValue
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a ContextValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: A string attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.StringField(2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  context = _messages.MessageField('ContextValue', 1)
-  name = _messages.StringField(2)
-  origin = _messages.StringField(3)
-
-
-class MetricUpdate(_messages.Message):
-  """Describes the state of a metric.
-
-  Fields:
-    cumulative: True if this metric is reported as the total cumulative
-      aggregate value accumulated since the worker started working on this
-      WorkItem. By default this is false, indicating that this metric is
-      reported as a delta that is not associated with any WorkItem.
-    internal: Worker-computed aggregate value for internal use by the Dataflow
-      service.
-    kind: Metric aggregation kind.  The possible metric aggregation kinds are
-      "Sum", "Max", "Min", "Mean", "Set", "And", and "Or". The specified
-      aggregation kind is case-insensitive.  If omitted, this is not an
-      aggregated value but instead a single metric sample value.
-    meanCount: Worker-computed aggregate value for the "Mean" aggregation
-      kind. This holds the count of the aggregated values and is used in
-      combination with mean_sum below to obtain the actual mean aggregate
-      value. The only possible value type is Long.
-    meanSum: Worker-computed aggregate value for the "Mean" aggregation kind.
-      This holds the sum of the aggregated values and is used in combination
-      with mean_count above to obtain the actual mean aggregate value. The
-      only possible value types are Long and Double.
-    name: Name of the metric.
-    scalar: Worker-computed aggregate value for aggregation kinds "Sum",
-      "Max", "Min", "And", and "Or".  The possible value types are Long,
-      Double, and Boolean.
-    set: Worker-computed aggregate value for the "Set" aggregation kind.  The
-      only possible value type is a list of Values whose type can be Long,
-      Double, or String, according to the metric's type.  All Values in the
-      list must be of the same type.
-    updateTime: Timestamp associated with the metric value. Optional when
-      workers are reporting work progress; it will be filled in responses from
-      the metrics API.
-  """
-
-  cumulative = _messages.BooleanField(1)
-  internal = _messages.MessageField('extra_types.JsonValue', 2)
-  kind = _messages.StringField(3)
-  meanCount = _messages.MessageField('extra_types.JsonValue', 4)
-  meanSum = _messages.MessageField('extra_types.JsonValue', 5)
-  name = _messages.MessageField('MetricStructuredName', 6)
-  scalar = _messages.MessageField('extra_types.JsonValue', 7)
-  set = _messages.MessageField('extra_types.JsonValue', 8)
-  updateTime = _messages.StringField(9)
-
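-# Editor's note: an illustrative sketch, not part of the generated API.
-# For the "Mean" kind the actual mean is recovered from the two halves
-# described above; the unwrapping assumes a Long count and a Double sum:
-#
-#   def mean_value(update):
-#       count = update.meanCount.integer_value
-#       total = update.meanSum.double_value
-#       return total / count if count else None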
-
-class MountedDataDisk(_messages.Message):
-  """Describes mounted data disk.
-
-  Fields:
-    dataDisk: The name of the data disk. This name is local to the Google
-      Cloud Platform project and uniquely identifies the disk within that
-      project, for example "myproject-1014-104817-4c2-harness-0-disk-1".
-  """
-
-  dataDisk = _messages.StringField(1)
-
-
-class MultiOutputInfo(_messages.Message):
-  """Information about an output of a multi-output DoFn.
-
-  Fields:
-    tag: The id of the tag with which the user code will emit to this
-      output; this should correspond to the tag of some SideInputInfo.
-  """
-
-  tag = _messages.StringField(1)
-
-
-class NameAndKind(_messages.Message):
-  """Basic metadata about a counter.
-
-  Enums:
-    KindValueValuesEnum: Counter aggregation kind.
-
-  Fields:
-    kind: Counter aggregation kind.
-    name: Name of the counter.
-  """
-
-  class KindValueValuesEnum(_messages.Enum):
-    """Counter aggregation kind.
-
-    Values:
-      INVALID: Counter aggregation kind was not set.
-      SUM: Aggregated value is the sum of all contributed values.
-      MAX: Aggregated value is the max of all contributed values.
-      MIN: Aggregated value is the min of all contributed values.
-      MEAN: Aggregated value is the mean of all contributed values.
-      OR: Aggregated value represents the logical 'or' of all contributed
-        values.
-      AND: Aggregated value represents the logical 'and' of all contributed
-        values.
-      SET: Aggregated value is a set of unique contributed values.
-      DISTRIBUTION: Aggregated value captures statistics about a distribution.
-    """
-    INVALID = 0
-    SUM = 1
-    MAX = 2
-    MIN = 3
-    MEAN = 4
-    OR = 5
-    AND = 6
-    SET = 7
-    DISTRIBUTION = 8
-
-  kind = _messages.EnumField('KindValueValuesEnum', 1)
-  name = _messages.StringField(2)
-
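-# Editor's note: an illustrative sketch, not part of the generated API.
-# A local reducer keyed on the counter kind might look like:
-#
-#   _KIND = NameAndKind.KindValueValuesEnum
-#   _COMBINERS = {
-#       _KIND.SUM: sum,
-#       _KIND.MAX: max,
-#       _KIND.MIN: min,
-#       _KIND.MEAN: lambda values: sum(values) / len(values),
-#       _KIND.OR: any,
-#       _KIND.AND: all,
-#       _KIND.SET: set,
-#   }
-#   # e.g. _COMBINERS[counter.kind]([3, 5, 4]) -> 12 for a SUM counter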
-
-class Package(_messages.Message):
-  """The packages that must be installed in order for a worker to run the
-  steps of the Cloud Dataflow job that will be assigned to its worker pool.
-  This is the mechanism by which the Cloud Dataflow SDK causes code to be
-  loaded onto the workers. For example, the Cloud Dataflow Java SDK might use
-  this to install jars containing the user's code and all of the various
-  dependencies (libraries, data files, etc.) required in order for that code
-  to run.
-
-  Fields:
-    location: The resource to read the package from. The supported resource
-      type is:  Google Cloud Storage:    storage.googleapis.com/{bucket}
-      bucket.storage.googleapis.com/
-    name: The name of the package.
-  """
-
-  location = _messages.StringField(1)
-  name = _messages.StringField(2)
-
-
-class ParDoInstruction(_messages.Message):
-  """An instruction that does a ParDo operation. Takes one main input and zero
-  or more side inputs, and produces zero or more outputs. Runs user code.
-
-  Messages:
-    UserFnValue: The user function to invoke.
-
-  Fields:
-    input: The input.
-    multiOutputInfos: Information about each of the outputs, if user_fn is a
-      MultiDoFn.
-    numOutputs: The number of outputs.
-    sideInputs: Zero or more side inputs.
-    userFn: The user function to invoke.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class UserFnValue(_messages.Message):
-    """The user function to invoke.
-
-    Messages:
-      AdditionalProperty: An additional property for a UserFnValue object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a UserFnValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  input = _messages.MessageField('InstructionInput', 1)
-  multiOutputInfos = _messages.MessageField('MultiOutputInfo', 2, repeated=True)
-  numOutputs = _messages.IntegerField(3, variant=_messages.Variant.INT32)
-  sideInputs = _messages.MessageField('SideInputInfo', 4, repeated=True)
-  userFn = _messages.MessageField('UserFnValue', 5)
-
-
-class ParallelInstruction(_messages.Message):
-  """Describes a particular operation comprising a MapTask.
-
-  Fields:
-    flatten: Additional information for Flatten instructions.
-    name: User-provided name of this operation.
-    originalName: System-defined name for the operation in the original
-      workflow graph.
-    outputs: Describes the outputs of the instruction.
-    parDo: Additional information for ParDo instructions.
-    partialGroupByKey: Additional information for PartialGroupByKey
-      instructions.
-    read: Additional information for Read instructions.
-    systemName: System-defined name of this operation. Unique across the
-      workflow.
-    write: Additional information for Write instructions.
-  """
-
-  flatten = _messages.MessageField('FlattenInstruction', 1)
-  name = _messages.StringField(2)
-  originalName = _messages.StringField(3)
-  outputs = _messages.MessageField('InstructionOutput', 4, repeated=True)
-  parDo = _messages.MessageField('ParDoInstruction', 5)
-  partialGroupByKey = _messages.MessageField('PartialGroupByKeyInstruction', 6)
-  read = _messages.MessageField('ReadInstruction', 7)
-  systemName = _messages.StringField(8)
-  write = _messages.MessageField('WriteInstruction', 9)
-
-
-class PartialGroupByKeyInstruction(_messages.Message):
-  """An instruction that does a partial group-by-key. One input and one
-  output.
-
-  Messages:
-    InputElementCodecValue: The codec to use for interpreting an element in
-      the input PTable.
-    ValueCombiningFnValue: The value combining function to invoke.
-
-  Fields:
-    input: Describes the input to the partial group-by-key instruction.
-    inputElementCodec: The codec to use for interpreting an element in the
-      input PTable.
-    originalCombineValuesInputStoreName: If this instruction includes a
-      combining function this is the name of the intermediate store between
-      the GBK and the CombineValues.
-    originalCombineValuesStepName: If this instruction includes a combining
-      function, this is the name of the CombineValues instruction lifted into
-      this instruction.
-    sideInputs: Zero or more side inputs.
-    valueCombiningFn: The value combining function to invoke.
-  """
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class InputElementCodecValue(_messages.Message):
-    """The codec to use for interpreting an element in the input PTable.
-
-    Messages:
-      AdditionalProperty: An additional property for an InputElementCodecValue
-        object.
-
-    Fields:
-      additionalProperties: Properties of the object.
-    """
-
-    class AdditionalProperty(_messages.Message):
-      """An additional property for a InputElementCodecValue object.
-
-      Fields:
-        key: Name of the additional property.
-        value: An extra_types.JsonValue attribute.
-      """
-
-      key = _messages.StringField(1)
-      value = _messages.MessageField('extra_types.JsonValue', 2)
-
-    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
-  @encoding.MapUnrecognizedFields('additionalProperties')
-  class ValueCombiningFnValue(_messages.Message):
-    """The value combining function to invoke.
-
-    Messages:
-      AdditionalProperty: An additional property for a ValueCombiningFnValue
-        object.
-
- 

<TRUNCATED>
