"""Generated message classes for bigquerymigration version v2.

The migration service, exposing APIs for migration job operations and agent
management.
"""
# NOTE: This file is autogenerated and should not be edited by hand.

from __future__ import absolute_import

from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types


package = 'bigquerymigration'


class BigquerymigrationProjectsLocationsSubtaskTypesFinishSubtaskRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsSubtaskTypesFinishSubtaskRequest
  object.

  Fields:
    googleCloudBigqueryMigrationV2FinishSubtaskRequest: A
      GoogleCloudBigqueryMigrationV2FinishSubtaskRequest resource to be passed
      as the request body.
    subtaskType: Required. The type of the subtask to update. The name of the
      subtask type to which the task belongs. The project number is owned by
      the worker, not the customer. Example:
      `projects/123/locations/us/subtaskTypes/st1`
  """

  googleCloudBigqueryMigrationV2FinishSubtaskRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2FinishSubtaskRequest', 1)
  subtaskType = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsSubtaskTypesReceiveMigrationSubtaskRequest(_messages.Message):
  r"""A
  BigquerymigrationProjectsLocationsSubtaskTypesReceiveMigrationSubtaskRequest
  object.

  Fields:
    googleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest: A
      GoogleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest resource to
      be passed as the request body.
    projectLocation: Required. The project location for which to assign a
      subtask. The project number is owned by the worker, not the customer.
      Example: `projects/123/locations/us`
  """

  googleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest', 1)
  projectLocation = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsSubtaskTypesRenewSubtaskLeaseRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsSubtaskTypesRenewSubtaskLeaseRequest
  object.

  Fields:
    googleCloudBigqueryMigrationV2RenewSubtaskLeaseRequest: A
      GoogleCloudBigqueryMigrationV2RenewSubtaskLeaseRequest resource to be
      passed as the request body.
    subtaskType: Required. The unique identifier for the migration subtask
      type. The project number is owned by the worker, not the customer.
      Example: `projects/123/locations/us/subtaskTypes/st1`
  """

  googleCloudBigqueryMigrationV2RenewSubtaskLeaseRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2RenewSubtaskLeaseRequest', 1)
  subtaskType = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsTaskTypesFinishMigrationTaskOrchestrationRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsTaskTypesFinishMigrationTaskOrchestr
  ationRequest object.

  Fields:
    googleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest: A
      GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest
      resource to be passed as the request body.
    taskType: Required. The type of the task to update. The name of the task
      type to which the task belongs. The project number is owned by the
      orchestrator, not the customer. Example:
      `projects/123/locations/us/taskTypes/t1`
  """

  googleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest', 1)
  taskType = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsTaskTypesReceiveMigrationTaskRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsTaskTypesReceiveMigrationTaskRequest
  object.

  Fields:
    googleCloudBigqueryMigrationV2ReceiveMigrationTaskRequest: A
      GoogleCloudBigqueryMigrationV2ReceiveMigrationTaskRequest resource to be
      passed as the request body.
    projectLocation: Required. The project location of the task type for which
      to assign a task. The project number is owned by the orchestrator, not
      the customer. Example: `projects/123/locations/us`
  """

  googleCloudBigqueryMigrationV2ReceiveMigrationTaskRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2ReceiveMigrationTaskRequest', 1)
  projectLocation = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsTaskTypesRenewTaskLeaseRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsTaskTypesRenewTaskLeaseRequest
  object.

  Fields:
    googleCloudBigqueryMigrationV2RenewTaskLeaseRequest: A
      GoogleCloudBigqueryMigrationV2RenewTaskLeaseRequest resource to be
      passed as the request body.
    taskType: Required. The unique identifier for the migration task type. The
      project number is owned by the worker, not the customer. Example:
      `projects/123/locations/us/taskTypes/st1`
  """

  googleCloudBigqueryMigrationV2RenewTaskLeaseRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2RenewTaskLeaseRequest', 1)
  taskType = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsTranslateQueryRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsTranslateQueryRequest object.

  Fields:
    googleCloudBigqueryMigrationV2TranslateQueryRequest: A
      GoogleCloudBigqueryMigrationV2TranslateQueryRequest resource to be
      passed as the request body.
    parent: Required. The name of the project to which this translation
      request belongs. Example: `projects/foo/locations/bar`
  """

  googleCloudBigqueryMigrationV2TranslateQueryRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslateQueryRequest', 1)
  parent = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsWorkflowsCreateRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsCreateRequest object.

  Fields:
    googleCloudBigqueryMigrationV2MigrationWorkflow: A
      GoogleCloudBigqueryMigrationV2MigrationWorkflow resource to be passed as
      the request body.
    parent: Required. The name of the project to which this migration workflow
      belongs. Example: `projects/foo/locations/bar`
  """

  googleCloudBigqueryMigrationV2MigrationWorkflow = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationWorkflow', 1)
  parent = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsWorkflowsDeleteRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsDeleteRequest object.

  Fields:
    name: Required. The unique identifier for the migration workflow. Example:
      `projects/123/locations/us/workflows/1234`
  """

  name = _messages.StringField(1, required=True)


class BigquerymigrationProjectsLocationsWorkflowsGetRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsGetRequest object.

  Fields:
    name: Required. The unique identifier for the migration workflow. Example:
      `projects/123/locations/us/workflows/1234`
    readMask: The list of fields to be retrieved.
  """

  name = _messages.StringField(1, required=True)
  readMask = _messages.StringField(2)


class BigquerymigrationProjectsLocationsWorkflowsListRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsListRequest object.

  Fields:
    pageSize: The maximum number of migration workflows to return. The service
      may return fewer than this number.
    pageToken: A page token, received from previous `ListMigrationWorkflows`
      call. Provide this to retrieve the subsequent page. When paginating, all
      other parameters provided to `ListMigrationWorkflows` must match the
      call that provided the page token.
    parent: Required. The project and location of the migration workflows to
      list. Example: `projects/123/locations/us`
    readMask: The list of fields to be retrieved.
  """

  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
  readMask = _messages.StringField(4)


class BigquerymigrationProjectsLocationsWorkflowsStartRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsStartRequest object.

  Fields:
    googleCloudBigqueryMigrationV2StartMigrationWorkflowRequest: A
      GoogleCloudBigqueryMigrationV2StartMigrationWorkflowRequest resource to
      be passed as the request body.
    name: Required. The unique identifier for the migration workflow. Example:
      `projects/123/locations/us/workflows/1234`
  """

  googleCloudBigqueryMigrationV2StartMigrationWorkflowRequest = _messages.MessageField('GoogleCloudBigqueryMigrationV2StartMigrationWorkflowRequest', 1)
  name = _messages.StringField(2, required=True)


class BigquerymigrationProjectsLocationsWorkflowsSubtasksGetRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsSubtasksGetRequest object.

  Fields:
    name: Required. The unique identifier for the migration subtask. Example:
      `projects/123/locations/us/workflows/1234/subtasks/543`
    readMask: Optional. The list of fields to be retrieved.
  """

  name = _messages.StringField(1, required=True)
  readMask = _messages.StringField(2)


class BigquerymigrationProjectsLocationsWorkflowsSubtasksListRequest(_messages.Message):
  r"""A BigquerymigrationProjectsLocationsWorkflowsSubtasksListRequest object.

  Fields:
    filter: Optional. The filter to apply. This can be used to get the
      subtasks of a specific task in a workflow, e.g. `migration_task =
      "ab012"` where `"ab012"` is the task ID (not the name in the named map).
    pageSize: Optional. The maximum number of migration tasks to return. The
      service may return fewer than this number.
    pageToken: Optional. A page token, received from previous
      `ListMigrationSubtasks` call. Provide this to retrieve the subsequent
      page. When paginating, all other parameters provided to
      `ListMigrationSubtasks` must match the call that provided the page
      token.
    parent: Required. The migration task of the subtasks to list. Example:
      `projects/123/locations/us/workflows/1234`
    readMask: Optional. The list of fields to be retrieved.
  """

  filter = _messages.StringField(1)
  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(3)
  parent = _messages.StringField(4, required=True)
  readMask = _messages.StringField(5)
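

# Illustrative sketch (not part of the generated output): building a
# ListMigrationSubtasks request that filters on a task ID, as described for
# the `filter` field above. The parent, task ID, page size, and read mask are
# placeholder values.
def _example_list_subtasks_request():
  return BigquerymigrationProjectsLocationsWorkflowsSubtasksListRequest(
      parent='projects/123/locations/us/workflows/1234',
      filter='migration_task = "ab012"',
      pageSize=50,
      readMask='name,state')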


class GoogleApiDistribution(_messages.Message):
  r"""`Distribution` contains summary statistics for a population of values.
  It optionally contains a histogram representing the distribution of those
  values across a set of buckets. The summary statistics are the count, mean,
  sum of the squared deviation from the mean, the minimum, and the maximum of
  the set of population of values. The histogram is based on a sequence of
  buckets and gives a count of values that fall into each bucket. The
  boundaries of the buckets are given either explicitly or by formulas for
  buckets of fixed or exponentially increasing widths. Although it is not
  forbidden, it is generally a bad idea to include non-finite values
  (infinities or NaNs) in the population of values, as this will render the
  `mean` and `sum_of_squared_deviation` fields meaningless.

  Fields:
    bucketCounts: The number of values in each bucket of the histogram, as
      described in `bucket_options`. If the distribution does not have a
      histogram, then omit this field. If there is a histogram, then the sum
      of the values in `bucket_counts` must equal the value in the `count`
      field of the distribution. If present, `bucket_counts` should contain N
      values, where N is the number of buckets specified in `bucket_options`.
      If you supply fewer than N values, the remaining values are assumed to
      be 0. The order of the values in `bucket_counts` follows the bucket
      numbering schemes described for the three bucket types. The first value
      must be the count for the underflow bucket (number 0). The next N-2
      values are the counts for the finite buckets (number 1 through N-2). The
      N'th value in `bucket_counts` is the count for the overflow bucket
      (number N-1).
    bucketOptions: Defines the histogram bucket boundaries. If the
      distribution does not contain a histogram, then omit this field.
    count: The number of values in the population. Must be non-negative. This
      value must equal the sum of the values in `bucket_counts` if a histogram
      is provided.
    exemplars: Must be in increasing order of `value` field.
    mean: The arithmetic mean of the values in the population. If `count` is
      zero then this field must be zero.
    range: If specified, contains the range of the population values. The
      field must not be present if the `count` is zero.
    sumOfSquaredDeviation: The sum of squared deviations from the mean of the
      values in the population. For values x_i this is: Sum[i=1..n]((x_i -
      mean)^2). Knuth, "The Art of Computer Programming", Vol. 2, page 232, 3rd
      edition describes Welford's method for accumulating this sum in one
      pass. If `count` is zero then this field must be zero.
  """

  bucketCounts = _messages.IntegerField(1, repeated=True)
  bucketOptions = _messages.MessageField('GoogleApiDistributionBucketOptions', 2)
  count = _messages.IntegerField(3)
  exemplars = _messages.MessageField('GoogleApiDistributionExemplar', 4, repeated=True)
  mean = _messages.FloatField(5)
  range = _messages.MessageField('GoogleApiDistributionRange', 6)
  sumOfSquaredDeviation = _messages.FloatField(7)


class GoogleApiDistributionBucketOptions(_messages.Message):
  r"""`BucketOptions` describes the bucket boundaries used to create a
  histogram for the distribution. The buckets can be in a linear sequence, an
  exponential sequence, or each bucket can be specified explicitly.
  `BucketOptions` does not include the number of values in each bucket. A
  bucket has an inclusive lower bound and exclusive upper bound for the values
  that are counted for that bucket. The upper bound of a bucket must be
  strictly greater than the lower bound. The sequence of N buckets for a
  distribution consists of an underflow bucket (number 0), zero or more finite
  buckets (number 1 through N - 2) and an overflow bucket (number N - 1). The
  buckets are contiguous: the lower bound of bucket i (i > 0) is the same as
  the upper bound of bucket i - 1. The buckets span the whole range of finite
  values: lower bound of the underflow bucket is -infinity and the upper bound
  of the overflow bucket is +infinity. The finite buckets are so-called
  because both bounds are finite.

  Fields:
    explicitBuckets: The explicit buckets.
    exponentialBuckets: The exponential buckets.
    linearBuckets: The linear buckets.
  """

  explicitBuckets = _messages.MessageField('GoogleApiDistributionBucketOptionsExplicit', 1)
  exponentialBuckets = _messages.MessageField('GoogleApiDistributionBucketOptionsExponential', 2)
  linearBuckets = _messages.MessageField('GoogleApiDistributionBucketOptionsLinear', 3)


class GoogleApiDistributionBucketOptionsExplicit(_messages.Message):
  r"""Specifies a set of buckets with arbitrary widths. There are
  `size(bounds) + 1` (= N) buckets. Bucket `i` has the following boundaries:
  Upper bound (0 <= i < N-1): bounds[i]. Lower bound (1 <= i < N):
  bounds[i - 1]. The `bounds` field must contain at least one element. If
  `bounds` has
  only one element, then there are no finite buckets, and that single element
  is the common boundary of the overflow and underflow buckets.

  Fields:
    bounds: The values must be monotonically increasing.
  """

  bounds = _messages.FloatField(1, repeated=True)
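

# Illustrative sketch (not part of the generated output): a distribution with
# explicit bucket boundaries. With bounds [0, 10, 100] there are
# size(bounds) + 1 = 4 buckets, so `bucketCounts` carries four values
# (underflow, two finite buckets, overflow) whose sum equals `count`. All
# numbers are made up for illustration.
def _example_explicit_distribution():
  bucket_options = GoogleApiDistributionBucketOptions(
      explicitBuckets=GoogleApiDistributionBucketOptionsExplicit(
          bounds=[0.0, 10.0, 100.0]))
  return GoogleApiDistribution(
      count=6,
      mean=12.5,
      sumOfSquaredDeviation=42.0,
      bucketOptions=bucket_options,
      bucketCounts=[0, 3, 2, 1])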


class GoogleApiDistributionBucketOptionsExponential(_messages.Message):
  r"""Specifies an exponential sequence of buckets that have a width that is
  proportional to the value of the lower bound. Each bucket represents a
  constant relative uncertainty on a specific value in the bucket. There are
  `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the following
  boundaries: Upper bound (0 <= i < N-1): scale * (growth_factor ^ i). Lower
  bound (1 <= i < N): scale * (growth_factor ^ (i - 1)).

  Fields:
    growthFactor: Must be greater than 1.
    numFiniteBuckets: Must be greater than 0.
    scale: Must be greater than 0.
  """

  growthFactor = _messages.FloatField(1)
  numFiniteBuckets = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  scale = _messages.FloatField(3)


class GoogleApiDistributionBucketOptionsLinear(_messages.Message):
  r"""Specifies a linear sequence of buckets that all have the same width
  (except overflow and underflow). Each bucket represents a constant absolute
  uncertainty on the specific value in the bucket. There are
  `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the following
  boundaries: Upper bound (0 <= i < N-1): offset + (width * i). Lower bound (1
  <= i < N): offset + (width * (i - 1)).

  Fields:
    numFiniteBuckets: Must be greater than 0.
    offset: Lower bound of the first bucket.
    width: Must be greater than 0.
  """

  numFiniteBuckets = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  offset = _messages.FloatField(2)
  width = _messages.FloatField(3)
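

# Illustrative sketch (not part of the generated output): computing the finite
# bucket boundaries implied by linear and exponential bucket options, using
# the formulas from the docstrings above (lower bound of finite bucket i is
# offset + width * (i - 1), resp. scale * growth_factor ** (i - 1); the upper
# bound uses i instead of i - 1). The parameter values are arbitrary.
def _example_bucket_boundaries():
  linear = GoogleApiDistributionBucketOptionsLinear(
      numFiniteBuckets=3, offset=0.0, width=10.0)
  exponential = GoogleApiDistributionBucketOptionsExponential(
      numFiniteBuckets=3, scale=1.0, growthFactor=2.0)
  linear_bounds = [
      (linear.offset + linear.width * (i - 1), linear.offset + linear.width * i)
      for i in range(1, linear.numFiniteBuckets + 1)]
  exponential_bounds = [
      (exponential.scale * exponential.growthFactor ** (i - 1),
       exponential.scale * exponential.growthFactor ** i)
      for i in range(1, exponential.numFiniteBuckets + 1)]
  # linear_bounds == [(0.0, 10.0), (10.0, 20.0), (20.0, 30.0)]
  # exponential_bounds == [(1.0, 2.0), (2.0, 4.0), (4.0, 8.0)]
  return linear_bounds, exponential_bounds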


class GoogleApiDistributionExemplar(_messages.Message):
  r"""Exemplars are example points that may be used to annotate aggregated
  distribution values. They are metadata that gives information about a
  particular value added to a Distribution bucket, such as a trace ID that was
  active when a value was added. They may contain further information, such as
  example values and timestamps, origin, etc.

  Messages:
    AttachmentsValueListEntry: An AttachmentsValueListEntry object.

  Fields:
    attachments: Contextual information about the example value. Examples are:
      Trace: type.googleapis.com/google.monitoring.v3.SpanContext Literal
      string: type.googleapis.com/google.protobuf.StringValue Labels dropped
      during aggregation:
      type.googleapis.com/google.monitoring.v3.DroppedLabels There may be only
      a single attachment of any given message type in a single exemplar, and
      this is enforced by the system.
    timestamp: The observation (sampling) time of the above value.
    value: Value of the exemplar point. This value determines to which bucket
      the exemplar belongs.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class AttachmentsValueListEntry(_messages.Message):
    r"""A AttachmentsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for an
        AttachmentsValueListEntry object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a AttachmentsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  attachments = _messages.MessageField('AttachmentsValueListEntry', 1, repeated=True)
  timestamp = _messages.StringField(2)
  value = _messages.FloatField(3)
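

# Illustrative sketch (not part of the generated output): an exemplar point
# with one attachment. Attachments are built from `AdditionalProperty` entries
# whose values are `extra_types.JsonValue` objects; the "@type" payload below
# is only a placeholder.
def _example_exemplar():
  entry_cls = GoogleApiDistributionExemplar.AttachmentsValueListEntry
  attachment = entry_cls(additionalProperties=[
      entry_cls.AdditionalProperty(
          key='@type',
          value=extra_types.JsonValue(
              string_value='type.googleapis.com/google.protobuf.StringValue')),
  ])
  return GoogleApiDistributionExemplar(
      value=12.5,
      timestamp='2024-01-01T00:00:00Z',
      attachments=[attachment])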


class GoogleApiDistributionRange(_messages.Message):
  r"""The range of the population values.

  Fields:
    max: The maximum of the population values.
    min: The minimum of the population values.
  """

  max = _messages.FloatField(1)
  min = _messages.FloatField(2)


class GoogleCloudBigqueryMigrationTasksAssessmentV2alphaAssessmentTaskDetails(_messages.Message):
  r"""DEPRECATED! Use the AssessmentTaskDetails defined in
  com.google.cloud.bigquery.migration.v2alpha.AssessmentTaskDetails instead.
  Assessment task details.

  Fields:
    dataSource: Required. The data source or data warehouse type (e.g.
      TERADATA/REDSHIFT) from which the input data is extracted.
    inputPath: Required. The Cloud Storage path for assessment input files.
    outputDataset: Required. The BigQuery dataset for output.
    querylogsPath: Optional. The Cloud Storage path to write the query logs to
      (these are then used as an input path on the translation task).
  """

  dataSource = _messages.StringField(1)
  inputPath = _messages.StringField(2)
  outputDataset = _messages.StringField(3)
  querylogsPath = _messages.StringField(4)


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaBteqOptions(_messages.Message):
  r"""BTEQ translation task related settings.

  Messages:
    FileReplacementMapValue: Maps the local paths that are used in BTEQ
      scripts (the keys) to the paths in Cloud Storage that should be used in
      their stead in the translation (the value).

  Fields:
    defaultPathUri: The Cloud Storage location to be used as the default path
      for files that are not otherwise specified in the file replacement map.
    fileReplacementMap: Maps the local paths that are used in BTEQ scripts
      (the keys) to the paths in Cloud Storage that should be used in their
      stead in the translation (the value).
    projectDataset: Specifies the project and dataset in BigQuery that will be
      used for external table creation during the translation.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class FileReplacementMapValue(_messages.Message):
    r"""Maps the local paths that are used in BTEQ scripts (the keys) to the
    paths in Cloud Storage that should be used in their stead in the
    translation (the value).

    Messages:
      AdditionalProperty: An additional property for a FileReplacementMapValue
        object.

    Fields:
      additionalProperties: Additional properties of type
        FileReplacementMapValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a FileReplacementMapValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  defaultPathUri = _messages.StringField(1)
  fileReplacementMap = _messages.MessageField('FileReplacementMapValue', 2)
  projectDataset = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaDatasetReference', 3)
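

# Illustrative sketch (not part of the generated output): BTEQ options whose
# file replacement map rewrites a local script path to a Cloud Storage path,
# using the apitools `AdditionalProperty` map pattern. The paths are
# placeholder values.
def _example_bteq_options():
  bteq_cls = GoogleCloudBigqueryMigrationTasksTranslationV2alphaBteqOptions
  replacement_map = bteq_cls.FileReplacementMapValue(additionalProperties=[
      bteq_cls.FileReplacementMapValue.AdditionalProperty(
          key='/local/data/input.csv',
          value='gs://my-bucket/data/input.csv'),
  ])
  return bteq_cls(
      defaultPathUri='gs://my-bucket/bteq/default',
      fileReplacementMap=replacement_map)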


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaDatasetReference(_messages.Message):
  r"""A GoogleCloudBigqueryMigrationTasksTranslationV2alphaDatasetReference
  object.

  Fields:
    datasetId: Required. A unique ID for this dataset, without the project
      name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or
      underscores (_). The maximum length is 1,024 characters.
    projectId: Optional. The ID of the project containing this dataset.
  """

  datasetId = _messages.StringField(1)
  projectId = _messages.StringField(2)


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaFilter(_messages.Message):
  r"""The filter applied to fields of translation details.

  Fields:
    inputFileExclusionPrefixes: The list of prefixes used to exclude
      processing for input files.
  """

  inputFileExclusionPrefixes = _messages.StringField(1, repeated=True)


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaIdentifierSettings(_messages.Message):
  r"""Settings related to SQL identifiers.

  Enums:
    IdentifierRewriteModeValueValuesEnum: Specifies the rewrite mode for SQL
      identifiers.
    OutputIdentifierCaseValueValuesEnum: The setting to control output
      queries' identifier case.

  Fields:
    identifierRewriteMode: Specifies the rewrite mode for SQL identifiers.
    outputIdentifierCase: The setting to control output queries' identifier
      case.
  """

  class IdentifierRewriteModeValueValuesEnum(_messages.Enum):
    r"""Specifies the rewrite mode for SQL identifiers.

    Values:
      IDENTIFIER_REWRITE_MODE_UNSPECIFIED: SQL Identifier rewrite mode is
        unspecified.
      NONE: SQL identifiers won't be rewritten.
      REWRITE_ALL: All SQL identifiers will be rewritten.
    """
    IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0
    NONE = 1
    REWRITE_ALL = 2

  class OutputIdentifierCaseValueValuesEnum(_messages.Enum):
    r"""The setting to control output queries' identifier case.

    Values:
      IDENTIFIER_CASE_UNSPECIFIED: The identifier case is not specified.
      ORIGINAL: Identifiers' case will be kept as in the original.
      UPPER: Identifiers will be in upper case.
      LOWER: Identifiers will be in lower case.
    """
    IDENTIFIER_CASE_UNSPECIFIED = 0
    ORIGINAL = 1
    UPPER = 2
    LOWER = 3

  identifierRewriteMode = _messages.EnumField('IdentifierRewriteModeValueValuesEnum', 1)
  outputIdentifierCase = _messages.EnumField('OutputIdentifierCaseValueValuesEnum', 2)


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaTeradataOptions(_messages.Message):
  r"""Teradata SQL specific translation task related settings."""


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaTranslationFileMapping(_messages.Message):
  r"""Mapping between an input and output file to be translated in a subtask.

  Fields:
    inputPath: The Cloud Storage path for a file to translate in a subtask.
    outputPath: The Cloud Storage path to write back the corresponding input
      file to.
  """

  inputPath = _messages.StringField(1)
  outputPath = _messages.StringField(2)


class GoogleCloudBigqueryMigrationTasksTranslationV2alphaTranslationTaskDetails(_messages.Message):
  r"""DEPRECATED! Use TranslationTaskDetails defined in
  com.google.cloud.bigquery.migration.v2alpha.TranslationTaskDetails instead.
  The translation task details to capture necessary settings for a translation
  task and subtask.

  Enums:
    FileEncodingValueValuesEnum: The file encoding type.

  Messages:
    SpecialTokenMapValue: The map capturing special tokens to be replaced
      during translation. The key is the special token as a string; the value
      is the token's data type. This is used to translate SQL query templates
      that contain special tokens as placeholders, which would otherwise make
      the query invalid to parse. The map annotates those special tokens with
      types so that the parser understands how to parse them into a proper
      structure with type information.

  Fields:
    bteqOptions: The BTEQ specific settings for the translation task.
    fileEncoding: The file encoding type.
    filePaths: Cloud Storage files to be processed for translation.
    filter: The filter applied to translation details.
    identifierSettings: The settings for SQL identifiers.
    inputPath: The Cloud Storage path for translation input files.
    outputPath: The Cloud Storage path for translation output files.
    schemaPath: The Cloud Storage path to DDL files as table schema to assist
      semantic translation.
    specialTokenMap: The map capturing special tokens to be replaced during
      translation. The key is the special token as a string; the value is the
      token's data type. This is used to translate SQL query templates that
      contain special tokens as placeholders, which would otherwise make the
      query invalid to parse. The map annotates those special tokens with
      types so that the parser understands how to parse them into a proper
      structure with type information.
    teradataOptions: The Teradata SQL specific settings for the translation
      task.
    translationExceptionTable: Specifies the exact name of the BigQuery table
      ("dataset.table") to be used for surfacing raw translation errors. If
      the table does not exist, we will create it. If it already exists and
      the schema is the same, we will re-use it. If the table exists and the
      schema is different, we will throw an error.
  """

  class FileEncodingValueValuesEnum(_messages.Enum):
    r"""The file encoding type.

    Values:
      FILE_ENCODING_UNSPECIFIED: File encoding setting is not specified.
      UTF_8: File encoding is UTF_8.
      ISO_8859_1: File encoding is ISO_8859_1.
      US_ASCII: File encoding is US_ASCII.
      UTF_16: File encoding is UTF_16.
      UTF_16LE: File encoding is UTF_16LE.
      UTF_16BE: File encoding is UTF_16BE.
    """
    FILE_ENCODING_UNSPECIFIED = 0
    UTF_8 = 1
    ISO_8859_1 = 2
    US_ASCII = 3
    UTF_16 = 4
    UTF_16LE = 5
    UTF_16BE = 6

  @encoding.MapUnrecognizedFields('additionalProperties')
  class SpecialTokenMapValue(_messages.Message):
    r"""The map capturing special tokens to be replaced during translation.
    The key is special token in string. The value is the token data type. This
    is used to translate SQL query template which contains special token as
    place holder. The special token makes a query invalid to parse. This map
    will be applied to annotate those special token with types to let parser
    understand how to parse them into proper structure with type information.

    Messages:
      AdditionalProperty: An additional property for a SpecialTokenMapValue
        object.

    Fields:
      additionalProperties: Additional properties of type SpecialTokenMapValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a SpecialTokenMapValue object.

      Enums:
        ValueValueValuesEnum:

      Fields:
        key: Name of the additional property.
        value: A ValueValueValuesEnum attribute.
      """

      class ValueValueValuesEnum(_messages.Enum):
        r"""ValueValueValuesEnum enum type.

        Values:
          TOKEN_TYPE_UNSPECIFIED: Token type is not specified.
          STRING: Token type as string.
          INT64: Token type as integer.
          NUMERIC: Token type as numeric.
          BOOL: Token type as boolean.
          FLOAT64: Token type as float.
          DATE: Token type as date.
          TIMESTAMP: Token type as timestamp.
        """
        TOKEN_TYPE_UNSPECIFIED = 0
        STRING = 1
        INT64 = 2
        NUMERIC = 3
        BOOL = 4
        FLOAT64 = 5
        DATE = 6
        TIMESTAMP = 7

      key = _messages.StringField(1)
      value = _messages.EnumField('ValueValueValuesEnum', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  bteqOptions = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaBteqOptions', 1)
  fileEncoding = _messages.EnumField('FileEncodingValueValuesEnum', 2)
  filePaths = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaTranslationFileMapping', 3, repeated=True)
  filter = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaFilter', 4)
  identifierSettings = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaIdentifierSettings', 5)
  inputPath = _messages.StringField(6)
  outputPath = _messages.StringField(7)
  schemaPath = _messages.StringField(8)
  specialTokenMap = _messages.MessageField('SpecialTokenMapValue', 9)
  teradataOptions = _messages.MessageField('GoogleCloudBigqueryMigrationTasksTranslationV2alphaTeradataOptions', 10)
  translationExceptionTable = _messages.StringField(11)
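

# Illustrative sketch (not part of the generated output): a (deprecated)
# v2alpha translation task configuration combining paths, identifier settings,
# and a special token map. The token `__DATE_PARAM__` and all paths are
# placeholder values; the map tells the parser to treat that placeholder as a
# DATE so the otherwise-invalid query template can be parsed.
def _example_translation_task_details():
  details_cls = (
      GoogleCloudBigqueryMigrationTasksTranslationV2alphaTranslationTaskDetails)
  id_cls = GoogleCloudBigqueryMigrationTasksTranslationV2alphaIdentifierSettings
  token_prop_cls = details_cls.SpecialTokenMapValue.AdditionalProperty
  token_map = details_cls.SpecialTokenMapValue(additionalProperties=[
      token_prop_cls(
          key='__DATE_PARAM__',
          value=token_prop_cls.ValueValueValuesEnum.DATE),
  ])
  return details_cls(
      fileEncoding=details_cls.FileEncodingValueValuesEnum.UTF_8,
      inputPath='gs://my-bucket/input',
      outputPath='gs://my-bucket/output',
      schemaPath='gs://my-bucket/ddl',
      identifierSettings=id_cls(
          identifierRewriteMode=(
              id_cls.IdentifierRewriteModeValueValuesEnum.REWRITE_ALL),
          outputIdentifierCase=(
              id_cls.OutputIdentifierCaseValueValuesEnum.LOWER)),
      specialTokenMap=token_map)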


class GoogleCloudBigqueryMigrationV2AssessmentFeatureHandle(_messages.Message):
  r"""User-definable feature flags for assessment tasks.

  Fields:
    addShareableDataset: Optional. Whether to create a dataset containing non-
      PII data in addition to the output dataset.
  """

  addShareableDataset = _messages.BooleanField(1)


class GoogleCloudBigqueryMigrationV2AssessmentTaskDetails(_messages.Message):
  r"""Assessment task config.

  Fields:
    dataSource: Required. The data source or data warehouse type (e.g.
      TERADATA/REDSHIFT) from which the input data is extracted.
    featureHandle: Optional. A collection of additional feature flags for this
      assessment.
    inputPath: Required. The Cloud Storage path for assessment input files.
    outputDataset: Required. The BigQuery dataset for output.
    querylogsPath: Optional. The Cloud Storage path to write the query logs to
      (these are then used as an input path on the translation task).
  """

  dataSource = _messages.StringField(1)
  featureHandle = _messages.MessageField('GoogleCloudBigqueryMigrationV2AssessmentFeatureHandle', 2)
  inputPath = _messages.StringField(3)
  outputDataset = _messages.StringField(4)
  querylogsPath = _messages.StringField(5)


class GoogleCloudBigqueryMigrationV2AssignmentInfo(_messages.Message):
  r"""Information about assignments.

  Fields:
    executionCount: A counter tracking how often an item was assigned for
      execution.
    firstAssignedTime: Time when the task / subtask was first assigned. Empty
      if not assigned yet.
    lastAssignedTime: Time when the task / subtask was last assigned. Empty if
      not assigned yet.
  """

  executionCount = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  firstAssignedTime = _messages.StringField(2)
  lastAssignedTime = _messages.StringField(3)


class GoogleCloudBigqueryMigrationV2AzureSynapseDialect(_messages.Message):
  r"""The dialect definition for Azure Synapse."""


class GoogleCloudBigqueryMigrationV2BigQueryDialect(_messages.Message):
  r"""The dialect definition for BigQuery."""


class GoogleCloudBigqueryMigrationV2DB2Dialect(_messages.Message):
  r"""The dialect definition for DB2."""


class GoogleCloudBigqueryMigrationV2Dialect(_messages.Message):
  r"""The possible dialect options for translation.

  Fields:
    azureSynapseDialect: The Azure Synapse dialect
    bigqueryDialect: The BigQuery dialect
    db2Dialect: DB2 dialect
    greenplumDialect: Greenplum dialect
    hiveqlDialect: The HiveQL dialect
    impalaDialect: Impala dialect
    mysqlDialect: The MySQL dialect
    netezzaDialect: The Netezza dialect
    oracleDialect: The Oracle dialect
    postgresqlDialect: The Postgresql dialect
    prestoDialect: The Presto dialect
    redshiftDialect: The Redshift dialect
    snowflakeDialect: The Snowflake dialect
    sparksqlDialect: The SparkSQL dialect
    sqlServerDialect: The SQL Server dialect
    sqliteDialect: SQLite dialect
    teradataDialect: The Teradata dialect
    verticaDialect: The Vertica dialect
  """

  azureSynapseDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2AzureSynapseDialect', 1)
  bigqueryDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2BigQueryDialect', 2)
  db2Dialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2DB2Dialect', 3)
  greenplumDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2GreenplumDialect', 4)
  hiveqlDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2HiveQLDialect', 5)
  impalaDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2ImpalaDialect', 6)
  mysqlDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2MySQLDialect', 7)
  netezzaDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2NetezzaDialect', 8)
  oracleDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2OracleDialect', 9)
  postgresqlDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2PostgresqlDialect', 10)
  prestoDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2PrestoDialect', 11)
  redshiftDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2RedshiftDialect', 12)
  snowflakeDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2SnowflakeDialect', 13)
  sparksqlDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2SparkSQLDialect', 14)
  sqlServerDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2SQLServerDialect', 15)
  sqliteDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2SQLiteDialect', 16)
  teradataDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2TeradataDialect', 17)
  verticaDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2VerticaDialect', 18)
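

# Illustrative sketch (not part of the generated output): a Dialect message
# with the BigQuery dialect selected. Presumably only one dialect field is set
# on a given instance (for example as a translation source or target); this is
# an assumption, not stated in the generated documentation above.
def _example_bigquery_dialect():
  return GoogleCloudBigqueryMigrationV2Dialect(
      bigqueryDialect=GoogleCloudBigqueryMigrationV2BigQueryDialect())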


class GoogleCloudBigqueryMigrationV2ErrorDetail(_messages.Message):
  r"""Provides details for errors, e.g. issues that where encountered when
  processing a subtask.

  Fields:
    errorInfo: Required. Describes the cause of the error with structured
      detail.
    location: Optional. The exact location within the resource (if
      applicable).
  """

  errorInfo = _messages.MessageField('GoogleRpcErrorInfo', 1)
  location = _messages.MessageField('GoogleCloudBigqueryMigrationV2ErrorLocation', 2)


class GoogleCloudBigqueryMigrationV2ErrorLocation(_messages.Message):
  r"""Holds information about where the error is located.

  Fields:
    column: Optional. If applicable, denotes the column where the error
      occurred. A zero value means that there is no column information.
    line: Optional. If applicable, denotes the line where the error occurred.
      A zero value means that there is no line information.
  """

  column = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  line = _messages.IntegerField(2, variant=_messages.Variant.INT32)


class GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest(_messages.Message):
  r"""A request to transition from ORCHESTRATING to RUNNING via task type.

  Enums:
    StateValueValuesEnum: Required. The terminal state of the subtask.

  Fields:
    metrics: Optional. Metrics to be set for the orchestrated task.
    migrationSubtasks: Optional. A list of subtasks that the service shall
      create before transitioning the task to RUNNING. The operation will fail
      if creating any of the subtasks fails.
    migrationWorkflow: Required. The workflow to update. The name of the
      workflow to which the task belongs.
    orchestratorId: Required. The ID of the orchestrator that processed the
      task. This must match the `orchestrator_id` that currently holds the
      lease for the `MigrationTask` (acquired via `ReceiveMigrationTask`).
    processingError: Optional. A processing error can be provided when the
      finish state is FAILED to let the user know what went wrong.
    result: Optional. Additional result information from the orchestrator.
    state: Required. The terminal state of the subtask.
    taskId: Required. The task ID. Note: This is not the key in the
      workflow's `tasks` field, but the unique ID of the task as defined in
      the task's `id` field.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Required. The terminal state of the subtask.

    Values:
      STATE_UNSPECIFIED: State is unspecified. Clients should not use this.
      SUCCEEDED: The orchestration finished successfully.
      FAILED: The orchestration finished unsuccessfully.
    """
    STATE_UNSPECIFIED = 0
    SUCCEEDED = 1
    FAILED = 2

  metrics = _messages.MessageField('GoogleCloudBigqueryMigrationV2TimeSeries', 1, repeated=True)
  migrationSubtasks = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationSubtask', 2, repeated=True)
  migrationWorkflow = _messages.StringField(3)
  orchestratorId = _messages.StringField(4)
  processingError = _messages.MessageField('GoogleRpcErrorInfo', 5)
  result = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationTaskResult', 6)
  state = _messages.EnumField('StateValueValuesEnum', 7)
  taskId = _messages.StringField(8)
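

# Illustrative sketch (not part of the generated output): how the
# GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest body
# nests inside its request wrapper. Only fields whose message types appear
# earlier in this module are set; the resource names and IDs are placeholder
# values.
def _example_finish_task_orchestration_request():
  body = GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest(
      migrationWorkflow='projects/123/locations/us/workflows/1234',
      taskId='task-abc',
      orchestratorId='orchestrator-1',
      state=(GoogleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest
             .StateValueValuesEnum.SUCCEEDED))
  return BigquerymigrationProjectsLocationsTaskTypesFinishMigrationTaskOrchestrationRequest(
      taskType='projects/123/locations/us/taskTypes/t1',
      googleCloudBigqueryMigrationV2FinishMigrationTaskOrchestrationRequest=body)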


class GoogleCloudBigqueryMigrationV2FinishSubtaskRequest(_messages.Message):
  r"""A request to update a migration subtask by subtask type.

  Enums:
    StateValueValuesEnum: Required. The terminal state of the subtask.

  Fields:
    metrics: Optional. Metrics to be set for the finished subtask.
    migrationSubtask: Required. The unique identifier for the migration
      subtask. Example:
      `projects/123/locations/us/workflows/1234/subtasks/543`
    orchestrationResult: Optional. Additional orchestration result information
      from the subtask.
    processingError: Optional. A processing error can be provided when the
      finish state is FAILED to let the user know what went wrong.
    resourceErrorCount: Optional. The number of resources with errors. Note:
      This is not the total number of errors as each resource can have more
      than one error. This can be used to indicate truncation by having a
      `resource_error_count` that is higher than the size of
      `resource_error_details`.
    resourceErrorDetails: Optional. Provides details of the errors and issues
      encountered while processing the subtask. Presence of error details does
      not mean that the subtask failed. To limit size, there must not be more
      than 100 ErrorDetail objects in total.
    state: Required. The terminal state of the subtask.
    workerId: Required. The ID of the worker that processed the subtask. This
      must match the `worker_id` that currently holds the lease for the
      `MigrationSubtask` (acquired via `ReceiveMigrationSubtask`).
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Required. The terminal state of the subtask.

    Values:
      STATE_UNSPECIFIED: State is unspecified. Clients should not use this.
      SUCCEEDED: The subtask finished successfully.
      FAILED: The subtask finished unsuccessfully.
    """
    STATE_UNSPECIFIED = 0
    SUCCEEDED = 1
    FAILED = 2

  metrics = _messages.MessageField('GoogleCloudBigqueryMigrationV2TimeSeries', 1, repeated=True)
  migrationSubtask = _messages.StringField(2)
  orchestrationResult = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationSubtaskOrchestrationResult', 3)
  processingError = _messages.MessageField('GoogleRpcErrorInfo', 4)
  resourceErrorCount = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  resourceErrorDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2ResourceErrorDetail', 6, repeated=True)
  state = _messages.EnumField('StateValueValuesEnum', 7)
  workerId = _messages.StringField(8)
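

# Illustrative sketch (not part of the generated output): a worker reporting a
# finished subtask, wrapped in the corresponding subtaskTypes request message.
# The `workerId` must match the worker currently holding the subtask lease;
# all names and IDs are placeholder values.
def _example_finish_subtask_request():
  body = GoogleCloudBigqueryMigrationV2FinishSubtaskRequest(
      migrationSubtask='projects/123/locations/us/workflows/1234/subtasks/543',
      workerId='worker-7',
      state=(GoogleCloudBigqueryMigrationV2FinishSubtaskRequest
             .StateValueValuesEnum.SUCCEEDED))
  return BigquerymigrationProjectsLocationsSubtaskTypesFinishSubtaskRequest(
      subtaskType='projects/123/locations/us/subtaskTypes/st1',
      googleCloudBigqueryMigrationV2FinishSubtaskRequest=body)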


class GoogleCloudBigqueryMigrationV2GcsReportLogMessage(_messages.Message):
  r"""A record in the aggregate CSV report for a migration workflow

  Enums:
    RetentionStatusValueValuesEnum: Whether the impacted lines of code are
      DROPPED or RETAINED at this stage.
    SourceTypeValueValuesEnum: The type of the source file.

  Fields:
    action: Category of the error/warning. Example: SyntaxError
    category: Category of the error/warning. Example: SyntaxError
    effect: Effect of the error/warning. Example: COMPATIBILITY
    filePath: The file path in which the error occurred
    filename: The file name in which the error occurred
    message: Detailed message of the record.
    objectName: Name of the affected object in the log message.
    retentionStatus: Whether the impacted lines of code are DROPPED or
      RETAINED at this stage.
    scriptContext: The script context (obfuscated) in which the error occurred
    severity: Severity of the translation record.
    sourceScriptColumn: Specifies the column from the source text where the
      error occurred (0-based, -1 for messages without column location).
      Example: 6
    sourceScriptLine: Specifies the row from the source text where the error
      occurred (0-based, -1 for messages without line location). Example: 2
    sourceType: The type of the source file.
  """

  class RetentionStatusValueValuesEnum(_messages.Enum):
    r"""Whether the impacted lines of code are DROPPED or RETAINED at this
    stage.

    Values:
      RETENTION_STATUS_UNSPECIFIED: Undefined status.
      RETAINED: The impacted code of this issue is retained at this stage.
      DROPPED: The impacted code of this issue is dropped at this stage.
    """
    RETENTION_STATUS_UNSPECIFIED = 0
    RETAINED = 1
    DROPPED = 2

  class SourceTypeValueValuesEnum(_messages.Enum):
    r"""The type of the source file.

    Values:
      SOURCE_TYPE_UNSPECIFIED: Undefined source type.
      SQL: The source is a SQL file.
      LITERAL: The source is a literal.
      METADATA: The source is a metadata file.
      CONFIGURATION: The source is a configuration file.
    """
    SOURCE_TYPE_UNSPECIFIED = 0
    SQL = 1
    LITERAL = 2
    METADATA = 3
    CONFIGURATION = 4

  action = _messages.StringField(1)
  category = _messages.StringField(2)
  effect = _messages.StringField(3)
  filePath = _messages.StringField(4)
  filename = _messages.StringField(5)
  message = _messages.StringField(6)
  objectName = _messages.StringField(7)
  retentionStatus = _messages.EnumField('RetentionStatusValueValuesEnum', 8)
  scriptContext = _messages.StringField(9)
  severity = _messages.StringField(10)
  sourceScriptColumn = _messages.IntegerField(11, variant=_messages.Variant.INT32)
  sourceScriptLine = _messages.IntegerField(12, variant=_messages.Variant.INT32)
  sourceType = _messages.EnumField('SourceTypeValueValuesEnum', 13)


class GoogleCloudBigqueryMigrationV2GreenplumDialect(_messages.Message):
  r"""The dialect definition for Greenplum."""


class GoogleCloudBigqueryMigrationV2HiveQLDialect(_messages.Message):
  r"""The dialect definition for HiveQL."""


class GoogleCloudBigqueryMigrationV2ImpalaDialect(_messages.Message):
  r"""The dialect definition for Impala."""


class GoogleCloudBigqueryMigrationV2Lease(_messages.Message):
  r"""Lease for a task or subtask.

  Fields:
    callerId: The ID of the caller that is assigned the task.
    expireTime: Output only. Timestamp when the lease expires.
  """

  callerId = _messages.StringField(1)
  expireTime = _messages.StringField(2)


class GoogleCloudBigqueryMigrationV2ListMigrationSubtasksResponse(_messages.Message):
  r"""Response object for a `ListMigrationSubtasks` call.

  Fields:
    migrationSubtasks: The migration subtasks for the specified task.
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  migrationSubtasks = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationSubtask', 1, repeated=True)
  nextPageToken = _messages.StringField(2)


class GoogleCloudBigqueryMigrationV2ListMigrationWorkflowsResponse(_messages.Message):
  r"""Response object for a `ListMigrationWorkflows` call.

  Fields:
    migrationWorkflows: The migration workflows for the specified project /
      location.
    nextPageToken: A token, which can be sent as `page_token` to retrieve the
      next page. If this field is omitted, there are no subsequent pages.
  """

  migrationWorkflows = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationWorkflow', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
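

# Illustrative sketch (not part of the generated output): using
# `nextPageToken` from a ListMigrationWorkflows response to build the request
# for the following page. Per the field docs above, every other parameter must
# match the call that produced the token; the parent and page size below are
# placeholder values.
def _example_next_page_request(response, parent='projects/123/locations/us'):
  """Returns the request for the next page, or None if there is none."""
  if not response.nextPageToken:
    return None
  return BigquerymigrationProjectsLocationsWorkflowsListRequest(
      parent=parent,
      pageSize=50,
      pageToken=response.nextPageToken)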


class GoogleCloudBigqueryMigrationV2Literal(_messages.Message):
  r"""Literal data.

  Fields:
    literalBytes: Literal byte data.
    literalString: Literal string data.
    relativePath: Required. The identifier of the literal entry.
  """

  literalBytes = _messages.BytesField(1)
  literalString = _messages.StringField(2)
  relativePath = _messages.StringField(3)


class GoogleCloudBigqueryMigrationV2MigrationSubtask(_messages.Message):
  r"""A subtask for a migration which carries details about the configuration
  of the subtask. The content of the details should not matter to the end
  user, but is a contract between the subtask creator and subtask worker.

  Enums:
    StateValueValuesEnum: Output only. The current state of the subtask.

  Messages:
    DetailsValue: The details of the subtask. This is opaque to the migration
      service and entirely between orchestrator and worker.

  Fields:
    assignmentInfo: Output only. Information about the assignment status.
    createTime: Output only. Time when the subtask was created.
    details: The details of the subtask. This is opaque to the migration
      service and entirely between orchestrator and worker.
    lastUpdateTime: Output only. Time when the subtask was last updated.
    lease: Output only. A lease, if this is currently assigned.
    metrics: Output only. The metrics for the subtask.
    name: Output only. Immutable. The resource name for the migration subtask.
      The ID is server-generated. Example:
      `projects/123/locations/us/workflows/345/subtasks/678`
    processingError: Output only. An explanation that may be populated when
      the task is in FAILED state.
    resourceErrorCount: Output only. The number of resources with errors.
      Note: This is not the total number of errors as each resource can have
      more than one error. This is used to indicate truncation by having a
      `resource_error_count` that is higher than the size of
      `resource_error_details`.
    resourceErrorDetails: Output only. Provides details of the errors and issues
      encountered while processing the subtask. Presence of error details does
      not mean that the subtask failed.
    state: Output only. The current state of the subtask.
    taskId: The unique ID of the task to which this subtask belongs.
    type: The type of the Subtask. The migration service does not check
      whether this is a known type. It is up to the task creator (i.e.
      orchestrator or worker) to ensure it only creates subtasks for which
      there are compatible workers polling for Subtasks.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The current state of the subtask.

    Values:
      STATE_UNSPECIFIED: The state is unspecified.
      ACTIVE: The subtask is ready, i.e. it is ready for execution.
      RUNNING: The subtask is running, i.e. it is assigned to a worker for
        execution.
      SUCCEEDED: The subtask finished successfully.
      FAILED: The subtask finished unsuccessfully.
      PAUSED: The subtask is paused, i.e., it will not be scheduled. If it was
        already assigned, it might still finish but no new lease renewals will
        be granted.
      PENDING_DEPENDENCY: The subtask is pending a dependency. It will be
        scheduled once its dependencies are done.
    """
    STATE_UNSPECIFIED = 0
    ACTIVE = 1
    RUNNING = 2
    SUCCEEDED = 3
    FAILED = 4
    PAUSED = 5
    PENDING_DEPENDENCY = 6

  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValue(_messages.Message):
    r"""The details of the sub task. This is opaque to migration service and
    entirely between orchestrator and worker.

    Messages:
      AdditionalProperty: An additional property for a DetailsValue object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValue object.

      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  assignmentInfo = _messages.MessageField('GoogleCloudBigqueryMigrationV2AssignmentInfo', 1)
  createTime = _messages.StringField(2)
  details = _messages.MessageField('DetailsValue', 3)
  lastUpdateTime = _messages.StringField(4)
  lease = _messages.MessageField('GoogleCloudBigqueryMigrationV2Lease', 5)
  metrics = _messages.MessageField('GoogleCloudBigqueryMigrationV2TimeSeries', 6, repeated=True)
  name = _messages.StringField(7)
  processingError = _messages.MessageField('GoogleRpcErrorInfo', 8)
  resourceErrorCount = _messages.IntegerField(9, variant=_messages.Variant.INT32)
  resourceErrorDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2ResourceErrorDetail', 10, repeated=True)
  state = _messages.EnumField('StateValueValuesEnum', 11)
  taskId = _messages.StringField(12)
  type = _messages.StringField(13)
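

# Illustrative sketch (not part of the generated output): a subtask as an
# orchestrator might describe one, with the opaque `details` map carried as
# `extra_types.JsonValue` entries. The subtask type, task ID, and detail
# values are placeholders agreed between orchestrator and worker.
def _example_migration_subtask():
  details_cls = GoogleCloudBigqueryMigrationV2MigrationSubtask.DetailsValue
  details = details_cls(additionalProperties=[
      details_cls.AdditionalProperty(
          key='inputPath',
          value=extra_types.JsonValue(
              string_value='gs://my-bucket/input/file1.sql')),
  ])
  return GoogleCloudBigqueryMigrationV2MigrationSubtask(
      type='ExampleSubtaskType',
      taskId='task-abc',
      details=details)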


class GoogleCloudBigqueryMigrationV2MigrationSubtaskOrchestrationResult(_messages.Message):
  r"""Additional orchestration information from the subtask at time of
  completion.

  Fields:
    newMigrationSubtasks: Optional. A list of subtasks that the service shall
      create before transitioning the subtask to SUCCEEDED. The operation will
      fail if creating any of the subtasks fails.
  """

  newMigrationSubtasks = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationSubtask', 1, repeated=True)


class GoogleCloudBigqueryMigrationV2MigrationTask(_messages.Message):
  r"""A single task for a migration which has details about the configuration
  of the task.

  Enums:
    StateValueValuesEnum: Output only. The current state of the task.

  Fields:
    assessmentTaskDetails: Task configuration for Assessment.
    assignmentInfo: Output only. Information about the assignment status.
    createTime: Output only. Time when the task was created.
    id: Output only. Immutable. The unique identifier for the migration task.
      The ID is server-generated.
    lastUpdateTime: Output only. Time when the task was last updated.
    lease: Output only. A lease, if this is currently assigned.
    metrics: Output only. The metrics for the task.
    processingError: Output only. An explanation that may be populated when
      the task is in FAILED state.
    resourceErrorCount: Output only. The number of resources with errors.
      Note: This is not the total number of errors as each resource can have
      more than one error. This is used to indicate truncation by having a
      `resource_error_count` that is higher than the size of
      `resource_error_details`.
    resourceErrorDetails: Output only. Provides details of the errors and issues
      encountered while processing the task. Presence of error details does
      not mean that the task failed.
    state: Output only. The current state of the task.
    taskResult: Output only. The result of the task.
    totalProcessingErrorCount: Output only. Count of all the processing errors
      in this task and its subtasks.
    totalResourceErrorCount: Output only. Count of all the resource errors in
      this task and its subtasks.
    translationConfigDetails: Task configuration for CW Batch/Offline SQL
      Translation.
    translationDetails: Task details for unified SQL Translation.
    type: The type of the task. This must be one of the supported task types:
      Translation_Teradata2BQ, Translation_Redshift2BQ, Translation_Bteq2BQ,
      Translation_Oracle2BQ, Translation_HiveQL2BQ, Translation_SparkSQL2BQ,
      Translation_Snowflake2BQ, Translation_Netezza2BQ,
      Translation_AzureSynapse2BQ, Translation_Vertica2BQ,
      Translation_SQLServer2BQ, Translation_SQLServer2Postgresql,
      Translation_Presto2BQ, Translation_MySQL2BQ, Translation_Postgresql2BQ,
      Translation_SQLite2BQ, Translation_Greenplum2BQ, Translation_Impala2BQ.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The current state of the task.

    Values:
      STATE_UNSPECIFIED: The state is unspecified.
      PENDING: The task is waiting for orchestration.
      ORCHESTRATING: The task is assigned to an orchestrator.
      RUNNING: The task is running, i.e. its subtasks are ready for execution.
      PAUSED: The task is paused. Assigned subtasks can continue, but no new
        subtasks will be scheduled.
      SUCCEEDED: The task finished successfully.
      FAILED: The task finished unsuccessfully.
    """
    STATE_UNSPECIFIED = 0
    PENDING = 1
    ORCHESTRATING = 2
    RUNNING = 3
    PAUSED = 4
    SUCCEEDED = 5
    FAILED = 6

  assessmentTaskDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2AssessmentTaskDetails', 1)
  assignmentInfo = _messages.MessageField('GoogleCloudBigqueryMigrationV2AssignmentInfo', 2)
  createTime = _messages.StringField(3)
  id = _messages.StringField(4)
  lastUpdateTime = _messages.StringField(5)
  lease = _messages.MessageField('GoogleCloudBigqueryMigrationV2Lease', 6)
  metrics = _messages.MessageField('GoogleCloudBigqueryMigrationV2TimeSeries', 7, repeated=True)
  processingError = _messages.MessageField('GoogleRpcErrorInfo', 8)
  resourceErrorCount = _messages.IntegerField(9, variant=_messages.Variant.INT32)
  resourceErrorDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2ResourceErrorDetail', 10, repeated=True)
  state = _messages.EnumField('StateValueValuesEnum', 11)
  taskResult = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationTaskResult', 12)
  totalProcessingErrorCount = _messages.IntegerField(13, variant=_messages.Variant.INT32)
  totalResourceErrorCount = _messages.IntegerField(14, variant=_messages.Variant.INT32)
  translationConfigDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslationConfigDetails', 15)
  translationDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslationDetails', 16)
  type = _messages.StringField(17)


class GoogleCloudBigqueryMigrationV2MigrationTaskResult(_messages.Message):
  r"""The migration task result.

  Fields:
    translationTaskResult: Details specific to translation task types.
  """

  translationTaskResult = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslationTaskResult', 1)


class GoogleCloudBigqueryMigrationV2MigrationWorkflow(_messages.Message):
  r"""A migration workflow which specifies what needs to be done for an EDW
  migration.

  Enums:
    StateValueValuesEnum: Output only. The status of the workflow.

  Messages:
    TasksValue: The tasks in a workflow in a named map. The name (i.e. key)
      has no meaning and is merely a convenient way to address a specific task
      in a workflow.

  Fields:
    createTime: Output only. Time when the workflow was created.
    displayName: The display name of the workflow. This can be set to give a
      workflow a descriptive name. There is no guarantee or enforcement of
      uniqueness.
    lastUpdateTime: Output only. Time when the workflow was last updated.
    name: Output only. Immutable. Identifier. The unique identifier for the
      migration workflow. The ID is server-generated. Example:
      `projects/123/locations/us/workflows/345`
    state: Output only. The status of the workflow.
    tasks: The tasks in a workflow in a named map. The name (i.e. key) has no
      meaning and is merely a convenient way to address a specific task in a
      workflow.
  """

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. That status of the workflow.

    Values:
      STATE_UNSPECIFIED: Workflow state is unspecified.
      DRAFT: Workflow is in draft status, i.e. tasks are not yet eligible for
        execution.
      RUNNING: Workflow is running (i.e. tasks are eligible for execution).
      PAUSED: Workflow is paused. Tasks currently in progress may continue,
        but no further tasks will be scheduled.
      COMPLETED: Workflow is complete. There should not be any tasks in a
        non-terminal state, but if there are (e.g. due to forced termination),
        they will not be scheduled.
    """
    STATE_UNSPECIFIED = 0
    DRAFT = 1
    RUNNING = 2
    PAUSED = 3
    COMPLETED = 4

  @encoding.MapUnrecognizedFields('additionalProperties')
  class TasksValue(_messages.Message):
    r"""The tasks in a workflow in a named map. The name (i.e. key) has no
    meaning and is merely a convenient way to address a specific task in a
    workflow.

    Messages:
      AdditionalProperty: An additional property for a TasksValue object.

    Fields:
      additionalProperties: Additional properties of type TasksValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a TasksValue object.

      Fields:
        key: Name of the additional property.
        value: A GoogleCloudBigqueryMigrationV2MigrationTask attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationTask', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  createTime = _messages.StringField(1)
  displayName = _messages.StringField(2)
  lastUpdateTime = _messages.StringField(3)
  name = _messages.StringField(4)
  state = _messages.EnumField('StateValueValuesEnum', 5)
  tasks = _messages.MessageField('TasksValue', 6)
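

# Illustrative sketch, not generated code: a minimal example of how the
# map-typed `tasks` field is expressed through TasksValue and its
# AdditionalProperty entries. The key 'task-1' and the task type below are
# hypothetical placeholders.
def _example_workflow_with_tasks():
  task = GoogleCloudBigqueryMigrationV2MigrationTask(
      type='Translation_Teradata2BQ')
  tasks_value_cls = GoogleCloudBigqueryMigrationV2MigrationWorkflow.TasksValue
  tasks = tasks_value_cls(additionalProperties=[
      tasks_value_cls.AdditionalProperty(key='task-1', value=task)])
  return GoogleCloudBigqueryMigrationV2MigrationWorkflow(
      displayName='example-workflow', tasks=tasks)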


class GoogleCloudBigqueryMigrationV2MySQLDialect(_messages.Message):
  r"""The dialect definition for MySQL."""


class GoogleCloudBigqueryMigrationV2NameMappingKey(_messages.Message):
  r"""The potential components of a full name mapping that will be mapped
  during translation in the source data warehouse.

  Enums:
    TypeValueValuesEnum: The type of object that is being mapped.

  Fields:
    attribute: The attribute name (BigQuery column equivalent in the source
      data warehouse).
    database: The database name (BigQuery project ID equivalent in the source
      data warehouse).
    relation: The relation name (BigQuery table or view equivalent in the
      source data warehouse).
    schema: The schema name (BigQuery dataset equivalent in the source data
      warehouse).
    type: The type of object that is being mapped.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""The type of object that is being mapped.

    Values:
      TYPE_UNSPECIFIED: Unspecified name mapping type.
      DATABASE: The object being mapped is a database.
      SCHEMA: The object being mapped is a schema.
      RELATION: The object being mapped is a relation.
      ATTRIBUTE: The object being mapped is an attribute.
      RELATION_ALIAS: The object being mapped is a relation alias.
      ATTRIBUTE_ALIAS: The object being mapped is an attribute alias.
      FUNCTION: The object being mapped is a function.
    """
    TYPE_UNSPECIFIED = 0
    DATABASE = 1
    SCHEMA = 2
    RELATION = 3
    ATTRIBUTE = 4
    RELATION_ALIAS = 5
    ATTRIBUTE_ALIAS = 6
    FUNCTION = 7

  attribute = _messages.StringField(1)
  database = _messages.StringField(2)
  relation = _messages.StringField(3)
  schema = _messages.StringField(4)
  type = _messages.EnumField('TypeValueValuesEnum', 5)


class GoogleCloudBigqueryMigrationV2NameMappingValue(_messages.Message):
  r"""The potential components of a full name mapping that will be mapped
  during translation in the target data warehouse.

  Fields:
    attribute: The attribute name (BigQuery column equivalent in the target
      data warehouse).
    database: The database name (BigQuery project ID equivalent in the target
      data warehouse).
    relation: The relation name (BigQuery table or view equivalent in the
      target data warehouse).
    schema: The schema name (BigQuery dataset equivalent in the target data
      warehouse).
  """

  attribute = _messages.StringField(1)
  database = _messages.StringField(2)
  relation = _messages.StringField(3)
  schema = _messages.StringField(4)


class GoogleCloudBigqueryMigrationV2NetezzaDialect(_messages.Message):
  r"""The dialect definition for Netezza."""


class GoogleCloudBigqueryMigrationV2ObjectNameMapping(_messages.Message):
  r"""Represents a key-value pair of NameMappingKey to NameMappingValue to
  represent the mapping of SQL names from the input value to desired output.

  Fields:
    source: The name of the object in source that is being mapped.
    target: The desired target name of the object that is being mapped.
  """

  source = _messages.MessageField('GoogleCloudBigqueryMigrationV2NameMappingKey', 1)
  target = _messages.MessageField('GoogleCloudBigqueryMigrationV2NameMappingValue', 2)


class GoogleCloudBigqueryMigrationV2ObjectNameMappingList(_messages.Message):
  r"""Represents a map of name mappings using a list of key:value proto
  messages of existing name to desired output name.

  Fields:
    nameMap: The elements of the object name map.
  """

  nameMap = _messages.MessageField('GoogleCloudBigqueryMigrationV2ObjectNameMapping', 1, repeated=True)
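

# Illustrative sketch, not generated code: builds an ObjectNameMappingList that
# renames a hypothetical source relation td_db.td_schema.orders to
# my-bq-project.sales.orders. All identifiers are placeholders for
# illustration only.
def _example_object_name_mapping_list():
  key_cls = GoogleCloudBigqueryMigrationV2NameMappingKey
  source = key_cls(
      type=key_cls.TypeValueValuesEnum.RELATION,
      database='td_db', schema='td_schema', relation='orders')
  target = GoogleCloudBigqueryMigrationV2NameMappingValue(
      database='my-bq-project', schema='sales', relation='orders')
  return GoogleCloudBigqueryMigrationV2ObjectNameMappingList(
      nameMap=[GoogleCloudBigqueryMigrationV2ObjectNameMapping(
          source=source, target=target)])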


class GoogleCloudBigqueryMigrationV2OracleDialect(_messages.Message):
  r"""The dialect definition for Oracle."""


class GoogleCloudBigqueryMigrationV2Point(_messages.Message):
  r"""A single data point in a time series.

  Fields:
    interval: The time interval to which the data point applies. For `GAUGE`
      metrics, the start time does not need to be supplied, but if it is
      supplied, it must equal the end time. For `DELTA` metrics, the start and
      end time should specify a non-zero interval, with subsequent points
      specifying contiguous and non-overlapping intervals. For `CUMULATIVE`
      metrics, the start and end time should specify a non-zero interval, with
      subsequent points specifying the same start time and increasing end
      times, until an event resets the cumulative value to zero and sets a new
      start time for the following points.
    value: The value of the data point.
  """

  interval = _messages.MessageField('GoogleCloudBigqueryMigrationV2TimeInterval', 1)
  value = _messages.MessageField('GoogleCloudBigqueryMigrationV2TypedValue', 2)


class GoogleCloudBigqueryMigrationV2PostgresqlDialect(_messages.Message):
  r"""The dialect definition for Postgresql."""


class GoogleCloudBigqueryMigrationV2PrestoDialect(_messages.Message):
  r"""The dialect definition for Presto."""


class GoogleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest(_messages.Message):
  r"""A request to assign a subtask to the caller by subtask type.

  Fields:
    filter: Optional. The filter to apply in AIP-160 format. This can be used
      to get the subtasks of a specific project_number or workflow_id, e.g.
      `project_number = 123`.
    types: Required. The types of subtasks the caller is willing to accept.
      At least one type must be provided.
    workerId: The ID of the worker that will process the task.
  """

  filter = _messages.StringField(1)
  types = _messages.StringField(2, repeated=True)
  workerId = _messages.StringField(3)
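

# Illustrative sketch, not generated code: a worker asking to be assigned one
# subtask, restricted to a single project via an AIP-160 filter. The subtask
# type and worker ID are hypothetical placeholders.
def _example_receive_migration_subtask_request():
  return GoogleCloudBigqueryMigrationV2ReceiveMigrationSubtaskRequest(
      filter='project_number = 123',
      types=['example_subtask_type'],
      workerId='worker-1')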


class GoogleCloudBigqueryMigrationV2ReceiveMigrationSubtaskResponse(_messages.Message):
  r"""Response object for an `ReceiveMigrationSubtask` call.

  Enums:
    AssignmentStateValueValuesEnum: The status of the subtask assignment. If
      the `filter` field was provided in the request, then the
      `assignment_state` represents the assignment of the filtered subtasks,
      which might not be all the subtasks.

  Fields:
    assignmentState: The status of the subtask assignment. If the `filter`
      field was provided in the request, then the `assignment_state`
      represents the assignment of the filtered subtasks, which might not be
      all the subtasks.
    migrationSubtask: The assigned subtask, if available. If there was already
      a subtask assigned for the given worker ID, the same subtask will be
      returned.
  """

  class AssignmentStateValueValuesEnum(_messages.Enum):
    r"""The status of the subtask assignment. If the `filter` field was
    provided in the request, then the `assignment_state` represents the
    assignment of the filtered subtasks, which might not be all the subtasks.

    Values:
      ASSIGNMENT_STATE_UNSPECIFIED: State is unspecified.
      NO_ASSIGNMENT: No assignment is available.
      NEW_ASSIGNMENT: New task was assigned.
      EXISTING_ASSIGNMENT: There was already an assignment for the caller.
    """
    ASSIGNMENT_STATE_UNSPECIFIED = 0
    NO_ASSIGNMENT = 1
    NEW_ASSIGNMENT = 2
    EXISTING_ASSIGNMENT = 3

  assignmentState = _messages.EnumField('AssignmentStateValueValuesEnum', 1)
  migrationSubtask = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationSubtask', 2)


class GoogleCloudBigqueryMigrationV2ReceiveMigrationTaskRequest(_messages.Message):
  r"""A request to assign a task to the caller by task type.

  Fields:
    orchestratorId: The ID of the orchestrator that will process the task. The
      service does not validate the ID or keep track of orchestrators. It is
      up to the orchestrator to provide a unique ID. This is used for
      operational purposes and to ensure that an orchestrator does not get
      more than one task assigned at a time (which also helps with
      idempotency).
    types: Required. The types of tasks the caller is willing to accept. At
      least one type must be provided.
  """

  orchestratorId = _messages.StringField(1)
  types = _messages.StringField(2, repeated=True)


class GoogleCloudBigqueryMigrationV2ReceiveMigrationTaskResponse(_messages.Message):
  r"""Response object for an `ReceiveMigrationTask` call.

  Enums:
    AssignmentStateValueValuesEnum: The status of the task assignment.

  Fields:
    assignmentState: The status of the task assignment.
    migrationTask: The assigned task, if available. If there was already a
      task assigned for the given orchestrator ID, the same task will be
      returned.
    migrationWorkflow: The name of the migration workflow to which the task
      belongs or an empty string if `assignment_state` is `NO_ASSIGNMENT`.
  """

  class AssignmentStateValueValuesEnum(_messages.Enum):
    r"""The status of the task assignment.

    Values:
      ASSIGNMENT_STATE_UNSPECIFIED: State is unspecified.
      NO_ASSIGNMENT: No assignment is available.
      NEW_ASSIGNMENT: New task was assigned.
      EXISTING_ASSIGNMENT: There was already an assignment for the caller.
    """
    ASSIGNMENT_STATE_UNSPECIFIED = 0
    NO_ASSIGNMENT = 1
    NEW_ASSIGNMENT = 2
    EXISTING_ASSIGNMENT = 3

  assignmentState = _messages.EnumField('AssignmentStateValueValuesEnum', 1)
  migrationTask = _messages.MessageField('GoogleCloudBigqueryMigrationV2MigrationTask', 2)
  migrationWorkflow = _messages.StringField(3)


class GoogleCloudBigqueryMigrationV2RedshiftDialect(_messages.Message):
  r"""The dialect definition for Redshift."""


class GoogleCloudBigqueryMigrationV2RenewSubtaskLeaseRequest(_messages.Message):
  r"""Request to renew the lease of a subtask by subtask type.

  Fields:
    migrationSubtask: Required. The unique identifier for the migration
      subtask. Example:
      `projects/123/locations/us/workflows/1234/subtasks/543`
    workerId: Required. The ID of the worker that is renewing the subtask
      lease. This must match the `worker_id` that currently holds the lease
      for the `MigrationSubtask` (acquired via `ReceiveNextMigrationSubtask`).
  """

  migrationSubtask = _messages.StringField(1)
  workerId = _messages.StringField(2)


class GoogleCloudBigqueryMigrationV2RenewSubtaskLeaseResponse(_messages.Message):
  r"""Response to renew the lease of a subtask by type.

  Fields:
    lease: Optional. The renewed lease if available.
  """

  lease = _messages.MessageField('GoogleCloudBigqueryMigrationV2Lease', 1)


class GoogleCloudBigqueryMigrationV2RenewTaskLeaseRequest(_messages.Message):
  r"""Request to renew the lease of a task by task type.

  Fields:
    migrationWorkflow: Required. The unique identifier for the migration
      workflow. Example: `projects/123/locations/us/workflows/1234`
    orchestratorId: Required. The ID of the orchestrator that is renewing the
      task lease. This must match the `orchestrator_id` that currently holds
      the lease for the `MigrationTask` (acquired via
      `ReceiveNextMigrationTask`).
    taskId: Required. The unique identifier for the migration task. Example:
      `0b2e6759-f320-4f76-bad7-8b2b48e65b5c`
  """

  migrationWorkflow = _messages.StringField(1)
  orchestratorId = _messages.StringField(2)
  taskId = _messages.StringField(3)


class GoogleCloudBigqueryMigrationV2RenewTaskLeaseResponse(_messages.Message):
  r"""Response to renew the lease of a subtask by type.

  Fields:
    lease: Optional. The renewed lease if available.
  """

  lease = _messages.MessageField('GoogleCloudBigqueryMigrationV2Lease', 1)


class GoogleCloudBigqueryMigrationV2ResourceErrorDetail(_messages.Message):
  r"""Provides details for errors and the corresponding resources.

  Fields:
    errorCount: Required. How many errors there are in total for the resource.
      Truncation can be indicated by having an `error_count` that is higher
      than the size of `error_details`.
    errorDetails: Required. The error details for the resource.
    resourceInfo: Required. Information about the resource where the error is
      located.
  """

  errorCount = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  errorDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2ErrorDetail', 2, repeated=True)
  resourceInfo = _messages.MessageField('GoogleRpcResourceInfo', 3)


class GoogleCloudBigqueryMigrationV2SQLServerDialect(_messages.Message):
  r"""The dialect definition for SQL Server."""


class GoogleCloudBigqueryMigrationV2SQLiteDialect(_messages.Message):
  r"""The dialect definition for SQLite."""


class GoogleCloudBigqueryMigrationV2SnowflakeDialect(_messages.Message):
  r"""The dialect definition for Snowflake."""


class GoogleCloudBigqueryMigrationV2SourceEnv(_messages.Message):
  r"""Represents the default source environment values for the translation.

  Fields:
    defaultDatabase: The default database name to fully qualify SQL objects
      when their database name is missing.
    metadataStoreDataset: Optional. Expects a valid BigQuery dataset ID that
      exists, e.g., project-123.metadata_store_123. If specified, translation
      will search for and read the required schema information from a metadata
      store in this dataset. If the metadata store doesn't exist, translation
      will parse the metadata file and upload the schema info to a temp table
      in the dataset to speed up future translation jobs.
    schemaSearchPath: The schema search path. When SQL objects are missing a
      schema name, the translation engine will search through this list to
      find the value.
  """

  defaultDatabase = _messages.StringField(1)
  metadataStoreDataset = _messages.StringField(2)
  schemaSearchPath = _messages.StringField(3, repeated=True)


class GoogleCloudBigqueryMigrationV2SourceEnvironment(_messages.Message):
  r"""Represents the default source environment values for the translation.

  Fields:
    defaultDatabase: The default database name to fully qualify SQL objects
      when their database name is missing.
    metadataStoreDataset: Optional. Expects a valid BigQuery dataset ID that
      exists, e.g., project-123.metadata_store_123. If specified, translation
      will search for and read the required schema information from a metadata
      store in this dataset. If the metadata store doesn't exist, translation
      will parse the metadata file and upload the schema info to a temp table
      in the dataset to speed up future translation jobs.
    schemaSearchPath: The schema search path. When SQL objects are missing a
      schema name, the translation engine will search through this list to
      find the value.
  """

  defaultDatabase = _messages.StringField(1)
  metadataStoreDataset = _messages.StringField(2)
  schemaSearchPath = _messages.StringField(3, repeated=True)


class GoogleCloudBigqueryMigrationV2SourceSpec(_messages.Message):
  r"""Represents one path to the location that holds source data.

  Fields:
    baseUri: The base URI for all files to be read in as sources for
      translation.
    encoding: Optional. The encoding of the SQL bytes.
    literal: Source literal.
  """

  baseUri = _messages.StringField(1)
  encoding = _messages.StringField(2)
  literal = _messages.MessageField('GoogleCloudBigqueryMigrationV2Literal', 3)


class GoogleCloudBigqueryMigrationV2SourceTargetMapping(_messages.Message):
  r"""Represents one mapping from a source SQL to a target SQL.

  Fields:
    sourceSpec: The source SQL or the path to it.
    targetSpec: The target SQL or the path for it.
  """

  sourceSpec = _messages.MessageField('GoogleCloudBigqueryMigrationV2SourceSpec', 1)
  targetSpec = _messages.MessageField('GoogleCloudBigqueryMigrationV2TargetSpec', 2)


class GoogleCloudBigqueryMigrationV2SparkSQLDialect(_messages.Message):
  r"""The dialect definition for SparkSQL."""


class GoogleCloudBigqueryMigrationV2StartMigrationWorkflowRequest(_messages.Message):
  r"""A request to start a previously created migration workflow."""


class GoogleCloudBigqueryMigrationV2SuggestionConfig(_messages.Message):
  r"""The configuration for the suggestion if requested as a target type.

  Fields:
    skipSuggestionSteps: The list of suggestion steps to skip.
  """

  skipSuggestionSteps = _messages.MessageField('GoogleCloudBigqueryMigrationV2SuggestionStep', 1, repeated=True)


class GoogleCloudBigqueryMigrationV2SuggestionStep(_messages.Message):
  r"""Suggestion step to skip.

  Enums:
    RewriteTargetValueValuesEnum: The rewrite target.
    SuggestionTypeValueValuesEnum: The type of suggestion.

  Fields:
    rewriteTarget: The rewrite target.
    suggestionType: The type of suggestion.
  """

  class RewriteTargetValueValuesEnum(_messages.Enum):
    r"""The rewrite target.

    Values:
      REWRITE_TARGET_UNSPECIFIED: Rewrite target unspecified.
      SOURCE_SQL: Source SQL.
      TARGET_SQL: Target SQL.
    """
    REWRITE_TARGET_UNSPECIFIED = 0
    SOURCE_SQL = 1
    TARGET_SQL = 2

  class SuggestionTypeValueValuesEnum(_messages.Enum):
    r"""The type of suggestion.

    Values:
      SUGGESTION_TYPE_UNSPECIFIED: Suggestion type unspecified.
      QUERY_CUSTOMIZATION: Query customization.
      TRANSLATION_EXPLANATION: Translation explanation.
    """
    SUGGESTION_TYPE_UNSPECIFIED = 0
    QUERY_CUSTOMIZATION = 1
    TRANSLATION_EXPLANATION = 2

  rewriteTarget = _messages.EnumField('RewriteTargetValueValuesEnum', 1)
  suggestionType = _messages.EnumField('SuggestionTypeValueValuesEnum', 2)


class GoogleCloudBigqueryMigrationV2TargetSpec(_messages.Message):
  r"""Represents one path to the location that holds target data.

  Fields:
    relativePath: The relative path for the target data. Given source file
      `base_uri/input/sql`, the output would be
      `target_base_uri/sql/relative_path/input.sql`.
  """

  relativePath = _messages.StringField(1)


class GoogleCloudBigqueryMigrationV2TeradataDialect(_messages.Message):
  r"""The dialect definition for Teradata.

  Enums:
    ModeValueValuesEnum: Which Teradata sub-dialect mode the user specifies.

  Fields:
    mode: Which Teradata sub-dialect mode the user specifies.
  """

  class ModeValueValuesEnum(_messages.Enum):
    r"""Which Teradata sub-dialect mode the user specifies.

    Values:
      MODE_UNSPECIFIED: Unspecified mode.
      SQL: Teradata SQL mode.
      BTEQ: BTEQ mode (which includes SQL).
    """
    MODE_UNSPECIFIED = 0
    SQL = 1
    BTEQ = 2

  mode = _messages.EnumField('ModeValueValuesEnum', 1)
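

# Illustrative sketch, not generated code: selecting the BTEQ sub-dialect mode
# documented above.
def _example_teradata_bteq_dialect():
  dialect_cls = GoogleCloudBigqueryMigrationV2TeradataDialect
  return dialect_cls(mode=dialect_cls.ModeValueValuesEnum.BTEQ)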


class GoogleCloudBigqueryMigrationV2TimeInterval(_messages.Message):
  r"""A time interval extending just after a start time through an end time.
  If the start time is the same as the end time, then the interval represents
  a single point in time.

  Fields:
    endTime: Required. The end of the time interval.
    startTime: Optional. The beginning of the time interval. The default value
      for the start time is the end time. The start time must not be later
      than the end time.
  """

  endTime = _messages.StringField(1)
  startTime = _messages.StringField(2)


class GoogleCloudBigqueryMigrationV2TimeSeries(_messages.Message):
  r"""The metrics object for a SubTask.

  Enums:
    MetricKindValueValuesEnum: Optional. The metric kind of the time series.
      If present, it must be the same as the metric kind of the associated
      metric. If the associated metric's descriptor must be auto-created, then
      this field specifies the metric kind of the new descriptor and must be
      either `GAUGE` (the default) or `CUMULATIVE`.
    ValueTypeValueValuesEnum: Required. The value type of the time series.

  Fields:
    metric: Required. The name of the metric. If the metric is not known by
      the service yet, it will be auto-created.
    metricKind: Optional. The metric kind of the time series. If present, it
      must be the same as the metric kind of the associated metric. If the
      associated metric's descriptor must be auto-created, then this field
      specifies the metric kind of the new descriptor and must be either
      `GAUGE` (the default) or `CUMULATIVE`.
    points: Required. The data points of this time series. When listing time
      series, points are returned in reverse time order. When creating a time
      series, this field must contain exactly one point and the point's type
      must be the same as the value type of the associated metric. If the
      associated metric's descriptor must be auto-created, then the value type
      of the descriptor is determined by the point's type, which must be
      `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
    valueType: Required. The value type of the time series.
  """

  class MetricKindValueValuesEnum(_messages.Enum):
    r"""Optional. The metric kind of the time series. If present, it must be
    the same as the metric kind of the associated metric. If the associated
    metric's descriptor must be auto-created, then this field specifies the
    metric kind of the new descriptor and must be either `GAUGE` (the default)
    or `CUMULATIVE`.

    Values:
      METRIC_KIND_UNSPECIFIED: Do not use this default value.
      GAUGE: An instantaneous measurement of a value.
      DELTA: The change in a value during a time interval.
      CUMULATIVE: A value accumulated over a time interval. Cumulative
        measurements in a time series should have the same start time and
        increasing end times, until an event resets the cumulative value to
        zero and sets a new start time for the following points.
    """
    METRIC_KIND_UNSPECIFIED = 0
    GAUGE = 1
    DELTA = 2
    CUMULATIVE = 3

  class ValueTypeValueValuesEnum(_messages.Enum):
    r"""Required. The value type of the time series.

    Values:
      VALUE_TYPE_UNSPECIFIED: Do not use this default value.
      BOOL: The value is a boolean. This value type can be used only if the
        metric kind is `GAUGE`.
      INT64: The value is a signed 64-bit integer.
      DOUBLE: The value is a double precision floating point number.
      STRING: The value is a text string. This value type can be used only if
        the metric kind is `GAUGE`.
      DISTRIBUTION: The value is a `Distribution`.
      MONEY: The value is money.
    """
    VALUE_TYPE_UNSPECIFIED = 0
    BOOL = 1
    INT64 = 2
    DOUBLE = 3
    STRING = 4
    DISTRIBUTION = 5
    MONEY = 6

  metric = _messages.StringField(1)
  metricKind = _messages.EnumField('MetricKindValueValuesEnum', 2)
  points = _messages.MessageField('GoogleCloudBigqueryMigrationV2Point', 3, repeated=True)
  valueType = _messages.EnumField('ValueTypeValueValuesEnum', 4)
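

# Illustrative sketch, not generated code: a single-point GAUGE time series of
# the shape described by the Point and TimeSeries docstrings above. The metric
# name, timestamp, and value are hypothetical placeholders; the TimeInterval
# and TypedValue classes are defined later in this module and resolve when the
# function is called.
def _example_gauge_time_series():
  ts_cls = GoogleCloudBigqueryMigrationV2TimeSeries
  point = GoogleCloudBigqueryMigrationV2Point(
      interval=GoogleCloudBigqueryMigrationV2TimeInterval(
          endTime='2024-01-01T00:00:00Z'),
      value=GoogleCloudBigqueryMigrationV2TypedValue(int64Value=42))
  return ts_cls(
      metric='example_metric',
      metricKind=ts_cls.MetricKindValueValuesEnum.GAUGE,
      valueType=ts_cls.ValueTypeValueValuesEnum.INT64,
      points=[point])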


class GoogleCloudBigqueryMigrationV2TranslateQueryRequest(_messages.Message):
  r"""The request of translating a SQL query to GoogleSQL.

  Enums:
    SourceDialectValueValuesEnum: Required. The source SQL dialect of
      `queries`.

  Fields:
    query: Required. The query to be translated.
    sourceDialect: Required. The source SQL dialect of `queries`.
  """

  class SourceDialectValueValuesEnum(_messages.Enum):
    r"""Required. The source SQL dialect of `queries`.

    Values:
      SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED: SqlTranslationSourceDialect
        not specified.
      TERADATA: Teradata SQL.
      REDSHIFT: Redshift SQL.
      ORACLE: Oracle SQL.
      HIVEQL: HiveQL SQL.
      SPARKSQL: Spark SQL.
      SNOWFLAKE: Snowflake SQL.
      NETEZZA: IBM Netezza SQL.
      AZURESYNAPSE: Azure Synapse.
      VERTICA: Vertica.
      SQLSERVER: SQL Server dialect.
      PRESTO: Presto.
      POSTGRESQL: PostgreSQL.
      MYSQL: MySQL.
      BTEQ: BTEQ.
      SQLITE: SQLite dialect.
      DB2: DB2 dialect.
      GREENPLUM: Greenplum dialect.
      IMPALA: Impala dialect.
    """
    SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED = 0
    TERADATA = 1
    REDSHIFT = 2
    ORACLE = 3
    HIVEQL = 4
    SPARKSQL = 5
    SNOWFLAKE = 6
    NETEZZA = 7
    AZURESYNAPSE = 8
    VERTICA = 9
    SQLSERVER = 10
    PRESTO = 11
    POSTGRESQL = 12
    MYSQL = 13
    BTEQ = 14
    SQLITE = 15
    DB2 = 16
    GREENPLUM = 17
    IMPALA = 18

  query = _messages.StringField(1)
  sourceDialect = _messages.EnumField('SourceDialectValueValuesEnum', 2)
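

# Illustrative sketch, not generated code: a TranslateQueryRequest for a single
# Teradata statement. The query text is a placeholder.
def _example_translate_query_request():
  req_cls = GoogleCloudBigqueryMigrationV2TranslateQueryRequest
  return req_cls(
      query='SELECT * FROM orders;',
      sourceDialect=req_cls.SourceDialectValueValuesEnum.TERADATA)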


class GoogleCloudBigqueryMigrationV2TranslateQueryResponse(_messages.Message):
  r"""The response of translating a SQL query to GoogleSQL.

  Fields:
    translatedQuery: The translated result. It may contain error/warning
      messages as comments.
    translationJob: Output only. Immutable. The unique identifier for the SQL
      translation job. Example: `projects/123/locations/us/translation/1234`
    translationResultDetails: The details of the translation results.
  """

  translatedQuery = _messages.StringField(1)
  translationJob = _messages.StringField(2)
  translationResultDetails = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslationResultDetails', 3)


class GoogleCloudBigqueryMigrationV2TranslationConfigDetails(_messages.Message):
  r"""The translation config to capture necessary settings for a translation
  task and subtask.

  Fields:
    gcsSourcePath: The Cloud Storage path for a directory of files to
      translate in a task.
    gcsTargetPath: The Cloud Storage path to write back the corresponding
      input files to.
    nameMappingList: The mapping of objects to their desired output names in
      list form.
    requestSource: Indicates the initiator of the translation request.
    sourceDialect: The dialect of the input files.
    sourceEnv: The default source environment values for the translation.
    targetDialect: The target dialect for the engine to translate the input
      to.
    targetTypes: The types of output to generate, e.g. sql, metadata etc. If
      not specified, a default set of targets will be generated. Some
      additional target types may be slower to generate. See the documentation
      for the set of available target types.
  """

  gcsSourcePath = _messages.StringField(1)
  gcsTargetPath = _messages.StringField(2)
  nameMappingList = _messages.MessageField('GoogleCloudBigqueryMigrationV2ObjectNameMappingList', 3)
  requestSource = _messages.StringField(4)
  sourceDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2Dialect', 5)
  sourceEnv = _messages.MessageField('GoogleCloudBigqueryMigrationV2SourceEnv', 6)
  targetDialect = _messages.MessageField('GoogleCloudBigqueryMigrationV2Dialect', 7)
  targetTypes = _messages.StringField(8, repeated=True)
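

# Illustrative sketch, not generated code: a translation config built only from
# fields documented above. The Cloud Storage paths, database name, and schema
# search path are hypothetical placeholders; the source and target dialect
# fields are omitted here.
def _example_translation_config_details():
  return GoogleCloudBigqueryMigrationV2TranslationConfigDetails(
      gcsSourcePath='gs://example-bucket/input',
      gcsTargetPath='gs://example-bucket/output',
      sourceEnv=GoogleCloudBigqueryMigrationV2SourceEnv(
          defaultDatabase='default_db',
          schemaSearchPath=['schema_a', 'schema_b']),
      targetTypes=['sql'])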


class GoogleCloudBigqueryMigrationV2TranslationDetails(_messages.Message):
  r"""The translation details to capture the necessary settings for a
  translation job.

  Fields:
    sourceEnvironment: The default source environment values for the
      translation.
    sourceTargetMapping: The mapping from source to target SQL.
    suggestionConfig: The configuration for the suggestion if requested as a
      target type.
    targetBaseUri: The base URI for all writes to persistent storage.
    targetReturnLiterals: The list of literal targets that will be directly
      returned to the response. Each entry consists of the constructed path,
      EXCLUDING the base path. Not providing a target_base_uri will prevent
      writing to persistent storage.
    targetTypes: The types of output to generate, e.g. sql, metadata,
      lineage_from_sql_scripts, etc. If not specified, a default set of
      targets will be generated. Some additional target types may be slower to
      generate. See the documentation for the set of available target types.
  """

  sourceEnvironment = _messages.MessageField('GoogleCloudBigqueryMigrationV2SourceEnvironment', 1)
  sourceTargetMapping = _messages.MessageField('GoogleCloudBigqueryMigrationV2SourceTargetMapping', 2, repeated=True)
  suggestionConfig = _messages.MessageField('GoogleCloudBigqueryMigrationV2SuggestionConfig', 3)
  targetBaseUri = _messages.StringField(4)
  targetReturnLiterals = _messages.StringField(5, repeated=True)
  targetTypes = _messages.StringField(6, repeated=True)
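

# Illustrative sketch, not generated code: translation details with a single
# source/target mapping that reads from a base URI and writes under a target
# base URI. All URIs and paths are hypothetical placeholders.
def _example_translation_details():
  mapping = GoogleCloudBigqueryMigrationV2SourceTargetMapping(
      sourceSpec=GoogleCloudBigqueryMigrationV2SourceSpec(
          baseUri='gs://example-bucket/input'),
      targetSpec=GoogleCloudBigqueryMigrationV2TargetSpec(
          relativePath='translated'))
  return GoogleCloudBigqueryMigrationV2TranslationDetails(
      sourceTargetMapping=[mapping],
      targetBaseUri='gs://example-bucket/output',
      targetTypes=['sql'])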


class GoogleCloudBigqueryMigrationV2TranslationReportRecord(_messages.Message):
  r"""Details about a record.

  Enums:
    SeverityValueValuesEnum: Severity of the translation record.

  Fields:
    category: Category of the error/warning. Example: SyntaxError
    message: Detailed message of the record.
    scriptColumn: Specifies the column from the source text where the error
      occurred (0-based). Example: 6
    scriptLine: Specifies the row from the source text where the error
      occurred (0-based). Example: 2
    severity: Severity of the translation record.
  """

  class SeverityValueValuesEnum(_messages.Enum):
    r"""Severity of the translation record.

    Values:
      SEVERITY_UNSPECIFIED: SeverityType not specified.
      INFO: INFO type.
      WARNING: WARNING type. The translated query may still provide useful
        information if all the report records are WARNING.
      ERROR: ERROR type. Translation failed.
    """
    SEVERITY_UNSPECIFIED = 0
    INFO = 1
    WARNING = 2
    ERROR = 3

  category = _messages.StringField(1)
  message = _messages.StringField(2)
  scriptColumn = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  scriptLine = _messages.IntegerField(4, variant=_messages.Variant.INT32)
  severity = _messages.EnumField('SeverityValueValuesEnum', 5)


class GoogleCloudBigqueryMigrationV2TranslationResultDetails(_messages.Message):
  r"""Translation details like metrics, warning/error records.

  Messages:
    TranslationMetricsValue: Translation metrics. E.g., Compiler_Clean: 5,
      Compiler_BestEffort: 2, Emitter_Clean: 1, Emitter_Error: 3, etc.

  Fields:
    reportRecords: Translation details about warning/error records.
    translationMetrics: Translation metrics. E.g., Compiler_Clean: 5,
      Compiler_BestEffort: 2, Emitter_Clean: 1, Emitter_Error: 3, etc.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class TranslationMetricsValue(_messages.Message):
    r"""Translation metrics. E.g., Compiler_Clean: 5, Compiler_BestEffort: 2,
    Emitter_Clean: 1, Emitter_Error: 3, etc.

    Messages:
      AdditionalProperty: An additional property for a TranslationMetricsValue
        object.

    Fields:
      additionalProperties: Additional properties of type
        TranslationMetricsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a TranslationMetricsValue object.

      Fields:
        key: Name of the additional property.
        value: An integer attribute.
      """

      key = _messages.StringField(1)
      value = _messages.IntegerField(2, variant=_messages.Variant.INT32)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  reportRecords = _messages.MessageField('GoogleCloudBigqueryMigrationV2TranslationReportRecord', 1, repeated=True)
  translationMetrics = _messages.MessageField('TranslationMetricsValue', 2)


class GoogleCloudBigqueryMigrationV2TranslationTaskResult(_messages.Message):
  r"""Translation specific result details from the migration task.

  Fields:
    consoleUri: The Cloud Console URI for the migration workflow.
    reportLogMessages: The records from the aggregate CSV report for a
      migration workflow.
    translatedLiterals: The list of the translated literals.
  """

  consoleUri = _messages.StringField(1)
  reportLogMessages = _messages.MessageField('GoogleCloudBigqueryMigrationV2GcsReportLogMessage', 2, repeated=True)
  translatedLiterals = _messages.MessageField('GoogleCloudBigqueryMigrationV2Literal', 3, repeated=True)


class GoogleCloudBigqueryMigrationV2TypedValue(_messages.Message):
  r"""A single strongly-typed value.

  Fields:
    boolValue: A Boolean value: `true` or `false`.
    distributionValue: A distribution value.
    doubleValue: A 64-bit double-precision floating-point number. Its
      magnitude is approximately `+/-10^(+/-300)` and it has 16 significant
      digits of precision.
    int64Value: A 64-bit integer. Its range is approximately `+/-9.2x10^18`.
    stringValue: A variable-length string value.
  """

  boolValue = _messages.BooleanField(1)
  distributionValue = _messages.MessageField('GoogleApiDistribution', 2)
  doubleValue = _messages.FloatField(3)
  int64Value = _messages.IntegerField(4)
  stringValue = _messages.StringField(5)


class GoogleCloudBigqueryMigrationV2VerticaDialect(_messages.Message):
  r"""The dialect definition for Vertica."""


class GoogleProtobufEmpty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo { rpc
  Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }
  """



class GoogleRpcErrorInfo(_messages.Message):
  r"""Describes the cause of the error with structured details. Example of an
  error when contacting the "pubsub.googleapis.com" API when it is not
  enabled: { "reason": "API_DISABLED" "domain": "googleapis.com" "metadata": {
  "resource": "projects/123", "service": "pubsub.googleapis.com" } } This
  response indicates that the pubsub.googleapis.com API is not enabled.
  Example of an error that is returned when attempting to create a Spanner
  instance in a region that is out of stock: { "reason": "STOCKOUT" "domain":
  "spanner.googleapis.com", "metadata": { "availableRegions": "us-central1,us-
  east2" } }

  Messages:
    MetadataValue: Additional structured details about this error. Keys must
      match a regular expression of `a-z+` but should ideally be
      lowerCamelCase. Also, they must be limited to 64 characters in length.
      When identifying the current value of an exceeded limit, the units
      should be contained in the key, not the value. For example, rather than
      `{"instanceLimit": "100/request"}`, `{"instanceLimitPerRequest": "100"}`
      should be returned if the client exceeds the number of instances that
      can be created in a single (batch) request.

  Fields:
    domain: The logical grouping to which the "reason" belongs. The error
      domain is typically the registered service name of the tool or product
      that generates the error. Example: "pubsub.googleapis.com". If the error
      is generated by some common infrastructure, the error domain must be a
      globally unique value that identifies the infrastructure. For Google API
      infrastructure, the error domain is "googleapis.com".
    metadata: Additional structured details about this error. Keys must match
      a regular expression of `a-z+` but should ideally be lowerCamelCase.
      Also, they must be limited to 64 characters in length. When identifying
      the current value of an exceeded limit, the units should be contained in
      the key, not the value. For example, rather than `{"instanceLimit":
      "100/request"}`, `{"instanceLimitPerRequest": "100"}` should be returned
      if the client exceeds the number of instances that can be created in a
      single (batch) request.
    reason: The reason of the error. This is a constant value that identifies
      the proximate cause of the error. Error reasons are unique within a
      particular domain of errors. This should be at most 63 characters and
      match a regular expression of `A-Z+[A-Z0-9]`, which represents
      UPPER_SNAKE_CASE.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Additional structured details about this error. Keys must match a
    regular expression of `a-z+` but should ideally be lowerCamelCase. Also,
    they must be limited to 64 characters in length. When identifying the
    current value of an exceeded limit, the units should be contained in the
    key, not the value. For example, rather than `{"instanceLimit":
    "100/request"}`, `{"instanceLimitPerRequest": "100"}` should be returned
    if the client exceeds the number of instances that can be created in a
    single (batch) request.

    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.

    Fields:
      additionalProperties: Additional properties of type MetadataValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  domain = _messages.StringField(1)
  metadata = _messages.MessageField('MetadataValue', 2)
  reason = _messages.StringField(3)
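

# Illustrative sketch, not generated code: builds a GoogleRpcErrorInfo with one
# metadata map entry and serializes it using apitools' encoding helpers, which
# this module already imports. The reason, domain, and metadata values are
# placeholders taken from the docstring example above.
def _example_error_info_json():
  metadata = GoogleRpcErrorInfo.MetadataValue(additionalProperties=[
      GoogleRpcErrorInfo.MetadataValue.AdditionalProperty(
          key='resource', value='projects/123')])
  error_info = GoogleRpcErrorInfo(
      reason='API_DISABLED', domain='googleapis.com', metadata=metadata)
  return encoding.MessageToJson(error_info)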


class GoogleRpcResourceInfo(_messages.Message):
  r"""Describes the resource that is being accessed.

  Fields:
    description: Describes what error is encountered when accessing this
      resource. For example, updating a cloud project may require the `writer`
      permission on the developer console project.
    owner: The owner of the resource (optional). For example, "user:" or
      "project:".
    resourceName: The name of the resource being accessed. For example, a
      shared calendar name:
      "example.com_4fghdhgsrgh@group.calendar.google.com", if the current
      error is google.rpc.Code.PERMISSION_DENIED.
    resourceType: A name for the type of resource being accessed, e.g. "sql
      table", "cloud storage bucket", "file", "Google calendar"; or the type
      URL of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic".
  """

  description = _messages.StringField(1)
  owner = _messages.StringField(2)
  resourceName = _messages.StringField(3)
  resourceType = _messages.StringField(4)


class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.

  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.

  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth 2.0
      token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but should
      not exceed 40 characters.
    trace: A tracing token of the form "token:<tokenid>" to include in API
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """

  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.

    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    json = 0
    media = 1
    proto = 2

  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.

    Values:
      _1: v1 error format
      _2: v2 error format
    """
    _1 = 0
    _2 = 1

  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)


encoding.AddCustomJsonFieldMapping(
    StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
