"""Generated message classes for transcoder version v1.

This API converts video files into formats suitable for consumer distribution.
For more information, see the Transcoder API overview.
"""
# NOTE: This file is autogenerated and should not be edited by hand.

from __future__ import absolute_import

from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types


package = 'transcoder'


class AdBreak(_messages.Message):
  r"""Ad break.

  Fields:
    startTimeOffset: Start time in seconds for the ad break, relative to the
      output file timeline. The default is `0s`.
  """

  startTimeOffset = _messages.StringField(1)


class Aes128Encryption(_messages.Message):
  r"""Configuration for AES-128 encryption."""


class Animation(_messages.Message):
  r"""Animation types.

  Fields:
    animationEnd: End previous animation.
    animationFade: Display overlay object with fade animation.
    animationStatic: Display static overlay object.
  """

  animationEnd = _messages.MessageField('AnimationEnd', 1)
  animationFade = _messages.MessageField('AnimationFade', 2)
  animationStatic = _messages.MessageField('AnimationStatic', 3)


class AnimationEnd(_messages.Message):
  r"""End previous overlay animation from the video. Without `AnimationEnd`,
  the overlay object will keep the state of previous animation until the end
  of the video.

  Fields:
    startTimeOffset: The time to end the overlay object, in seconds.
      Default: 0
  """

  startTimeOffset = _messages.StringField(1)


class AnimationFade(_messages.Message):
  r"""Display overlay object with fade animation.

  Enums:
    FadeTypeValueValuesEnum: Required. Type of fade animation: `FADE_IN` or
      `FADE_OUT`.

  Fields:
    endTimeOffset: The time to end the fade animation, in seconds. Default:
      `start_time_offset` + 1s
    fadeType: Required. Type of fade animation: `FADE_IN` or `FADE_OUT`.
    startTimeOffset: The time to start the fade animation, in seconds.
      Default: 0
    xy: Normalized coordinates based on output video resolution. Valid values:
      `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
      object. For example, use the x and y coordinates {0,0} to position the
      top-left corner of the overlay animation in the top-left corner of the
      output video.
  """

  class FadeTypeValueValuesEnum(_messages.Enum):
    r"""Required. Type of fade animation: `FADE_IN` or `FADE_OUT`.

    Values:
      FADE_TYPE_UNSPECIFIED: The fade type is not specified.
      FADE_IN: Fade the overlay object into view.
      FADE_OUT: Fade the overlay object out of view.
    """
    FADE_TYPE_UNSPECIFIED = 0
    FADE_IN = 1
    FADE_OUT = 2

  endTimeOffset = _messages.StringField(1)
  fadeType = _messages.EnumField('FadeTypeValueValuesEnum', 2)
  startTimeOffset = _messages.StringField(3)
  xy = _messages.MessageField('NormalizedCoordinate', 4)


class AnimationStatic(_messages.Message):
  r"""Display static overlay object.

  Fields:
    startTimeOffset: The time to start displaying the overlay object, in
      seconds. Default: 0
    xy: Normalized coordinates based on output video resolution. Valid values:
      `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
      object. For example, use the x and y coordinates {0,0} to position the
      top-left corner of the overlay animation in the top-left corner of the
      output video.
  """

  startTimeOffset = _messages.StringField(1)
  xy = _messages.MessageField('NormalizedCoordinate', 2)


class Audio(_messages.Message):
  r"""Audio preprocessing configuration.

  Fields:
    highBoost: Enable boosting high frequency components. The default is
      `false`. **Note:** This field is not supported.
    lowBoost: Enable boosting low frequency components. The default is
      `false`. **Note:** This field is not supported.
    lufs: Specify audio loudness normalization in loudness units relative to
      full scale (LUFS). Enter a value between -24 and 0 (the default), where:
      * -24 is the Advanced Television Systems Committee (ATSC A/85) standard
      * -23 is the EU R128 broadcast standard * -19 is the prior standard for
      online mono audio * -18 is the ReplayGain standard * -16 is the prior
      standard for stereo audio * -14 is the new online audio standard
      recommended by Spotify, as well as Amazon Echo * 0 disables
      normalization
  """

  highBoost = _messages.BooleanField(1)
  lowBoost = _messages.BooleanField(2)
  lufs = _messages.FloatField(3)

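# Illustrative sketch only (not part of the generated schema): an Audio
# preprocessing config that normalizes loudness to the EU R128 level of -23
# LUFS and leaves the unsupported boost fields unset.
_example_audio_preprocessing = Audio(lufs=-23.0)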

class AudioMapping(_messages.Message):
  r"""The mapping for the JobConfig.edit_list atoms with audio
  EditAtom.inputs.

  Fields:
    atomKey: Required. The EditAtom.key that references the atom with audio
      inputs in the JobConfig.edit_list.
    gainDb: Audio volume control in dB. Negative values decrease volume,
      positive values increase. The default is 0.
    inputChannel: Required. The zero-based index of the channel in the input
      audio stream.
    inputKey: Required. The Input.key that identifies the input file.
    inputTrack: Required. The zero-based index of the track in the input file.
    outputChannel: Required. The zero-based index of the channel in the output
      audio stream.
  """

  atomKey = _messages.StringField(1)
  gainDb = _messages.FloatField(2)
  inputChannel = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  inputKey = _messages.StringField(4)
  inputTrack = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  outputChannel = _messages.IntegerField(6, variant=_messages.Variant.INT32)


class AudioStream(_messages.Message):
  r"""Audio stream resource.

  Fields:
    bitrateBps: Required. Audio bitrate in bits per second. Must be between 1
      and 10,000,000.
    channelCount: Number of audio channels. Must be between 1 and 6. The
      default is 2.
    channelLayout: A list of channel names specifying layout of the audio
      channels. This only affects the metadata embedded in the container
      headers, if supported by the specified format. The default is `["fl",
      "fr"]`. Supported channel names: - `fl` - Front left channel - `fr` -
      Front right channel - `sl` - Side left channel - `sr` - Side right
      channel - `fc` - Front center channel - `lfe` - Low frequency
    codec: The codec for this audio stream. The default is `aac`. Supported
      audio codecs: - `aac` - `aac-he` - `aac-he-v2` - `mp3` - `ac3` - `eac3`
      - `vorbis`
    displayName: The name for this particular audio stream that will be added
      to the HLS/DASH manifest. Not supported in MP4 files.
    languageCode: The BCP-47 language code, such as `en-US` or `sr-Latn`. For
      more information, see
      https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
      supported in MP4 files.
    mapping: The mapping for the JobConfig.edit_list atoms with audio
      EditAtom.inputs.
    sampleRateHertz: The audio sample rate in Hertz. The default is 48000
      Hertz.
  """

  bitrateBps = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  channelCount = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  channelLayout = _messages.StringField(3, repeated=True)
  codec = _messages.StringField(4)
  displayName = _messages.StringField(5)
  languageCode = _messages.StringField(6)
  mapping = _messages.MessageField('AudioMapping', 7, repeated=True)
  sampleRateHertz = _messages.IntegerField(8, variant=_messages.Variant.INT32)

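# Illustrative sketch only (not part of the generated schema): a stereo AAC
# stream; the bitrate and the atom/input keys in the mapping are assumptions
# chosen for the example.
_example_audio_stream = AudioStream(
    codec='aac',
    bitrateBps=64000,
    channelCount=2,
    channelLayout=['fl', 'fr'],
    sampleRateHertz=48000,
    mapping=[
        AudioMapping(atomKey='atom0', inputKey='input0', inputTrack=0,
                     inputChannel=0, outputChannel=0),
        AudioMapping(atomKey='atom0', inputKey='input0', inputTrack=0,
                     inputChannel=1, outputChannel=1),
    ])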

class BwdifConfig(_messages.Message):
  r"""Bob Weaver Deinterlacing Filter Configuration.

  Fields:
    deinterlaceAllFrames: Deinterlace all frames rather than just the frames
      identified as interlaced. The default is `false`.
    mode: Specifies the deinterlacing mode to adopt. The default is
      `send_frame`. Supported values: - `send_frame`: Output one frame for
      each frame - `send_field`: Output one frame for each field
    parity: The picture field parity assumed for the input interlaced video.
      The default is `auto`. Supported values: - `tff`: Assume the top field
      is first - `bff`: Assume the bottom field is first - `auto`: Enable
      automatic detection of field parity
  """

  deinterlaceAllFrames = _messages.BooleanField(1)
  mode = _messages.StringField(2)
  parity = _messages.StringField(3)


class Clearkey(_messages.Message):
  r"""Clearkey configuration."""


class Color(_messages.Message):
  r"""Color preprocessing configuration. **Note:** This configuration is not
  supported.

  Fields:
    brightness: Control brightness of the video. Enter a value between -1 and
      1, where -1 is minimum brightness and 1 is maximum brightness. 0 is no
      change. The default is 0.
    contrast: Control black and white contrast of the video. Enter a value
      between -1 and 1, where -1 is minimum contrast and 1 is maximum
      contrast. 0 is no change. The default is 0.
    saturation: Control color saturation of the video. Enter a value between
      -1 and 1, where -1 is fully desaturated and 1 is maximum saturation. 0
      is no change. The default is 0.
  """

  brightness = _messages.FloatField(1)
  contrast = _messages.FloatField(2)
  saturation = _messages.FloatField(3)


class Crop(_messages.Message):
  r"""Video cropping configuration for the input video. The cropped input
  video is scaled to match the output resolution.

  Fields:
    bottomPixels: The number of pixels to crop from the bottom. The default is
      0.
    leftPixels: The number of pixels to crop from the left. The default is 0.
    rightPixels: The number of pixels to crop from the right. The default is
      0.
    topPixels: The number of pixels to crop from the top. The default is 0.
  """

  bottomPixels = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  leftPixels = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  rightPixels = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  topPixels = _messages.IntegerField(4, variant=_messages.Variant.INT32)


class DashConfig(_messages.Message):
  r"""`DASH` manifest configuration.

  Enums:
    SegmentReferenceSchemeValueValuesEnum: The segment reference scheme for a
      `DASH` manifest. The default is `SEGMENT_LIST`.

  Fields:
    segmentReferenceScheme: The segment reference scheme for a `DASH`
      manifest. The default is `SEGMENT_LIST`.
  """

  class SegmentReferenceSchemeValueValuesEnum(_messages.Enum):
    r"""The segment reference scheme for a `DASH` manifest. The default is
    `SEGMENT_LIST`.

    Values:
      SEGMENT_REFERENCE_SCHEME_UNSPECIFIED: The segment reference scheme is
        not specified.
      SEGMENT_LIST: Explicitly lists the URLs of media files for each segment
        in the manifest.
      SEGMENT_TEMPLATE_NUMBER: SegmentSettings.individual_segments must be set
        to `true` to use this segment reference scheme. Uses the DASH
        specification `<SegmentTemplate>` tag to determine the URLs of media
        files for each segment.
    """
    SEGMENT_REFERENCE_SCHEME_UNSPECIFIED = 0
    SEGMENT_LIST = 1
    SEGMENT_TEMPLATE_NUMBER = 2

  segmentReferenceScheme = _messages.EnumField('SegmentReferenceSchemeValueValuesEnum', 1)


class Deblock(_messages.Message):
  r"""Deblock preprocessing configuration. **Note:** This configuration is not
  supported.

  Fields:
    enabled: Enable deblocker. The default is `false`.
    strength: Set strength of the deblocker. Enter a value between 0 and 1.
      The higher the value, the stronger the block removal. 0 is no
      deblocking. The default is 0.
  """

  enabled = _messages.BooleanField(1)
  strength = _messages.FloatField(2)


class Deinterlace(_messages.Message):
  r"""Deinterlace configuration for input video.

  Fields:
    bwdif: Specifies the Bob Weaver Deinterlacing Filter Configuration.
    yadif: Specifies the Yet Another Deinterlacing Filter Configuration.
  """

  bwdif = _messages.MessageField('BwdifConfig', 1)
  yadif = _messages.MessageField('YadifConfig', 2)


class Denoise(_messages.Message):
  r"""Denoise preprocessing configuration. **Note:** This configuration is not
  supported.

  Fields:
    strength: Set strength of the denoise. Enter a value between 0 and 1. The
      higher the value, the smoother the image. 0 is no denoising. The default
      is 0.
    tune: Set the denoiser mode. The default is `standard`. Supported denoiser
      modes: - `standard` - `grain`
  """

  strength = _messages.FloatField(1)
  tune = _messages.StringField(2)


class DrmSystems(_messages.Message):
  r"""Defines configuration for DRM systems in use.

  Fields:
    clearkey: Clearkey configuration.
    fairplay: Fairplay configuration.
    playready: Playready configuration.
    widevine: Widevine configuration.
  """

  clearkey = _messages.MessageField('Clearkey', 1)
  fairplay = _messages.MessageField('Fairplay', 2)
  playready = _messages.MessageField('Playready', 3)
  widevine = _messages.MessageField('Widevine', 4)


class EditAtom(_messages.Message):
  r"""Edit atom.

  Fields:
    endTimeOffset: End time in seconds for the atom, relative to the input
      file timeline. When `end_time_offset` is not specified, the `inputs` are
      used until the end of the atom.
    inputs: List of Input.key values identifying files that should be used in
      this atom. The listed `inputs` must have the same timeline.
    key: A unique key for this atom. Must be specified when using advanced
      mapping.
    startTimeOffset: Start time in seconds for the atom, relative to the input
      file timeline. The default is `0s`.
  """

  endTimeOffset = _messages.StringField(1)
  inputs = _messages.StringField(2, repeated=True)
  key = _messages.StringField(3)
  startTimeOffset = _messages.StringField(4)

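# Illustrative sketch only (not part of the generated schema): an edit atom
# that keeps the first 60 seconds of a single input; the key names and
# offsets are assumptions.
_example_edit_atom = EditAtom(
    key='atom0',
    inputs=['input0'],
    startTimeOffset='0s',
    endTimeOffset='60s')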

class ElementaryStream(_messages.Message):
  r"""Encoding of an input file such as an audio, video, or text track.
  Elementary streams must be packaged before mapping and sharing between
  different output formats.

  Fields:
    audioStream: Encoding of an audio stream.
    key: A unique key for this elementary stream.
    textStream: Encoding of a text stream. For example, closed captions or
      subtitles.
    videoStream: Encoding of a video stream.
  """

  audioStream = _messages.MessageField('AudioStream', 1)
  key = _messages.StringField(2)
  textStream = _messages.MessageField('TextStream', 3)
  videoStream = _messages.MessageField('VideoStream', 4)

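# Illustrative sketch only (not part of the generated schema): an elementary
# stream wrapping an AAC audio encoding so a MuxStream can reference it by
# key; the key and bitrate are assumptions.
_example_elementary_stream = ElementaryStream(
    key='audio-stream0',
    audioStream=AudioStream(codec='aac', bitrateBps=64000))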

class Empty(_messages.Message):
  r"""A generic empty message that you can re-use to avoid defining duplicated
  empty messages in your APIs. A typical example is to use it as the request
  or the response type of an API method. For instance: service Foo { rpc
  Bar(google.protobuf.Empty) returns (google.protobuf.Empty); }
  """



class Encryption(_messages.Message):
  r"""Encryption settings.

  Fields:
    aes128: Configuration for AES-128 encryption.
    drmSystems: Required. DRM system(s) to use; at least one must be
      specified. If a DRM system is omitted, it is considered disabled.
    id: Required. Identifier for this set of encryption options.
    mpegCenc: Configuration for MPEG Common Encryption (MPEG-CENC).
    sampleAes: Configuration for SAMPLE-AES encryption.
    secretManagerKeySource: Keys are stored in Google Secret Manager.
  """

  aes128 = _messages.MessageField('Aes128Encryption', 1)
  drmSystems = _messages.MessageField('DrmSystems', 2)
  id = _messages.StringField(3)
  mpegCenc = _messages.MessageField('MpegCommonEncryption', 4)
  sampleAes = _messages.MessageField('SampleAesEncryption', 5)
  secretManagerKeySource = _messages.MessageField('SecretManagerSource', 6)


class Fairplay(_messages.Message):
  r"""Fairplay configuration."""


class Fmp4Config(_messages.Message):
  r"""`fmp4` container configuration.

  Fields:
    codecTag: Optional. Specify the codec tag string that will be used in the
      media bitstream. When not specified, a value appropriate to the codec is
      used. Supported H265 codec tags: - `hvc1` (default) - `hev1`
  """

  codecTag = _messages.StringField(1)


class H264CodecSettings(_messages.Message):
  r"""H264 codec settings.

  Enums:
    FrameRateConversionStrategyValueValuesEnum: Optional. Frame rate
      conversion strategy for desired frame rate. The default is `DOWNSAMPLE`.

  Fields:
    allowOpenGop: Specifies whether an open Group of Pictures (GOP) structure
      should be allowed or not. The default is `false`.
    aqStrength: Specify the intensity of the adaptive quantizer (AQ). Must be
      between 0 and 1, where 0 disables the quantizer and 1 maximizes the
      quantizer. A higher value equals a lower bitrate but smoother image. The
      default is 0.
    bFrameCount: The number of consecutive B-frames. Must be greater than or
      equal to zero. Must be less than H264CodecSettings.gop_frame_count if
      set. The default is 0.
    bPyramid: Allow B-pyramid for reference frame selection. This may not be
      supported on all decoders. The default is `false`.
    bitrateBps: Required. The video bitrate in bits per second. The minimum
      value is 1,000. The maximum value is 800,000,000.
    crfLevel: Target CRF level. Must be between 10 and 36, where 10 is the
      highest quality and 36 is the most efficient compression. The default is
      21.
    enableTwoPass: Use two-pass encoding strategy to achieve better video
      quality. H264CodecSettings.rate_control_mode must be `vbr`. The default
      is `false`.
    entropyCoder: The entropy coder to use. The default is `cabac`. Supported
      entropy coders: - `cavlc` - `cabac`
    frameRate: Required. The target video frame rate in frames per second
      (FPS). Must be less than or equal to 120.
    frameRateConversionStrategy: Optional. Frame rate conversion strategy for
      desired frame rate. The default is `DOWNSAMPLE`.
    gopDuration: Select the GOP size based on the specified duration. The
      default is `3s`. Note that `gopDuration` must be less than or equal to
      [`segmentDuration`](#SegmentSettings), and
      [`segmentDuration`](#SegmentSettings) must be divisible by
      `gopDuration`.
    gopFrameCount: Select the GOP size based on the specified frame count.
      Must be greater than zero.
    heightPixels: The height of the video in pixels. Must be an even integer.
      When not specified, the height is adjusted to match the specified width
      and input aspect ratio. If both are omitted, the input height is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the height, in pixels, per the horizontal ASR. The API
      calculates the width per the horizontal ASR. The API detects any
      rotation metadata and swaps the requested height and width for the
      output.
    hlg: Optional. HLG color format setting for H264.
    pixelFormat: Pixel format to use. The default is `yuv420p`. Supported
      pixel formats: - `yuv420p` pixel format - `yuv422p` pixel format -
      `yuv444p` pixel format - `yuv420p10` 10-bit HDR pixel format -
      `yuv422p10` 10-bit HDR pixel format - `yuv444p10` 10-bit HDR pixel
      format - `yuv420p12` 12-bit HDR pixel format - `yuv422p12` 12-bit HDR
      pixel format - `yuv444p12` 12-bit HDR pixel format
    preset: Enforces the specified codec preset. The default is `veryfast`.
      The available options are [FFmpeg-
      compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Preset). Note that
      certain values for this field may cause the transcoder to override other
      fields you set in the `H264CodecSettings` message.
    profile: Enforces the specified codec profile. The following profiles are
      supported: * `baseline` * `main` * `high` (default) The available
      options are [FFmpeg-
      compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune). Note that
      certain values for this field may cause the transcoder to override other
      fields you set in the `H264CodecSettings` message.
    rateControlMode: Specify the mode. The default is `vbr`. Supported rate
      control modes: - `vbr` - variable bitrate - `crf` - constant rate factor
    sdr: Optional. SDR color format setting for H264.
    tune: Enforces the specified codec tune. The available options are
      [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
      Note that certain values for this field may cause the transcoder to
      override other fields you set in the `H264CodecSettings` message.
    vbvFullnessBits: Initial fullness of the Video Buffering Verifier (VBV)
      buffer in bits. Must be greater than zero. The default is equal to 90%
      of H264CodecSettings.vbv_size_bits.
    vbvSizeBits: Size of the Video Buffering Verifier (VBV) buffer in bits.
      Must be greater than zero. The default is equal to
      H264CodecSettings.bitrate_bps.
    widthPixels: The width of the video in pixels. Must be an even integer.
      When not specified, the width is adjusted to match the specified height
      and input aspect ratio. If both are omitted, the input width is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the width, in pixels, per the horizontal ASR. The API calculates
      the height per the horizontal ASR. The API detects any rotation metadata
      and swaps the requested height and width for the output.
  """

  class FrameRateConversionStrategyValueValuesEnum(_messages.Enum):
    r"""Optional. Frame rate conversion strategy for desired frame rate. The
    default is `DOWNSAMPLE`.

    Values:
      FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED: Unspecified frame rate
        conversion strategy.
      DOWNSAMPLE: Selectively retain frames to reduce the output frame rate.
        Every _n_-th frame is kept, where `n = ceil(input frame rate / target
        frame rate)`. When _n_ = 1 (that is, the target frame rate is greater
        than the input frame rate), the output frame rate matches the input
        frame rate. When _n_ > 1, frames are dropped and the output frame rate
        is equal to `(input frame rate / n)`. For more information, see
        [Calculate frame
        rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate).
      DROP_DUPLICATE: Drop or duplicate frames to match the specified frame
        rate.
    """
    FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED = 0
    DOWNSAMPLE = 1
    DROP_DUPLICATE = 2

  allowOpenGop = _messages.BooleanField(1)
  aqStrength = _messages.FloatField(2)
  bFrameCount = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  bPyramid = _messages.BooleanField(4)
  bitrateBps = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  crfLevel = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  enableTwoPass = _messages.BooleanField(7)
  entropyCoder = _messages.StringField(8)
  frameRate = _messages.FloatField(9)
  frameRateConversionStrategy = _messages.EnumField('FrameRateConversionStrategyValueValuesEnum', 10)
  gopDuration = _messages.StringField(11)
  gopFrameCount = _messages.IntegerField(12, variant=_messages.Variant.INT32)
  heightPixels = _messages.IntegerField(13, variant=_messages.Variant.INT32)
  hlg = _messages.MessageField('H264ColorFormatHLG', 14)
  pixelFormat = _messages.StringField(15)
  preset = _messages.StringField(16)
  profile = _messages.StringField(17)
  rateControlMode = _messages.StringField(18)
  sdr = _messages.MessageField('H264ColorFormatSDR', 19)
  tune = _messages.StringField(20)
  vbvFullnessBits = _messages.IntegerField(21, variant=_messages.Variant.INT32)
  vbvSizeBits = _messages.IntegerField(22, variant=_messages.Variant.INT32)
  widthPixels = _messages.IntegerField(23, variant=_messages.Variant.INT32)

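# Illustrative sketch only (not part of the generated schema): a 720p H.264
# stream using variable bitrate and a 3-second GOP, which divides evenly into
# the default 6-second segment duration; the numeric values are assumptions.
_example_h264 = H264CodecSettings(
    widthPixels=1280,
    heightPixels=720,
    bitrateBps=2500000,
    frameRate=30.0,
    rateControlMode='vbr',
    gopDuration='3s')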

class H264ColorFormatHLG(_messages.Message):
  r"""Convert the input video to a Hybrid Log Gamma (HLG) video."""


class H264ColorFormatSDR(_messages.Message):
  r"""Convert the input video to a Standard Dynamic Range (SDR) video."""


class H265CodecSettings(_messages.Message):
  r"""H265 codec settings.

  Enums:
    FrameRateConversionStrategyValueValuesEnum: Optional. Frame rate
      conversion strategy for desired frame rate. The default is `DOWNSAMPLE`.

  Fields:
    allowOpenGop: Specifies whether an open Group of Pictures (GOP) structure
      should be allowed or not. The default is `false`.
    aqStrength: Specify the intensity of the adaptive quantizer (AQ). Must be
      between 0 and 1, where 0 disables the quantizer and 1 maximizes the
      quantizer. A higher value equals a lower bitrate but smoother image. The
      default is 0.
    bFrameCount: The number of consecutive B-frames. Must be greater than or
      equal to zero. Must be less than H265CodecSettings.gop_frame_count if
      set. The default is 0.
    bPyramid: Allow B-pyramid for reference frame selection. This may not be
      supported on all decoders. The default is `false`.
    bitrateBps: Required. The video bitrate in bits per second. The minimum
      value is 1,000. The maximum value is 800,000,000.
    crfLevel: Target CRF level. Must be between 10 and 36, where 10 is the
      highest quality and 36 is the most efficient compression. The default is
      21.
    enableTwoPass: Use two-pass encoding strategy to achieve better video
      quality. H265CodecSettings.rate_control_mode must be `vbr`. The default
      is `false`.
    frameRate: Required. The target video frame rate in frames per second
      (FPS). Must be less than or equal to 120.
    frameRateConversionStrategy: Optional. Frame rate conversion strategy for
      desired frame rate. The default is `DOWNSAMPLE`.
    gopDuration: Select the GOP size based on the specified duration. The
      default is `3s`. Note that `gopDuration` must be less than or equal to
      [`segmentDuration`](#SegmentSettings), and
      [`segmentDuration`](#SegmentSettings) must be divisible by
      `gopDuration`.
    gopFrameCount: Select the GOP size based on the specified frame count.
      Must be greater than zero.
    hdr10: Optional. HDR10 color format setting for H265.
    heightPixels: The height of the video in pixels. Must be an even integer.
      When not specified, the height is adjusted to match the specified width
      and input aspect ratio. If both are omitted, the input height is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the height, in pixels, per the horizontal ASR. The API
      calculates the width per the horizontal ASR. The API detects any
      rotation metadata and swaps the requested height and width for the
      output.
    hlg: Optional. HLG color format setting for H265.
    pixelFormat: Pixel format to use. The default is `yuv420p`. Supported
      pixel formats: - `yuv420p` pixel format - `yuv422p` pixel format -
      `yuv444p` pixel format - `yuv420p10` 10-bit HDR pixel format -
      `yuv422p10` 10-bit HDR pixel format - `yuv444p10` 10-bit HDR pixel
      format - `yuv420p12` 12-bit HDR pixel format - `yuv422p12` 12-bit HDR
      pixel format - `yuv444p12` 12-bit HDR pixel format
    preset: Enforces the specified codec preset. The default is `veryfast`.
      The available options are [FFmpeg-
      compatible](https://trac.ffmpeg.org/wiki/Encode/H.265). Note that
      certain values for this field may cause the transcoder to override other
      fields you set in the `H265CodecSettings` message.
    profile: Enforces the specified codec profile. The following profiles are
      supported: * 8-bit profiles * `main` (default) * `main-intra` *
      `mainstillpicture` * 10-bit profiles * `main10` (default) *
      `main10-intra` * `main422-10` * `main422-10-intra` * `main444-10` *
      `main444-10-intra` * 12-bit profiles * `main12` (default) *
      `main12-intra` * `main422-12` * `main422-12-intra` * `main444-12` *
      `main444-12-intra` The available options are [FFmpeg-
      compatible](https://x265.readthedocs.io/). Note that certain values for
      this field may cause the transcoder to override other fields you set in
      the `H265CodecSettings` message.
    rateControlMode: Specify the mode. The default is `vbr`. Supported rate
      control modes: - `vbr` - variable bitrate - `crf` - constant rate factor
    sdr: Optional. SDR color format setting for H265.
    tune: Enforces the specified codec tune. The available options are
      [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265). Note
      that certain values for this field may cause the transcoder to override
      other fields you set in the `H265CodecSettings` message.
    vbvFullnessBits: Initial fullness of the Video Buffering Verifier (VBV)
      buffer in bits. Must be greater than zero. The default is equal to 90%
      of H265CodecSettings.vbv_size_bits.
    vbvSizeBits: Size of the Video Buffering Verifier (VBV) buffer in bits.
      Must be greater than zero. The default is equal to
      `VideoStream.bitrate_bps`.
    widthPixels: The width of the video in pixels. Must be an even integer.
      When not specified, the width is adjusted to match the specified height
      and input aspect ratio. If both are omitted, the input width is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the width, in pixels, per the horizontal ASR. The API calculates
      the height per the horizontal ASR. The API detects any rotation metadata
      and swaps the requested height and width for the output.
  """

  class FrameRateConversionStrategyValueValuesEnum(_messages.Enum):
    r"""Optional. Frame rate conversion strategy for desired frame rate. The
    default is `DOWNSAMPLE`.

    Values:
      FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED: Unspecified frame rate
        conversion strategy.
      DOWNSAMPLE: Selectively retain frames to reduce the output frame rate.
        Every _n_-th frame is kept, where `n = ceil(input frame rate / target
        frame rate)`. When _n_ = 1 (that is, the target frame rate is greater
        than the input frame rate), the output frame rate matches the input
        frame rate. When _n_ > 1, frames are dropped and the output frame rate
        is equal to `(input frame rate / n)`. For more information, see
        [Calculate frame
        rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate).
      DROP_DUPLICATE: Drop or duplicate frames to match the specified frame
        rate.
    """
    FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED = 0
    DOWNSAMPLE = 1
    DROP_DUPLICATE = 2

  allowOpenGop = _messages.BooleanField(1)
  aqStrength = _messages.FloatField(2)
  bFrameCount = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  bPyramid = _messages.BooleanField(4)
  bitrateBps = _messages.IntegerField(5, variant=_messages.Variant.INT32)
  crfLevel = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  enableTwoPass = _messages.BooleanField(7)
  frameRate = _messages.FloatField(8)
  frameRateConversionStrategy = _messages.EnumField('FrameRateConversionStrategyValueValuesEnum', 9)
  gopDuration = _messages.StringField(10)
  gopFrameCount = _messages.IntegerField(11, variant=_messages.Variant.INT32)
  hdr10 = _messages.MessageField('H265ColorFormatHDR10', 12)
  heightPixels = _messages.IntegerField(13, variant=_messages.Variant.INT32)
  hlg = _messages.MessageField('H265ColorFormatHLG', 14)
  pixelFormat = _messages.StringField(15)
  preset = _messages.StringField(16)
  profile = _messages.StringField(17)
  rateControlMode = _messages.StringField(18)
  sdr = _messages.MessageField('H265ColorFormatSDR', 19)
  tune = _messages.StringField(20)
  vbvFullnessBits = _messages.IntegerField(21, variant=_messages.Variant.INT32)
  vbvSizeBits = _messages.IntegerField(22, variant=_messages.Variant.INT32)
  widthPixels = _messages.IntegerField(23, variant=_messages.Variant.INT32)


class H265ColorFormatHDR10(_messages.Message):
  r"""Convert the input video to a High Dynamic Range 10 (HDR10) video."""


class H265ColorFormatHLG(_messages.Message):
  r"""Convert the input video to a Hybrid Log Gamma (HLG) video."""


class H265ColorFormatSDR(_messages.Message):
  r"""Convert the input video to a Standard Dynamic Range (SDR) video."""


class Image(_messages.Message):
  r"""Overlaid image.

  Fields:
    alpha: Target image opacity. Valid values are from `1.0` (solid, default)
      to `0.0` (transparent), exclusive. Set this to a value greater than
      `0.0`.
    resolution: Normalized image resolution, based on output video resolution.
      Valid values: `0.0`–`1.0`. To respect the original image aspect
      ratio, set either `x` or `y` to `0.0`. To use the original image
      resolution, set both `x` and `y` to `0.0`.
    uri: Required. URI of the image in Cloud Storage. For example,
      `gs://bucket/inputs/image.png`. Only PNG and JPEG images are supported.
  """

  alpha = _messages.FloatField(1)
  resolution = _messages.MessageField('NormalizedCoordinate', 2)
  uri = _messages.StringField(3)


class Input(_messages.Message):
  r"""Input asset.

  Fields:
    attributes: Optional. Input Attributes.
    key: A unique key for this input. Must be specified when using advanced
      mapping and edit lists.
    preprocessingConfig: Preprocessing configurations.
    uri: URI of the media. Input files must be at least 5 seconds in duration
      and stored in Cloud Storage (for example,
      `gs://bucket/inputs/file.mp4`). If empty, the value is populated from
      Job.input_uri. See [Supported input and output
      formats](https://cloud.google.com/transcoder/docs/concepts/supported-
      input-and-output-formats).
  """

  attributes = _messages.MessageField('InputAttributes', 1)
  key = _messages.StringField(2)
  preprocessingConfig = _messages.MessageField('PreprocessingConfig', 3)
  uri = _messages.StringField(4)


class InputAttributes(_messages.Message):
  r"""Input attributes that provide additional information about the input
  asset.

  Fields:
    trackDefinitions: Optional. A list of track definitions for the input
      asset.
  """

  trackDefinitions = _messages.MessageField('TrackDefinition', 1, repeated=True)


class Job(_messages.Message):
  r"""Transcoding job resource.

  Enums:
    ModeValueValuesEnum: The processing mode of the job. The default is
      `PROCESSING_MODE_INTERACTIVE`.
    OptimizationValueValuesEnum: Optional. The optimization strategy of the
      job. The default is `AUTODETECT`.
    StateValueValuesEnum: Output only. The current state of the job.

  Messages:
    LabelsValue: The labels associated with this job. You can use these to
      organize and group your jobs.

  Fields:
    batchModePriority: The processing priority of a batch job. This field can
      only be set for batch mode jobs. The default value is 0. This value
      cannot be negative. Higher values correspond to higher priorities for
      the job.
    config: The configuration for this job.
    createTime: Output only. The time the job was created.
    endTime: Output only. The time the transcoding finished.
    error: Output only. An error object that describes the reason for the
      failure. This property is always present when ProcessingState is
      `FAILED`.
    fillContentGaps: Optional. Insert silence and duplicate frames when
      timestamp gaps are detected in a given stream.
    inputUri: Input only. Specify the `input_uri` to populate empty `uri`
      fields in each element of `Job.config.inputs` or
      `JobTemplate.config.inputs` when using a template. URI of the media. Input
      files must be at least 5 seconds in duration and stored in Cloud Storage
      (for example, `gs://bucket/inputs/file.mp4`). See [Supported input and
      output
      formats](https://cloud.google.com/transcoder/docs/concepts/supported-
      input-and-output-formats).
    labels: The labels associated with this job. You can use these to organize
      and group your jobs.
    mode: The processing mode of the job. The default is
      `PROCESSING_MODE_INTERACTIVE`.
    name: The resource name of the job. Format:
      `projects/{project_number}/locations/{location}/jobs/{job}`
    optimization: Optional. The optimization strategy of the job. The default
      is `AUTODETECT`.
    outputUri: Input only. Specify the `output_uri` to populate an empty
      `Job.config.output.uri` or `JobTemplate.config.output.uri` when using
      a template. URI for the output file(s). For example, `gs://my-
      bucket/outputs/`. See [Supported input and output
      formats](https://cloud.google.com/transcoder/docs/concepts/supported-
      input-and-output-formats).
    startTime: Output only. The time the transcoding started.
    state: Output only. The current state of the job.
    templateId: Input only. Specify the `template_id` to use for populating
      `Job.config`. The default is `preset/web-hd`, which is the only
      supported preset. User defined JobTemplate: `{job_template_id}`
    ttlAfterCompletionDays: The job's time to live (TTL) in days, effective
      after job completion. The job is deleted automatically after the given
      TTL expires. Enter a value between 1 and 90. The default is 30.
  """

  class ModeValueValuesEnum(_messages.Enum):
    r"""The processing mode of the job. The default is
    `PROCESSING_MODE_INTERACTIVE`.

    Values:
      PROCESSING_MODE_UNSPECIFIED: The job processing mode is not specified.
      PROCESSING_MODE_INTERACTIVE: The job processing mode is interactive
        mode. An interactive job is run if quota allows for it; otherwise it
        is rejected.
      PROCESSING_MODE_BATCH: The job processing mode is batch mode. Batch mode
        allows queuing of jobs.
    """
    PROCESSING_MODE_UNSPECIFIED = 0
    PROCESSING_MODE_INTERACTIVE = 1
    PROCESSING_MODE_BATCH = 2

  class OptimizationValueValuesEnum(_messages.Enum):
    r"""Optional. The optimization strategy of the job. The default is
    `AUTODETECT`.

    Values:
      OPTIMIZATION_STRATEGY_UNSPECIFIED: The optimization strategy is not
        specified.
      AUTODETECT: Prioritize job processing speed.
      DISABLED: Disable all optimizations.
    """
    OPTIMIZATION_STRATEGY_UNSPECIFIED = 0
    AUTODETECT = 1
    DISABLED = 2

  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The current state of the job.

    Values:
      PROCESSING_STATE_UNSPECIFIED: The processing state is not specified.
      PENDING: The job is enqueued and will be picked up for processing soon.
      RUNNING: The job is being processed.
      SUCCEEDED: The job has been completed successfully.
      FAILED: The job has failed. For additional information, see
        [Troubleshooting](https://cloud.google.com/transcoder/docs/troubleshooting).
    """
    PROCESSING_STATE_UNSPECIFIED = 0
    PENDING = 1
    RUNNING = 2
    SUCCEEDED = 3
    FAILED = 4

  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""The labels associated with this job. You can use these to organize and
    group your jobs.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  batchModePriority = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  config = _messages.MessageField('JobConfig', 2)
  createTime = _messages.StringField(3)
  endTime = _messages.StringField(4)
  error = _messages.MessageField('Status', 5)
  fillContentGaps = _messages.BooleanField(6)
  inputUri = _messages.StringField(7)
  labels = _messages.MessageField('LabelsValue', 8)
  mode = _messages.EnumField('ModeValueValuesEnum', 9)
  name = _messages.StringField(10)
  optimization = _messages.EnumField('OptimizationValueValuesEnum', 11)
  outputUri = _messages.StringField(12)
  startTime = _messages.StringField(13)
  state = _messages.EnumField('StateValueValuesEnum', 14)
  templateId = _messages.StringField(15)
  ttlAfterCompletionDays = _messages.IntegerField(16, variant=_messages.Variant.INT32)

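# Illustrative sketch only (not part of the generated schema): a batch-mode
# job that relies on the built-in `preset/web-hd` template instead of an
# inline JobConfig; the bucket paths and priority are assumptions.
_example_template_job = Job(
    inputUri='gs://my-bucket/inputs/file.mp4',
    outputUri='gs://my-bucket/outputs/',
    templateId='preset/web-hd',
    mode=Job.ModeValueValuesEnum.PROCESSING_MODE_BATCH,
    batchModePriority=10)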

class JobConfig(_messages.Message):
  r"""Job configuration

  Fields:
    adBreaks: List of ad breaks. Specifies where to insert ad break tags in
      the output manifests.
    editList: List of edit atoms. Defines the ultimate timeline of the
      resulting file or manifest.
    elementaryStreams: List of elementary streams.
    encryptions: List of encryption configurations for the content. Each
      configuration has an ID. Specify this ID in the MuxStream.encryption_id
      field to indicate the configuration to use for that `MuxStream` output.
    inputs: List of input assets stored in Cloud Storage.
    manifests: List of output manifests.
    muxStreams: List of multiplexing settings for output streams.
    output: Output configuration.
    overlays: List of overlays on the output video, in descending Z-order.
    pubsubDestination: Destination on Pub/Sub.
    spriteSheets: List of output sprite sheets. Sprite sheets require at
      least one VideoStream in the JobConfig.
  """

  adBreaks = _messages.MessageField('AdBreak', 1, repeated=True)
  editList = _messages.MessageField('EditAtom', 2, repeated=True)
  elementaryStreams = _messages.MessageField('ElementaryStream', 3, repeated=True)
  encryptions = _messages.MessageField('Encryption', 4, repeated=True)
  inputs = _messages.MessageField('Input', 5, repeated=True)
  manifests = _messages.MessageField('Manifest', 6, repeated=True)
  muxStreams = _messages.MessageField('MuxStream', 7, repeated=True)
  output = _messages.MessageField('Output', 8)
  overlays = _messages.MessageField('Overlay', 9, repeated=True)
  pubsubDestination = _messages.MessageField('PubsubDestination', 10)
  spriteSheets = _messages.MessageField('SpriteSheet', 11, repeated=True)


class JobTemplate(_messages.Message):
  r"""Transcoding job template resource.

  Messages:
    LabelsValue: The labels associated with this job template. You can use
      these to organize and group your job templates.

  Fields:
    config: The configuration for this template.
    labels: The labels associated with this job template. You can use these to
      organize and group your job templates.
    name: The resource name of the job template. Format: `projects/{project_nu
      mber}/locations/{location}/jobTemplates/{job_template}`
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""The labels associated with this job template. You can use these to
    organize and group your job templates.

    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.

    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.

      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """

      key = _messages.StringField(1)
      value = _messages.StringField(2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  config = _messages.MessageField('JobConfig', 1)
  labels = _messages.MessageField('LabelsValue', 2)
  name = _messages.StringField(3)


class ListJobTemplatesResponse(_messages.Message):
  r"""Response message for `TranscoderService.ListJobTemplates`.

  Fields:
    jobTemplates: List of job templates in the specified region.
    nextPageToken: The pagination token.
    unreachable: List of regions that could not be reached.
  """

  jobTemplates = _messages.MessageField('JobTemplate', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
  unreachable = _messages.StringField(3, repeated=True)


class ListJobsResponse(_messages.Message):
  r"""Response message for `TranscoderService.ListJobs`.

  Fields:
    jobs: List of jobs in the specified region.
    nextPageToken: The pagination token.
    unreachable: List of regions that could not be reached.
  """

  jobs = _messages.MessageField('Job', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
  unreachable = _messages.StringField(3, repeated=True)


class Manifest(_messages.Message):
  r"""Manifest configuration.

  Enums:
    TypeValueValuesEnum: Required. Type of the manifest.

  Fields:
    dash: `DASH` manifest configuration.
    fileName: The name of the generated file. The default is `manifest` with
      the extension suffix corresponding to the Manifest.type.
    muxStreams: Required. List of user supplied MuxStream.key values that
      should appear in this manifest. When Manifest.type is `HLS`, a media
      manifest with name MuxStream.key and `.m3u8` extension is generated for
      each element in this list.
    type: Required. Type of the manifest.
  """

  class TypeValueValuesEnum(_messages.Enum):
    r"""Required. Type of the manifest.

    Values:
      MANIFEST_TYPE_UNSPECIFIED: The manifest type is not specified.
      HLS: Create an HLS manifest. The corresponding file extension is
        `.m3u8`.
      DASH: Create an MPEG-DASH manifest. The corresponding file extension is
        `.mpd`.
    """
    MANIFEST_TYPE_UNSPECIFIED = 0
    HLS = 1
    DASH = 2

  dash = _messages.MessageField('DashConfig', 1)
  fileName = _messages.StringField(2)
  muxStreams = _messages.StringField(3, repeated=True)
  type = _messages.EnumField('TypeValueValuesEnum', 4)

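# Illustrative sketch only (not part of the generated schema): a DASH
# manifest that references segments through SegmentTemplate numbering; the
# file name and mux stream keys are assumptions.
_DASH_SCHEME = DashConfig.SegmentReferenceSchemeValueValuesEnum
_example_manifest = Manifest(
    type=Manifest.TypeValueValuesEnum.DASH,
    fileName='manifest.mpd',
    muxStreams=['video-only-sd', 'audio-only'],
    dash=DashConfig(
        segmentReferenceScheme=_DASH_SCHEME.SEGMENT_TEMPLATE_NUMBER))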

class MpegCommonEncryption(_messages.Message):
  r"""Configuration for MPEG Common Encryption (MPEG-CENC).

  Fields:
    scheme: Required. Specify the encryption scheme. Supported encryption
      schemes: - `cenc` - `cbcs`
  """

  scheme = _messages.StringField(1)


class MuxStream(_messages.Message):
  r"""Multiplexing settings for output stream.

  Fields:
    container: The container format. The default is `mp4`. Supported streaming
      formats: - `ts` - `fmp4` (the corresponding file extension is `.m4s`)
      Supported standalone file formats: - `mp4` - `mp3` - `ogg` - `vtt` See
      also: [Supported input and output
      formats](https://cloud.google.com/transcoder/docs/concepts/supported-
      input-and-output-formats)
    elementaryStreams: List of ElementaryStream.key values multiplexed in this
      stream.
    encryptionId: Identifier of the encryption configuration to use. If
      omitted, output will be unencrypted.
    fileName: The name of the generated file. The default is MuxStream.key
      with the extension suffix corresponding to the MuxStream.container.
      Individual segments also have an incremental 10-digit zero-padded suffix
      starting from 0 before the extension, such as `mux_stream0000000123.ts`.
    fmp4: Optional. `fmp4` container configuration.
    key: A unique key for this multiplexed stream.
    segmentSettings: Segment settings for `ts`, `fmp4` and `vtt`.
  """

  container = _messages.StringField(1)
  elementaryStreams = _messages.StringField(2, repeated=True)
  encryptionId = _messages.StringField(3)
  fileName = _messages.StringField(4)
  fmp4 = _messages.MessageField('Fmp4Config', 5)
  key = _messages.StringField(6)
  segmentSettings = _messages.MessageField('SegmentSettings', 7)


class NormalizedCoordinate(_messages.Message):
  r"""2D normalized coordinates. Default: `{0.0, 0.0}`

  Fields:
    x: Normalized x coordinate.
    y: Normalized y coordinate.
  """

  x = _messages.FloatField(1)
  y = _messages.FloatField(2)


class Output(_messages.Message):
  r"""Location of output file(s) in a Cloud Storage bucket.

  Fields:
    uri: URI for the output file(s). For example, `gs://my-bucket/outputs/`.
      Must be a directory and not a top-level bucket. If empty, the value is
      populated from Job.output_uri. See [Supported input and output
      formats](https://cloud.google.com/transcoder/docs/concepts/supported-
      input-and-output-formats).
  """

  uri = _messages.StringField(1)


class Overlay(_messages.Message):
  r"""Overlay configuration.

  Fields:
    animations: List of animations. The list should be chronological, without
      any time overlap.
    image: Image overlay.
  """

  animations = _messages.MessageField('Animation', 1, repeated=True)
  image = _messages.MessageField('Image', 2)

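# Illustrative sketch only (not part of the generated schema): a static logo
# overlay pinned to the top-left corner and removed after ten seconds; the
# image URI, size, and timings are assumptions.
_example_overlay = Overlay(
    image=Image(
        uri='gs://my-bucket/inputs/logo.png',
        alpha=1.0,
        resolution=NormalizedCoordinate(x=0.1, y=0.0)),
    animations=[
        Animation(animationStatic=AnimationStatic(
            startTimeOffset='0s', xy=NormalizedCoordinate(x=0.0, y=0.0))),
        Animation(animationEnd=AnimationEnd(startTimeOffset='10s')),
    ])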

class Pad(_messages.Message):
  r"""Pad filter configuration for the input video. The padded input video is
  scaled after padding with black to match the output resolution.

  Fields:
    bottomPixels: The number of pixels to add to the bottom. The default is 0.
    leftPixels: The number of pixels to add to the left. The default is 0.
    rightPixels: The number of pixels to add to the right. The default is 0.
    topPixels: The number of pixels to add to the top. The default is 0.
  """

  bottomPixels = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  leftPixels = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  rightPixels = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  topPixels = _messages.IntegerField(4, variant=_messages.Variant.INT32)


class Playready(_messages.Message):
  r"""Playready configuration."""


class PreprocessingConfig(_messages.Message):
  r"""Preprocessing configurations.

  Fields:
    audio: Audio preprocessing configuration.
    color: Color preprocessing configuration.
    crop: Specify the video cropping configuration.
    deblock: Deblock preprocessing configuration.
    deinterlace: Specify the video deinterlace configuration.
    denoise: Denoise preprocessing configuration.
    pad: Specify the video pad filter configuration.
  """

  audio = _messages.MessageField('Audio', 1)
  color = _messages.MessageField('Color', 2)
  crop = _messages.MessageField('Crop', 3)
  deblock = _messages.MessageField('Deblock', 4)
  deinterlace = _messages.MessageField('Deinterlace', 5)
  denoise = _messages.MessageField('Denoise', 6)
  pad = _messages.MessageField('Pad', 7)

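# Illustrative sketch only (not part of the generated schema): crop 16 rows
# of pixels from the top and bottom and deinterlace with the bwdif filter;
# the pixel counts and mode are assumptions.
_example_preprocessing = PreprocessingConfig(
    crop=Crop(topPixels=16, bottomPixels=16),
    deinterlace=Deinterlace(bwdif=BwdifConfig(mode='send_frame')))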

class PubsubDestination(_messages.Message):
  r"""A Pub/Sub destination.

  Fields:
    topic: The name of the Pub/Sub topic to publish job completion
      notification to. For example: `projects/{project}/topics/{topic}`.
  """

  topic = _messages.StringField(1)


class SampleAesEncryption(_messages.Message):
  r"""Configuration for SAMPLE-AES encryption."""


class SecretManagerSource(_messages.Message):
  r"""Configuration for secrets stored in Google Secret Manager.

  Fields:
    secretVersion: Required. The name of the Secret Version containing the
      encryption key in the following format:
      `projects/{project}/secrets/{secret_id}/versions/{version_number}` Note
      that only numbered versions are supported. Aliases like "latest" are not
      supported.
  """

  secretVersion = _messages.StringField(1)

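# Illustrative sketch only (not part of the generated schema): an MPEG-CENC
# encryption config keyed from Secret Manager with PlayReady as the DRM
# system; the id and the Secret Manager resource name are assumptions.
_example_encryption = Encryption(
    id='cenc-playready',
    mpegCenc=MpegCommonEncryption(scheme='cenc'),
    drmSystems=DrmSystems(playready=Playready()),
    secretManagerKeySource=SecretManagerSource(
        secretVersion='projects/my-project/secrets/drm-key/versions/1'))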

class SegmentSettings(_messages.Message):
  r"""Segment settings for `ts`, `fmp4` and `vtt`.

  Fields:
    individualSegments: Required. Create an individual segment file. The
      default is `false`.
    segmentDuration: Duration of the segments in seconds. The default is
      `6.0s`. Note that `segmentDuration` must be greater than or equal to
      [`gopDuration`](#videostream), and `segmentDuration` must be divisible
      by [`gopDuration`](#videostream).
  """

  individualSegments = _messages.BooleanField(1)
  segmentDuration = _messages.StringField(2)

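# Illustrative sketch only (not part of the generated schema): an fmp4 mux
# stream split into individual 6-second segments for DASH packaging; the keys
# are assumptions.
_example_mux_stream = MuxStream(
    key='video-only-sd',
    container='fmp4',
    elementaryStreams=['video-stream0'],
    segmentSettings=SegmentSettings(
        individualSegments=True, segmentDuration='6s'))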

class SpriteSheet(_messages.Message):
  r"""Sprite sheet configuration.

  Fields:
    columnCount: The maximum number of sprites per row in a sprite sheet. The
      default is 0, which indicates no maximum limit.
    endTimeOffset: End time in seconds, relative to the output file timeline.
      When `end_time_offset` is not specified, the sprites are generated until
      the end of the output file.
    filePrefix: Required. File name prefix for the generated sprite sheets.
      Each sprite sheet has an incremental 10-digit zero-padded suffix
      starting from 0 before the extension, such as
      `sprite_sheet0000000123.jpeg`.
    format: Format type. The default is `jpeg`. Supported formats: - `jpeg`
    interval: Starting from `0s`, create sprites at regular intervals. Specify
      the interval value in seconds.
    quality: The quality of the generated sprite sheet. Enter a value between
      1 and 100, where 1 is the lowest quality and 100 is the highest quality.
      The default is 100. A high quality value corresponds to a low image data
      compression ratio.
    rowCount: The maximum number of rows per sprite sheet. When the sprite
      sheet is full, a new sprite sheet is created. The default is 0, which
      indicates no maximum limit.
    spriteHeightPixels: Required. The height of sprite in pixels. Must be an
      even integer. To preserve the source aspect ratio, set the
      SpriteSheet.sprite_height_pixels field or the
      SpriteSheet.sprite_width_pixels field, but not both (the API will
      automatically calculate the missing field). For portrait videos that
      contain horizontal ASR and rotation metadata, provide the height, in
      pixels, per the horizontal ASR. The API calculates the width per the
      horizontal ASR. The API detects any rotation metadata and swaps the
      requested height and width for the output.
    spriteWidthPixels: Required. The width of sprite in pixels. Must be an
      even integer. To preserve the source aspect ratio, set the
      SpriteSheet.sprite_width_pixels field or the
      SpriteSheet.sprite_height_pixels field, but not both (the API will
      automatically calculate the missing field). For portrait videos that
      contain horizontal ASR and rotation metadata, provide the width, in
      pixels, per the horizontal ASR. The API calculates the height per the
      horizontal ASR. The API detects any rotation metadata and swaps the
      requested height and width for the output.
    startTimeOffset: Start time in seconds, relative to the output file
      timeline. Determines the first sprite to pick. The default is `0s`.
    totalCount: Total number of sprites. Create the specified number of
      sprites distributed evenly across the timeline of the output media. The
      default is 100.
  """

  columnCount = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  endTimeOffset = _messages.StringField(2)
  filePrefix = _messages.StringField(3)
  format = _messages.StringField(4)
  interval = _messages.StringField(5)
  quality = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  rowCount = _messages.IntegerField(7, variant=_messages.Variant.INT32)
  spriteHeightPixels = _messages.IntegerField(8, variant=_messages.Variant.INT32)
  spriteWidthPixels = _messages.IntegerField(9, variant=_messages.Variant.INT32)
  startTimeOffset = _messages.StringField(10)
  totalCount = _messages.IntegerField(11, variant=_messages.Variant.INT32)

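# Illustrative sketch only (not part of the generated schema): a sprite sheet
# of 160x90 thumbnails sampled every 10 seconds of output; the prefix, sprite
# size, and interval are assumptions.
_example_sprite_sheet = SpriteSheet(
    filePrefix='thumbnail',
    spriteWidthPixels=160,
    spriteHeightPixels=90,
    interval='10s')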

class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.

  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.

  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth 2.0
      token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but should
      not exceed 40 characters.
    trace: A tracing token of the form "token:<tokenid>" to include in API
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """

  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.

    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    json = 0
    media = 1
    proto = 2

  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.

    Values:
      _1: v1 error format
      _2: v2 error format
    """
    _1 = 0
    _2 = 1

  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)
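

# Illustrative sketch: standard query parameters that may accompany any
# method call, here requesting a compact JSON response. The field selector
# string is a placeholder assumption.
def _example_standard_query_parameters():
  """Returns StandardQueryParameters asking for a trimmed JSON response."""
  return StandardQueryParameters(
      alt=StandardQueryParameters.AltValueValuesEnum.json,
      fields='jobs(name,state)',  # partial-response field selector
      prettyPrint=False,
  )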


class Status(_messages.Message):
  r"""The `Status` type defines a logical error model that is suitable for
  different programming environments, including REST APIs and RPC APIs. It is
  used by [gRPC](https://github.com/grpc). Each `Status` message contains
  three pieces of data: error code, error message, and error details. You can
  find out more about this error model and how to work with it in the [API
  Design Guide](https://cloud.google.com/apis/design/errors).

  Messages:
    DetailsValueListEntry: A DetailsValueListEntry object.

  Fields:
    code: The status code, which should be an enum value of google.rpc.Code.
    details: A list of messages that carry the error details. There is a
      common set of message types for APIs to use.
    message: A developer-facing error message, which should be in English. Any
      user-facing error message should be localized and sent in the
      google.rpc.Status.details field, or localized by the client.
  """

  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValueListEntry(_messages.Message):
    r"""A DetailsValueListEntry object.

    Messages:
      AdditionalProperty: An additional property for a DetailsValueListEntry
        object.

    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """

    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValueListEntry object.

      Fields:
        key: Name of the additional property.
        value: An extra_types.JsonValue attribute.
      """

      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)

    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)

  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
  message = _messages.StringField(3)
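

# Illustrative sketch: reading the '@type' URL carried by each detail entry
# of a Status message. DetailsValueListEntry is a loose key/value map, so the
# helper below simply scans additionalProperties for the '@type' key; the
# helper name is an assumption, not generated code.
def _example_status_detail_types(status):
  """Returns the @type URLs found in a Status message's details."""
  type_urls = []
  for entry in status.details:
    for prop in entry.additionalProperties:
      if prop.key == '@type':
        # prop.value is an extra_types.JsonValue wrapper around the URL.
        type_urls.append(prop.value.string_value)
  return type_urls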


class TextMapping(_messages.Message):
  r"""The mapping for the JobConfig.edit_list atoms with text EditAtom.inputs.

  Fields:
    atomKey: Required. The EditAtom.key that references the atom with text
      inputs in the JobConfig.edit_list.
    inputKey: Required. The Input.key that identifies the input file.
    inputTrack: Required. The zero-based index of the track in the input file.
  """

  atomKey = _messages.StringField(1)
  inputKey = _messages.StringField(2)
  inputTrack = _messages.IntegerField(3, variant=_messages.Variant.INT32)


class TextStream(_messages.Message):
  r"""Encoding of a text stream. For example, closed captions or subtitles.

  Fields:
    codec: The codec for this text stream. The default is `webvtt`. Supported
      text codecs: - `srt` - `ttml` - `cea608` - `cea708` - `webvtt`
    displayName: The name for this particular text stream that will be added
      to the HLS/DASH manifest. Not supported in MP4 files.
    languageCode: The BCP-47 language code, such as `en-US` or `sr-Latn`. For
      more information, see
      https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
      supported in MP4 files.
    mapping: The mapping for the JobConfig.edit_list atoms with text
      EditAtom.inputs.
  """

  codec = _messages.StringField(1)
  displayName = _messages.StringField(2)
  languageCode = _messages.StringField(3)
  mapping = _messages.MessageField('TextMapping', 4, repeated=True)
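

# Illustrative sketch: a WebVTT subtitle stream wired to a text input through
# the edit list. The atom and input keys are placeholders and must match
# EditAtom.key / Input.key values defined elsewhere in the JobConfig.
def _example_text_stream():
  """Returns a sample TextStream mapping one caption track from 'input0'."""
  return TextStream(
      codec='webvtt',
      languageCode='en-US',
      mapping=[
          TextMapping(
              atomKey='atom0',  # EditAtom.key in JobConfig.edit_list
              inputKey='input0',  # Input.key of the captions file
              inputTrack=0,  # zero-based track index within that input
          ),
      ],
  )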


class TrackDefinition(_messages.Message):
  r"""Track definition for the input asset.

  Fields:
    detectLanguages: Optional. Whether to automatically detect the languages
      present in the track. If true, the system will attempt to identify all
      the languages present in the track and populate the languages field.
    detectedLanguages: Output only. A list of languages detected in the input
      asset, represented by a BCP 47 language code, such as "en-US" or
      "sr-Latn". For more information, see
      https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. This
      field is only populated if the detect_languages field is set to true.
    inputTrack: The input track.
    languages: Optional. A list of languages spoken in the input asset,
      represented by a BCP 47 language code, such as "en-US" or "sr-Latn". For
      more information, see
      https://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  """

  detectLanguages = _messages.BooleanField(1)
  detectedLanguages = _messages.StringField(2, repeated=True)
  inputTrack = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  languages = _messages.StringField(4, repeated=True)
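

# Illustrative sketch: asking the service to detect the spoken languages on
# the first track instead of declaring them up front. detectedLanguages is
# output only, so it is left unset here.
def _example_track_definition():
  """Returns a TrackDefinition with automatic language detection enabled."""
  return TrackDefinition(
      inputTrack=0,
      detectLanguages=True,
  )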


class TranscoderProjectsLocationsJobTemplatesCreateRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobTemplatesCreateRequest object.

  Fields:
    jobTemplate: A JobTemplate resource to be passed as the request body.
    jobTemplateId: Required. The ID to use for the job template, which will
      become the final component of the job template's resource name. This
      value should be 4-63 characters, and valid characters must match the
      regular expression `[a-zA-Z][a-zA-Z0-9_-]*`.
    parent: Required. The parent location to create this job template. Format:
      `projects/{project}/locations/{location}`
  """

  jobTemplate = _messages.MessageField('JobTemplate', 1)
  jobTemplateId = _messages.StringField(2)
  parent = _messages.StringField(3, required=True)
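

# Illustrative sketch: a create request for a job template. The project,
# location, and template ID are placeholder assumptions; the ID must be 4-63
# characters and satisfy the regular expression documented above.
def _example_job_template_create_request(job_template):
  """Wraps a JobTemplate message in a create request."""
  return TranscoderProjectsLocationsJobTemplatesCreateRequest(
      parent='projects/my-project/locations/us-central1',
      jobTemplateId='hd-preset-template',
      jobTemplate=job_template,
  )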


class TranscoderProjectsLocationsJobTemplatesDeleteRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobTemplatesDeleteRequest object.

  Fields:
    allowMissing: If set to true, and the job template is not found, the
      request will succeed but no action will be taken on the server.
    name: Required. The name of the job template to delete.
      `projects/{project}/locations/{location}/jobTemplates/{job_template}`
  """

  allowMissing = _messages.BooleanField(1)
  name = _messages.StringField(2, required=True)


class TranscoderProjectsLocationsJobTemplatesGetRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobTemplatesGetRequest object.

  Fields:
    name: Required. The name of the job template to retrieve. Format:
      `projects/{project}/locations/{location}/jobTemplates/{job_template}`
  """

  name = _messages.StringField(1, required=True)


class TranscoderProjectsLocationsJobTemplatesListRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobTemplatesListRequest object.

  Fields:
    filter: The filter expression, following the syntax outlined in
      https://google.aip.dev/160.
    orderBy: One or more fields to compare and use to sort the output. See
      https://google.aip.dev/132#ordering.
    pageSize: The maximum number of items to return.
    pageToken: The `next_page_token` value returned from a previous List
      request, if any.
    parent: Required. The parent location from which to retrieve the
      collection of job templates. Format:
      `projects/{project}/locations/{location}`
  """

  filter = _messages.StringField(1)
  orderBy = _messages.StringField(2)
  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(4)
  parent = _messages.StringField(5, required=True)


class TranscoderProjectsLocationsJobsCreateRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobsCreateRequest object.

  Fields:
    job: A Job resource to be passed as the request body.
    parent: Required. The parent location to create and process this job.
      Format: `projects/{project}/locations/{location}`
  """

  job = _messages.MessageField('Job', 1)
  parent = _messages.StringField(2, required=True)


class TranscoderProjectsLocationsJobsDeleteRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobsDeleteRequest object.

  Fields:
    allowMissing: If set to true, and the job is not found, the request will
      succeed but no action will be taken on the server.
    name: Required. The name of the job to delete. Format:
      `projects/{project}/locations/{location}/jobs/{job}`
  """

  allowMissing = _messages.BooleanField(1)
  name = _messages.StringField(2, required=True)


class TranscoderProjectsLocationsJobsGetRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobsGetRequest object.

  Fields:
    name: Required. The name of the job to retrieve. Format:
      `projects/{project}/locations/{location}/jobs/{job}`
  """

  name = _messages.StringField(1, required=True)


class TranscoderProjectsLocationsJobsListRequest(_messages.Message):
  r"""A TranscoderProjectsLocationsJobsListRequest object.

  Fields:
    filter: The filter expression, following the syntax outlined in
      https://google.aip.dev/160.
    orderBy: One or more fields to compare and use to sort the output. See
      https://google.aip.dev/132#ordering.
    pageSize: The maximum number of items to return.
    pageToken: The `next_page_token` value returned from a previous List
      request, if any.
    parent: Required. Format: `projects/{project}/locations/{location}`
  """

  filter = _messages.StringField(1)
  orderBy = _messages.StringField(2)
  pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
  pageToken = _messages.StringField(4)
  parent = _messages.StringField(5, required=True)
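

# Illustrative sketch: listing jobs one page at a time. The filter and
# ordering expressions follow AIP-160 / AIP-132 syntax and are placeholder
# assumptions.
def _example_jobs_list_request(page_token=None):
  """Returns a list request for succeeded jobs, newest first."""
  return TranscoderProjectsLocationsJobsListRequest(
      parent='projects/my-project/locations/us-central1',
      filter='state = "SUCCEEDED"',
      orderBy='createTime desc',
      pageSize=50,
      pageToken=page_token,  # next_page_token from the previous response
  )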


class VideoStream(_messages.Message):
  r"""Video stream resource.

  Fields:
    h264: H264 codec settings.
    h265: H265 codec settings.
    vp9: VP9 codec settings.
  """

  h264 = _messages.MessageField('H264CodecSettings', 1)
  h265 = _messages.MessageField('H265CodecSettings', 2)
  vp9 = _messages.MessageField('Vp9CodecSettings', 3)


class Vp9CodecSettings(_messages.Message):
  r"""VP9 codec settings.

  Enums:
    FrameRateConversionStrategyValueValuesEnum: Optional. Frame rate
      conversion strategy for desired frame rate. The default is `DOWNSAMPLE`.

  Fields:
    bitrateBps: Required. The video bitrate in bits per second. The minimum
      value is 1,000. The maximum value is 480,000,000.
    crfLevel: Target CRF level. Must be between 10 and 36, where 10 is the
      highest quality and 36 is the most efficient compression. The default is
      21. **Note:** This field is not supported.
    frameRate: Required. The target video frame rate in frames per second
      (FPS). Must be less than or equal to 120.
    frameRateConversionStrategy: Optional. Frame rate conversion strategy for
      desired frame rate. The default is `DOWNSAMPLE`.
    gopDuration: Select the GOP size based on the specified duration. The
      default is `3s`. Note that `gopDuration` must be less than or equal to
      [`segmentDuration`](#SegmentSettings), and
      [`segmentDuration`](#SegmentSettings) must be divisible by
      `gopDuration`.
    gopFrameCount: Select the GOP size based on the specified frame count.
      Must be greater than zero.
    heightPixels: The height of the video in pixels. Must be an even integer.
      When not specified, the height is adjusted to match the specified width
      and input aspect ratio. If both are omitted, the input height is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the height, in pixels, per the horizontal ASR. The API
      calculates the width per the horizontal ASR. The API detects any
      rotation metadata and swaps the requested height and width for the
      output.
    hlg: Optional. HLG color format setting for VP9.
    pixelFormat: Pixel format to use. The default is `yuv420p`. Supported
      pixel formats: - `yuv420p` pixel format - `yuv422p` pixel format -
      `yuv444p` pixel format - `yuv420p10` 10-bit HDR pixel format -
      `yuv422p10` 10-bit HDR pixel format - `yuv444p10` 10-bit HDR pixel
      format - `yuv420p12` 12-bit HDR pixel format - `yuv422p12` 12-bit HDR
      pixel format - `yuv444p12` 12-bit HDR pixel format
    profile: Enforces the specified codec profile. The following profiles are
      supported: * `profile0` (default) * `profile1` * `profile2` * `profile3`
      The available options are [WebM-
      compatible](https://www.webmproject.org/vp9/profiles/). Note that
      certain values for this field may cause the transcoder to override other
      fields you set in the `Vp9CodecSettings` message.
    rateControlMode: Specify the mode. The default is `vbr`. Supported rate
      control modes: - `vbr` - variable bitrate
    sdr: Optional. SDR color format setting for VP9.
    widthPixels: The width of the video in pixels. Must be an even integer.
      When not specified, the width is adjusted to match the specified height
      and input aspect ratio. If both are omitted, the input width is used.
      For portrait videos that contain horizontal ASR and rotation metadata,
      provide the width, in pixels, per the horizontal ASR. The API calculates
      the height per the horizontal ASR. The API detects any rotation metadata
      and swaps the requested height and width for the output.
  """

  class FrameRateConversionStrategyValueValuesEnum(_messages.Enum):
    r"""Optional. Frame rate conversion strategy for desired frame rate. The
    default is `DOWNSAMPLE`.

    Values:
      FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED: Unspecified frame rate
        conversion strategy.
      DOWNSAMPLE: Selectively retain frames to reduce the output frame rate.
        Every _n_-th frame is kept, where `n = ceil(input frame rate / target
        frame rate)`. When _n_ = 1 (that is, the target frame rate is greater
        than or equal to the input frame rate), the output frame rate matches
        the input frame rate. When _n_ > 1, frames are dropped and the output
        frame rate is equal to `(input frame rate / n)`. For more information,
        see
        [Calculate frame
        rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate).
      DROP_DUPLICATE: Drop or duplicate frames to match the specified frame
        rate.
    """
    FRAME_RATE_CONVERSION_STRATEGY_UNSPECIFIED = 0
    DOWNSAMPLE = 1
    DROP_DUPLICATE = 2

  bitrateBps = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  crfLevel = _messages.IntegerField(2, variant=_messages.Variant.INT32)
  frameRate = _messages.FloatField(3)
  frameRateConversionStrategy = _messages.EnumField('FrameRateConversionStrategyValueValuesEnum', 4)
  gopDuration = _messages.StringField(5)
  gopFrameCount = _messages.IntegerField(6, variant=_messages.Variant.INT32)
  heightPixels = _messages.IntegerField(7, variant=_messages.Variant.INT32)
  hlg = _messages.MessageField('Vp9ColorFormatHLG', 8)
  pixelFormat = _messages.StringField(9)
  profile = _messages.StringField(10)
  rateControlMode = _messages.StringField(11)
  sdr = _messages.MessageField('Vp9ColorFormatSDR', 12)
  widthPixels = _messages.IntegerField(13, variant=_messages.Variant.INT32)
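

# Illustrative sketch: a 1080p VP9 stream. gopDuration must evenly divide the
# muxing segment duration, so the 3s GOP below assumes 6s segments elsewhere
# in the job. With DOWNSAMPLE, a 60 fps input and this 30 fps target keep
# every n-th frame where n = ceil(60 / 30) = 2. All values are placeholder
# assumptions.
def _example_vp9_video_stream():
  """Returns a VideoStream carrying sample VP9 codec settings."""
  return VideoStream(
      vp9=Vp9CodecSettings(
          bitrateBps=4000000,
          frameRate=30.0,
          frameRateConversionStrategy=(
              Vp9CodecSettings.FrameRateConversionStrategyValueValuesEnum
              .DOWNSAMPLE),
          gopDuration='3s',
          heightPixels=1080,  # width omitted; derived from the aspect ratio
          pixelFormat='yuv420p',
          profile='profile0',
          rateControlMode='vbr',
      ),
  )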


class Vp9ColorFormatHLG(_messages.Message):
  r"""Convert the input video to a Hybrid Log Gamma (HLG) video."""


class Vp9ColorFormatSDR(_messages.Message):
  r"""Convert the input video to a Standard Dynamic Range (SDR) video."""


class Widevine(_messages.Message):
  r"""Widevine configuration."""


class YadifConfig(_messages.Message):
  r"""Yet Another Deinterlacing Filter Configuration.

  Fields:
    deinterlaceAllFrames: Deinterlace all frames rather than just the frames
      identified as interlaced. The default is `false`.
    disableSpatialInterlacing: Disable spatial interlacing. The default is
      `false`.
    mode: Specifies the deinterlacing mode to adopt. The default is
      `send_frame`. Supported values: - `send_frame`: Output one frame for
      each frame - `send_field`: Output one frame for each field
    parity: The picture field parity assumed for the input interlaced video.
      The default is `auto`. Supported values: - `tff`: Assume the top field
      is first - `bff`: Assume the bottom field is first - `auto`: Enable
      automatic detection of field parity
  """

  deinterlaceAllFrames = _messages.BooleanField(1)
  disableSpatialInterlacing = _messages.BooleanField(2)
  mode = _messages.StringField(3)
  parity = _messages.StringField(4)
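

# Illustrative sketch: a yadif deinterlacer that emits one frame per frame
# and lets the filter detect field parity automatically (both are the
# documented defaults, spelled out here for clarity).
def _example_yadif_config():
  """Returns a YadifConfig matching the documented default behavior."""
  return YadifConfig(
      mode='send_frame',
      parity='auto',
      deinterlaceAllFrames=False,
      disableSpatialInterlacing=False,
  )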


encoding.AddCustomJsonFieldMapping(
    StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
