/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/speech/v1p1beta1/cloud_speech.proto

package com.google.cloud.speech.v1p1beta1;

/**
 *
 *
 * <pre>
 * Description of audio data to be recognized.
 * </pre>
 *
 * Protobuf type {@code google.cloud.speech.v1p1beta1.RecognitionMetadata}
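 *
 * <p>A minimal construction sketch; the field values are illustrative only and the setters shown
 * are assumed to be the standard generated ones for each field:
 *
 * <pre>{@code
 * RecognitionMetadata metadata =
 *     RecognitionMetadata.newBuilder()
 *         .setInteractionType(RecognitionMetadata.InteractionType.DISCUSSION)
 *         .setMicrophoneDistance(RecognitionMetadata.MicrophoneDistance.NEARFIELD)
 *         .setRecordingDeviceName("Nexus 5X")
 *         .build();
 * }</pre>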
 */
@java.lang.Deprecated
public final class RecognitionMetadata extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.speech.v1p1beta1.RecognitionMetadata)
    RecognitionMetadataOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RecognitionMetadata.newBuilder() to construct.
  private RecognitionMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private RecognitionMetadata() {
    interactionType_ = 0;
    microphoneDistance_ = 0;
    originalMediaType_ = 0;
    recordingDeviceType_ = 0;
    recordingDeviceName_ = "";
    originalMimeType_ = "";
    audioTopic_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RecognitionMetadata();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.speech.v1p1beta1.SpeechProto
        .internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.speech.v1p1beta1.SpeechProto
        .internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.speech.v1p1beta1.RecognitionMetadata.class,
            com.google.cloud.speech.v1p1beta1.RecognitionMetadata.Builder.class);
  }

  /**
   *
   *
   * <pre>
   * Use case categories that the audio recognition request can be described
   * by.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType}
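   *
   * <p>Illustrative sketch of the mapping between enum constants and their wire numbers:
   *
   * <pre>{@code
   * InteractionType type = InteractionType.forNumber(3);    // PHONE_CALL
   * int wireValue = InteractionType.DICTATION.getNumber();  // 8
   * }</pre>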
   */
  public enum InteractionType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Use case is either unknown or is something other than one of the other
     * values below.
     * </pre>
     *
     * <code>INTERACTION_TYPE_UNSPECIFIED = 0;</code>
     */
    INTERACTION_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Multiple people in a conversation or discussion. For example, in a
     * meeting with two or more people actively participating. Typically
     * all the primary people speaking would be in the same room (if not,
     * see PHONE_CALL).
     * </pre>
     *
     * <code>DISCUSSION = 1;</code>
     */
    DISCUSSION(1),
    /**
     *
     *
     * <pre>
     * One or more persons lecturing or presenting to others, mostly
     * uninterrupted.
     * </pre>
     *
     * <code>PRESENTATION = 2;</code>
     */
    PRESENTATION(2),
    /**
     *
     *
     * <pre>
     * A phone-call or video-conference in which two or more people, who are
     * not in the same room, are actively participating.
     * </pre>
     *
     * <code>PHONE_CALL = 3;</code>
     */
    PHONE_CALL(3),
    /**
     *
     *
     * <pre>
     * A recorded message intended for another person to listen to.
     * </pre>
     *
     * <code>VOICEMAIL = 4;</code>
     */
    VOICEMAIL(4),
    /**
     *
     *
     * <pre>
     * Professionally produced audio (e.g. TV show, podcast).
     * </pre>
     *
     * <code>PROFESSIONALLY_PRODUCED = 5;</code>
     */
    PROFESSIONALLY_PRODUCED(5),
    /**
     *
     *
     * <pre>
     * Transcribe spoken questions and queries into text.
     * </pre>
     *
     * <code>VOICE_SEARCH = 6;</code>
     */
    VOICE_SEARCH(6),
    /**
     *
     *
     * <pre>
     * Transcribe voice commands, such as for controlling a device.
     * </pre>
     *
     * <code>VOICE_COMMAND = 7;</code>
     */
    VOICE_COMMAND(7),
    /**
     *
     *
     * <pre>
     * Transcribe speech to text to create a written document, such as a
     * text-message, email or report.
     * </pre>
     *
     * <code>DICTATION = 8;</code>
     */
    DICTATION(8),
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Use case is either unknown or is something other than one of the other
     * values below.
     * </pre>
     *
     * <code>INTERACTION_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int INTERACTION_TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Multiple people in a conversation or discussion. For example, in a
     * meeting with two or more people actively participating. Typically
     * all the primary people speaking would be in the same room (if not,
     * see PHONE_CALL).
     * </pre>
     *
     * <code>DISCUSSION = 1;</code>
     */
    public static final int DISCUSSION_VALUE = 1;
    /**
     *
     *
     * <pre>
     * One or more persons lecturing or presenting to others, mostly
     * uninterrupted.
     * </pre>
     *
     * <code>PRESENTATION = 2;</code>
     */
    public static final int PRESENTATION_VALUE = 2;
    /**
     *
     *
     * <pre>
     * A phone-call or video-conference in which two or more people, who are
     * not in the same room, are actively participating.
     * </pre>
     *
     * <code>PHONE_CALL = 3;</code>
     */
    public static final int PHONE_CALL_VALUE = 3;
    /**
     *
     *
     * <pre>
     * A recorded message intended for another person to listen to.
     * </pre>
     *
     * <code>VOICEMAIL = 4;</code>
     */
    public static final int VOICEMAIL_VALUE = 4;
    /**
     *
     *
     * <pre>
     * Professionally produced audio (e.g. TV show, podcast).
     * </pre>
     *
     * <code>PROFESSIONALLY_PRODUCED = 5;</code>
     */
    public static final int PROFESSIONALLY_PRODUCED_VALUE = 5;
    /**
     *
     *
     * <pre>
     * Transcribe spoken questions and queries into text.
     * </pre>
     *
     * <code>VOICE_SEARCH = 6;</code>
     */
    public static final int VOICE_SEARCH_VALUE = 6;
    /**
     *
     *
     * <pre>
     * Transcribe voice commands, such as for controlling a device.
     * </pre>
     *
     * <code>VOICE_COMMAND = 7;</code>
     */
    public static final int VOICE_COMMAND_VALUE = 7;
    /**
     *
     *
     * <pre>
     * Transcribe speech to text to create a written document, such as a
     * text-message, email or report.
     * </pre>
     *
     * <code>DICTATION = 8;</code>
     */
    public static final int DICTATION_VALUE = 8;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static InteractionType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static InteractionType forNumber(int value) {
      switch (value) {
        case 0:
          return INTERACTION_TYPE_UNSPECIFIED;
        case 1:
          return DISCUSSION;
        case 2:
          return PRESENTATION;
        case 3:
          return PHONE_CALL;
        case 4:
          return VOICEMAIL;
        case 5:
          return PROFESSIONALLY_PRODUCED;
        case 6:
          return VOICE_SEARCH;
        case 7:
          return VOICE_COMMAND;
        case 8:
          return DICTATION;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<InteractionType> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<InteractionType>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<InteractionType>() {
              public InteractionType findValueByNumber(int number) {
                return InteractionType.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final InteractionType[] VALUES = values();

    public static InteractionType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private InteractionType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType)
  }

  /**
   *
   *
   * <pre>
   * Enumerates the types of capture settings describing an audio file.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance}
   */
  public enum MicrophoneDistance implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Audio type is not known.
     * </pre>
     *
     * <code>MICROPHONE_DISTANCE_UNSPECIFIED = 0;</code>
     */
    MICROPHONE_DISTANCE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * The audio was captured from a closely placed microphone, e.g. a phone,
     * dictaphone, or handheld microphone. Generally the speaker is within
     * 1 meter of the microphone.
     * </pre>
     *
     * <code>NEARFIELD = 1;</code>
     */
    NEARFIELD(1),
    /**
     *
     *
     * <pre>
     * The speaker is within 3 meters of the microphone.
     * </pre>
     *
     * <code>MIDFIELD = 2;</code>
     */
    MIDFIELD(2),
    /**
     *
     *
     * <pre>
     * The speaker is more than 3 meters away from the microphone.
     * </pre>
     *
     * <code>FARFIELD = 3;</code>
     */
    FARFIELD(3),
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Audio type is not known.
     * </pre>
     *
     * <code>MICROPHONE_DISTANCE_UNSPECIFIED = 0;</code>
     */
    public static final int MICROPHONE_DISTANCE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * The audio was captured from a closely placed microphone, e.g. a phone,
     * dictaphone, or handheld microphone. Generally the speaker is within
     * 1 meter of the microphone.
     * </pre>
     *
     * <code>NEARFIELD = 1;</code>
     */
    public static final int NEARFIELD_VALUE = 1;
    /**
     *
     *
     * <pre>
     * The speaker is within 3 meters of the microphone.
     * </pre>
     *
     * <code>MIDFIELD = 2;</code>
     */
    public static final int MIDFIELD_VALUE = 2;
    /**
     *
     *
     * <pre>
     * The speaker is more than 3 meters away from the microphone.
     * </pre>
     *
     * <code>FARFIELD = 3;</code>
     */
    public static final int FARFIELD_VALUE = 3;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static MicrophoneDistance valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static MicrophoneDistance forNumber(int value) {
      switch (value) {
        case 0:
          return MICROPHONE_DISTANCE_UNSPECIFIED;
        case 1:
          return NEARFIELD;
        case 2:
          return MIDFIELD;
        case 3:
          return FARFIELD;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>
        internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<MicrophoneDistance>() {
              public MicrophoneDistance findValueByNumber(int number) {
                return MicrophoneDistance.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor()
          .getEnumTypes()
          .get(1);
    }

    private static final MicrophoneDistance[] VALUES = values();

    public static MicrophoneDistance valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private MicrophoneDistance(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance)
  }

  /**
   *
   *
   * <pre>
   * The original media the speech was recorded on.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType}
   */
  public enum OriginalMediaType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unknown original media type.
     * </pre>
     *
     * <code>ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0;</code>
     */
    ORIGINAL_MEDIA_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * The speech data is an audio recording.
     * </pre>
     *
     * <code>AUDIO = 1;</code>
     */
    AUDIO(1),
    /**
     *
     *
     * <pre>
     * The speech data was originally recorded on a video.
     * </pre>
     *
     * <code>VIDEO = 2;</code>
     */
    VIDEO(2),
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Unknown original media type.
     * </pre>
     *
     * <code>ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int ORIGINAL_MEDIA_TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * The speech data is an audio recording.
     * </pre>
     *
     * <code>AUDIO = 1;</code>
     */
    public static final int AUDIO_VALUE = 1;
    /**
     *
     *
     * <pre>
     * The speech data was originally recorded on a video.
     * </pre>
     *
     * <code>VIDEO = 2;</code>
     */
    public static final int VIDEO_VALUE = 2;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static OriginalMediaType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static OriginalMediaType forNumber(int value) {
      switch (value) {
        case 0:
          return ORIGINAL_MEDIA_TYPE_UNSPECIFIED;
        case 1:
          return AUDIO;
        case 2:
          return VIDEO;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>
        internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<OriginalMediaType>() {
              public OriginalMediaType findValueByNumber(int number) {
                return OriginalMediaType.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor()
          .getEnumTypes()
          .get(2);
    }

    private static final OriginalMediaType[] VALUES = values();

    public static OriginalMediaType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private OriginalMediaType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType)
  }

  /**
   *
   *
   * <pre>
   * The type of device the speech was recorded with.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType}
   */
  public enum RecordingDeviceType implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * The recording device is unknown.
     * </pre>
     *
     * <code>RECORDING_DEVICE_TYPE_UNSPECIFIED = 0;</code>
     */
    RECORDING_DEVICE_TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Speech was recorded on a smartphone.
     * </pre>
     *
     * <code>SMARTPHONE = 1;</code>
     */
    SMARTPHONE(1),
    /**
     *
     *
     * <pre>
     * Speech was recorded using a personal computer or tablet.
     * </pre>
     *
     * <code>PC = 2;</code>
     */
    PC(2),
    /**
     *
     *
     * <pre>
     * Speech was recorded over a phone line.
     * </pre>
     *
     * <code>PHONE_LINE = 3;</code>
     */
    PHONE_LINE(3),
    /**
     *
     *
     * <pre>
     * Speech was recorded in a vehicle.
     * </pre>
     *
     * <code>VEHICLE = 4;</code>
     */
    VEHICLE(4),
    /**
     *
     *
     * <pre>
     * Speech was recorded outdoors.
     * </pre>
     *
     * <code>OTHER_OUTDOOR_DEVICE = 5;</code>
     */
    OTHER_OUTDOOR_DEVICE(5),
    /**
     *
     *
     * <pre>
     * Speech was recorded indoors.
     * </pre>
     *
     * <code>OTHER_INDOOR_DEVICE = 6;</code>
     */
    OTHER_INDOOR_DEVICE(6),
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * The recording device is unknown.
     * </pre>
     *
     * <code>RECORDING_DEVICE_TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int RECORDING_DEVICE_TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Speech was recorded on a smartphone.
     * </pre>
     *
     * <code>SMARTPHONE = 1;</code>
     */
    public static final int SMARTPHONE_VALUE = 1;
    /**
     *
     *
     * <pre>
     * Speech was recorded using a personal computer or tablet.
     * </pre>
     *
     * <code>PC = 2;</code>
     */
    public static final int PC_VALUE = 2;
    /**
     *
     *
     * <pre>
     * Speech was recorded over a phone line.
     * </pre>
     *
     * <code>PHONE_LINE = 3;</code>
     */
    public static final int PHONE_LINE_VALUE = 3;
    /**
     *
     *
     * <pre>
     * Speech was recorded in a vehicle.
     * </pre>
     *
     * <code>VEHICLE = 4;</code>
     */
    public static final int VEHICLE_VALUE = 4;
    /**
     *
     *
     * <pre>
     * Speech was recorded outdoors.
     * </pre>
     *
     * <code>OTHER_OUTDOOR_DEVICE = 5;</code>
     */
    public static final int OTHER_OUTDOOR_DEVICE_VALUE = 5;
    /**
     *
     *
     * <pre>
     * Speech was recorded indoors.
     * </pre>
     *
     * <code>OTHER_INDOOR_DEVICE = 6;</code>
     */
    public static final int OTHER_INDOOR_DEVICE_VALUE = 6;

    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static RecordingDeviceType valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static RecordingDeviceType forNumber(int value) {
      switch (value) {
        case 0:
          return RECORDING_DEVICE_TYPE_UNSPECIFIED;
        case 1:
          return SMARTPHONE;
        case 2:
          return PC;
        case 3:
          return PHONE_LINE;
        case 4:
          return VEHICLE;
        case 5:
          return OTHER_OUTDOOR_DEVICE;
        case 6:
          return OTHER_INDOOR_DEVICE;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>
        internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<RecordingDeviceType>() {
              public RecordingDeviceType findValueByNumber(int number) {
                return RecordingDeviceType.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDescriptor()
          .getEnumTypes()
          .get(3);
    }

    private static final RecordingDeviceType[] VALUES = values();

    public static RecordingDeviceType valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private RecordingDeviceType(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType)
  }

  public static final int INTERACTION_TYPE_FIELD_NUMBER = 1;
  private int interactionType_ = 0;
  /**
   *
   *
   * <pre>
   * The use case most closely describing the audio content to be recognized.
   * </pre>
   *
   * <code>.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
   * </code>
   *
   * @return The enum numeric value on the wire for interactionType.
   */
  @java.lang.Override
  public int getInteractionTypeValue() {
    return interactionType_;
  }
  /**
   *
   *
   * <pre>
   * The use case most closely describing the audio content to be recognized.
   * </pre>
   *
   * <code>.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
   * </code>
   *
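   * <p>Usage sketch, assuming an existing {@code metadata} instance: when the wire value does not
   * match a known constant this method returns {@code UNRECOGNIZED}, and the raw number is still
   * available via {@link #getInteractionTypeValue()}.
   *
   * <pre>{@code
   * RecognitionMetadata.InteractionType type = metadata.getInteractionType();
   * if (type == RecognitionMetadata.InteractionType.UNRECOGNIZED) {
   *   int raw = metadata.getInteractionTypeValue();  // numeric wire value of the unknown entry
   * }
   * }</pre>
   *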
   * @return The interactionType.
   */
  @java.lang.Override
  public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType
      getInteractionType() {
    com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType result =
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.forNumber(
            interactionType_);
    return result == null
        ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.UNRECOGNIZED
        : result;
  }

  public static final int INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER = 3;
  private int industryNaicsCodeOfAudio_ = 0;
  /**
   *
   *
   * <pre>
   * The industry vertical to which this speech recognition request most
   * closely applies. This is most indicative of the topics contained
   * in the audio.  Use the 6-digit NAICS code to identify the industry
   * vertical - see https://www.naics.com/search/.
   * </pre>
   *
   * <code>uint32 industry_naics_code_of_audio = 3;</code>
   *
   * @return The industryNaicsCodeOfAudio.
   */
  @java.lang.Override
  public int getIndustryNaicsCodeOfAudio() {
    return industryNaicsCodeOfAudio_;
  }

  public static final int MICROPHONE_DISTANCE_FIELD_NUMBER = 4;
  private int microphoneDistance_ = 0;
  /**
   *
   *
   * <pre>
   * The audio type that most closely describes the audio being recognized.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
   * </code>
   *
   * @return The enum numeric value on the wire for microphoneDistance.
   */
  @java.lang.Override
  public int getMicrophoneDistanceValue() {
    return microphoneDistance_;
  }
  /**
   *
   *
   * <pre>
   * The audio type that most closely describes the audio being recognized.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
   * </code>
   *
   * @return The microphoneDistance.
   */
  @java.lang.Override
  public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance
      getMicrophoneDistance() {
    com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance result =
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.forNumber(
            microphoneDistance_);
    return result == null
        ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED
        : result;
  }

  public static final int ORIGINAL_MEDIA_TYPE_FIELD_NUMBER = 5;
  private int originalMediaType_ = 0;
  /**
   *
   *
   * <pre>
   * The original media the speech was recorded on.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
   * </code>
   *
   * @return The enum numeric value on the wire for originalMediaType.
   */
  @java.lang.Override
  public int getOriginalMediaTypeValue() {
    return originalMediaType_;
  }
  /**
   *
   *
   * <pre>
   * The original media the speech was recorded on.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
   * </code>
   *
   * @return The originalMediaType.
   */
  @java.lang.Override
  public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType
      getOriginalMediaType() {
    com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType result =
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.forNumber(
            originalMediaType_);
    return result == null
        ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED
        : result;
  }

  public static final int RECORDING_DEVICE_TYPE_FIELD_NUMBER = 6;
  private int recordingDeviceType_ = 0;
  /**
   *
   *
   * <pre>
   * The type of device the speech was recorded with.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
   * </code>
   *
   * @return The enum numeric value on the wire for recordingDeviceType.
   */
  @java.lang.Override
  public int getRecordingDeviceTypeValue() {
    return recordingDeviceType_;
  }
  /**
   *
   *
   * <pre>
   * The type of device the speech was recorded with.
   * </pre>
   *
   * <code>
   * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
   * </code>
   *
   * @return The recordingDeviceType.
   */
  @java.lang.Override
  public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType
      getRecordingDeviceType() {
    com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType result =
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.forNumber(
            recordingDeviceType_);
    return result == null
        ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED
        : result;
  }

  public static final int RECORDING_DEVICE_NAME_FIELD_NUMBER = 7;

  @SuppressWarnings("serial")
  private volatile java.lang.Object recordingDeviceName_ = "";
  /**
   *
   *
   * <pre>
   * The device used to make the recording. Examples: 'Nexus 5X',
   * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
   * 'Cardioid Microphone'.
   * </pre>
   *
   * <code>string recording_device_name = 7;</code>
   *
   * @return The recordingDeviceName.
   */
  @java.lang.Override
  public java.lang.String getRecordingDeviceName() {
    java.lang.Object ref = recordingDeviceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      recordingDeviceName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The device used to make the recording. Examples: 'Nexus 5X',
   * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
   * 'Cardioid Microphone'.
   * </pre>
   *
   * <code>string recording_device_name = 7;</code>
   *
   * @return The bytes for recordingDeviceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRecordingDeviceNameBytes() {
    java.lang.Object ref = recordingDeviceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      recordingDeviceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ORIGINAL_MIME_TYPE_FIELD_NUMBER = 8;

  @SuppressWarnings("serial")
  private volatile java.lang.Object originalMimeType_ = "";
  /**
   *
   *
   * <pre>
   * MIME type of the original audio file. For example `audio/m4a`,
   * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
   * A list of possible audio MIME types is maintained at
   * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
   * </pre>
   *
   * <code>string original_mime_type = 8;</code>
   *
   * @return The originalMimeType.
   */
  @java.lang.Override
  public java.lang.String getOriginalMimeType() {
    java.lang.Object ref = originalMimeType_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      originalMimeType_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * MIME type of the original audio file. For example `audio/m4a`,
   * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
   * A list of possible audio MIME types is maintained at
   * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
   * </pre>
   *
   * <code>string original_mime_type = 8;</code>
   *
   * @return The bytes for originalMimeType.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOriginalMimeTypeBytes() {
    java.lang.Object ref = originalMimeType_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      originalMimeType_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int OBFUSCATED_ID_FIELD_NUMBER = 9;
  private long obfuscatedId_ = 0L;
  /**
   *
   *
   * <pre>
   * Obfuscated (privacy-protected) ID of the user, used to identify the number
   * of unique users using the service.
   * </pre>
   *
   * <code>int64 obfuscated_id = 9 [deprecated = true];</code>
   *
   * @deprecated google.cloud.speech.v1p1beta1.RecognitionMetadata.obfuscated_id is deprecated. See
   *     google/cloud/speech/v1p1beta1/cloud_speech.proto;l=633
   * @return The obfuscatedId.
   */
  @java.lang.Override
  @java.lang.Deprecated
  public long getObfuscatedId() {
    return obfuscatedId_;
  }

  public static final int AUDIO_TOPIC_FIELD_NUMBER = 10;

  @SuppressWarnings("serial")
  private volatile java.lang.Object audioTopic_ = "";
  /**
   *
   *
   * <pre>
   * Description of the content. E.g. "Recordings of federal supreme court
   * hearings from 2012".
   * </pre>
   *
   * <code>string audio_topic = 10;</code>
   *
   * @return The audioTopic.
   */
  @java.lang.Override
  public java.lang.String getAudioTopic() {
    java.lang.Object ref = audioTopic_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      audioTopic_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Description of the content. E.g. "Recordings of federal supreme court
   * hearings from 2012".
   * </pre>
   *
   * <code>string audio_topic = 10;</code>
   *
   * @return The bytes for audioTopic.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getAudioTopicBytes() {
    java.lang.Object ref = audioTopic_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      audioTopic_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
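    // Proto3 serialization: singular fields that still hold their default value (0, empty string)
    // are skipped, so only the non-default fields below are written to the wire.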
    if (interactionType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType
            .INTERACTION_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, interactionType_);
    }
    if (industryNaicsCodeOfAudio_ != 0) {
      output.writeUInt32(3, industryNaicsCodeOfAudio_);
    }
    if (microphoneDistance_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance
            .MICROPHONE_DISTANCE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(4, microphoneDistance_);
    }
    if (originalMediaType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType
            .ORIGINAL_MEDIA_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(5, originalMediaType_);
    }
    if (recordingDeviceType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType
            .RECORDING_DEVICE_TYPE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(6, recordingDeviceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 7, recordingDeviceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 8, originalMimeType_);
    }
    if (obfuscatedId_ != 0L) {
      output.writeInt64(9, obfuscatedId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 10, audioTopic_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (interactionType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType
            .INTERACTION_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, interactionType_);
    }
    if (industryNaicsCodeOfAudio_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeUInt32Size(3, industryNaicsCodeOfAudio_);
    }
    if (microphoneDistance_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance
            .MICROPHONE_DISTANCE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, microphoneDistance_);
    }
    if (originalMediaType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType
            .ORIGINAL_MEDIA_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(5, originalMediaType_);
    }
    if (recordingDeviceType_
        != com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType
            .RECORDING_DEVICE_TYPE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(6, recordingDeviceType_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recordingDeviceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, recordingDeviceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(originalMimeType_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, originalMimeType_);
    }
    if (obfuscatedId_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(9, obfuscatedId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(audioTopic_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, audioTopic_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.speech.v1p1beta1.RecognitionMetadata)) {
      return super.equals(obj);
    }
    com.google.cloud.speech.v1p1beta1.RecognitionMetadata other =
        (com.google.cloud.speech.v1p1beta1.RecognitionMetadata) obj;

    if (interactionType_ != other.interactionType_) return false;
    if (getIndustryNaicsCodeOfAudio() != other.getIndustryNaicsCodeOfAudio()) return false;
    if (microphoneDistance_ != other.microphoneDistance_) return false;
    if (originalMediaType_ != other.originalMediaType_) return false;
    if (recordingDeviceType_ != other.recordingDeviceType_) return false;
    if (!getRecordingDeviceName().equals(other.getRecordingDeviceName())) return false;
    if (!getOriginalMimeType().equals(other.getOriginalMimeType())) return false;
    if (getObfuscatedId() != other.getObfuscatedId()) return false;
    if (!getAudioTopic().equals(other.getAudioTopic())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + INTERACTION_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + interactionType_;
    hash = (37 * hash) + INDUSTRY_NAICS_CODE_OF_AUDIO_FIELD_NUMBER;
    hash = (53 * hash) + getIndustryNaicsCodeOfAudio();
    hash = (37 * hash) + MICROPHONE_DISTANCE_FIELD_NUMBER;
    hash = (53 * hash) + microphoneDistance_;
    hash = (37 * hash) + ORIGINAL_MEDIA_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + originalMediaType_;
    hash = (37 * hash) + RECORDING_DEVICE_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + recordingDeviceType_;
    hash = (37 * hash) + RECORDING_DEVICE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getRecordingDeviceName().hashCode();
    hash = (37 * hash) + ORIGINAL_MIME_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + getOriginalMimeType().hashCode();
    hash = (37 * hash) + OBFUSCATED_ID_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObfuscatedId());
    hash = (37 * hash) + AUDIO_TOPIC_FIELD_NUMBER;
    hash = (53 * hash) + getAudioTopic().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

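  /**
   * A minimal serialize/parse round-trip sketch, assuming an existing {@code metadata} instance
   * (illustrative only):
   *
   * <pre>{@code
   * byte[] bytes = metadata.toByteArray();
   * RecognitionMetadata copy = RecognitionMetadata.parseFrom(bytes);
   * }</pre>
   */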
  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Description of audio data to be recognized.
   * </pre>
   *
   * Protobuf type {@code google.cloud.speech.v1p1beta1.RecognitionMetadata}
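   *
   * <p>A short modification sketch using {@code toBuilder()} (illustrative only):
   *
   * <pre>{@code
   * RecognitionMetadata updated =
   *     existing.toBuilder()
   *         .setInteractionType(RecognitionMetadata.InteractionType.PHONE_CALL)
   *         .build();
   * }</pre>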
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.speech.v1p1beta1.RecognitionMetadata)
      com.google.cloud.speech.v1p1beta1.RecognitionMetadataOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.speech.v1p1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.speech.v1p1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.speech.v1p1beta1.RecognitionMetadata.class,
              com.google.cloud.speech.v1p1beta1.RecognitionMetadata.Builder.class);
    }

    // Construct using com.google.cloud.speech.v1p1beta1.RecognitionMetadata.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      interactionType_ = 0;
      industryNaicsCodeOfAudio_ = 0;
      microphoneDistance_ = 0;
      originalMediaType_ = 0;
      recordingDeviceType_ = 0;
      recordingDeviceName_ = "";
      originalMimeType_ = "";
      obfuscatedId_ = 0L;
      audioTopic_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.speech.v1p1beta1.SpeechProto
          .internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstanceForType() {
      return com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata build() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata buildPartial() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata result =
          new com.google.cloud.speech.v1p1beta1.RecognitionMetadata(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.speech.v1p1beta1.RecognitionMetadata result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.interactionType_ = interactionType_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.industryNaicsCodeOfAudio_ = industryNaicsCodeOfAudio_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.microphoneDistance_ = microphoneDistance_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.originalMediaType_ = originalMediaType_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.recordingDeviceType_ = recordingDeviceType_;
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.recordingDeviceName_ = recordingDeviceName_;
      }
      if (((from_bitField0_ & 0x00000040) != 0)) {
        result.originalMimeType_ = originalMimeType_;
      }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.obfuscatedId_ = obfuscatedId_;
      }
      if (((from_bitField0_ & 0x00000100) != 0)) {
        result.audioTopic_ = audioTopic_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.speech.v1p1beta1.RecognitionMetadata) {
        return mergeFrom((com.google.cloud.speech.v1p1beta1.RecognitionMetadata) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.speech.v1p1beta1.RecognitionMetadata other) {
      if (other == com.google.cloud.speech.v1p1beta1.RecognitionMetadata.getDefaultInstance())
        return this;
      if (other.interactionType_ != 0) {
        setInteractionTypeValue(other.getInteractionTypeValue());
      }
      if (other.getIndustryNaicsCodeOfAudio() != 0) {
        setIndustryNaicsCodeOfAudio(other.getIndustryNaicsCodeOfAudio());
      }
      if (other.microphoneDistance_ != 0) {
        setMicrophoneDistanceValue(other.getMicrophoneDistanceValue());
      }
      if (other.originalMediaType_ != 0) {
        setOriginalMediaTypeValue(other.getOriginalMediaTypeValue());
      }
      if (other.recordingDeviceType_ != 0) {
        setRecordingDeviceTypeValue(other.getRecordingDeviceTypeValue());
      }
      if (!other.getRecordingDeviceName().isEmpty()) {
        recordingDeviceName_ = other.recordingDeviceName_;
        bitField0_ |= 0x00000020;
        onChanged();
      }
      if (!other.getOriginalMimeType().isEmpty()) {
        originalMimeType_ = other.originalMimeType_;
        bitField0_ |= 0x00000040;
        onChanged();
      }
      if (other.getObfuscatedId() != 0L) {
        setObfuscatedId(other.getObfuscatedId());
      }
      if (!other.getAudioTopic().isEmpty()) {
        audioTopic_ = other.audioTopic_;
        bitField0_ |= 0x00000100;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
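          // Each tag is (field_number << 3) | wire_type, so e.g. tag 8 is field 1 as a varint and
          // tag 58 is field 7 as a length-delimited string.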
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                interactionType_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 24:
              {
                industryNaicsCodeOfAudio_ = input.readUInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 24
            case 32:
              {
                microphoneDistance_ = input.readEnum();
                bitField0_ |= 0x00000004;
                break;
              } // case 32
            case 40:
              {
                originalMediaType_ = input.readEnum();
                bitField0_ |= 0x00000008;
                break;
              } // case 40
            case 48:
              {
                recordingDeviceType_ = input.readEnum();
                bitField0_ |= 0x00000010;
                break;
              } // case 48
            case 58:
              {
                recordingDeviceName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000020;
                break;
              } // case 58
            case 66:
              {
                originalMimeType_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000040;
                break;
              } // case 66
            case 72:
              {
                obfuscatedId_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 72
            case 82:
              {
                audioTopic_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000100;
                break;
              } // case 82
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
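
    /*
     * Note on the tag constants in the switch above: a protobuf tag is
     * (field_number << 3) | wire_type. For example, case 8 is field 1
     * (interaction_type) encoded as a varint (wire type 0), case 58 is field 7
     * (recording_device_name) encoded length-delimited (wire type 2), and case 82
     * is field 10 (audio_topic). Tags this message does not recognize fall through
     * to parseUnknownField and are preserved in the unknown field set.
     */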

    private int bitField0_;

    private int interactionType_ = 0;
    /**
     *
     *
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
     * </code>
     *
     * @return The enum numeric value on the wire for interactionType.
     */
    @java.lang.Override
    public int getInteractionTypeValue() {
      return interactionType_;
    }
    /**
     *
     *
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
     * </code>
     *
     * @param value The enum numeric value on the wire for interactionType to set.
     * @return This builder for chaining.
     */
    public Builder setInteractionTypeValue(int value) {
      interactionType_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
     * </code>
     *
     * @return The interactionType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType
        getInteractionType() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType result =
          com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.forNumber(
              interactionType_);
      return result == null
          ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
     * </code>
     *
     * @param value The interactionType to set.
     * @return This builder for chaining.
     */
    public Builder setInteractionType(
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      interactionType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The use case most closely describing the audio content to be recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType interaction_type = 1;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInteractionType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      interactionType_ = 0;
      onChanged();
      return this;
    }
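
    /*
     * Hedged usage sketch: the enum setter and the *_Value setter write the same
     * underlying int. setInteractionTypeValue accepts any number, including ones
     * this generated enum does not define, in which case getInteractionType()
     * returns UNRECOGNIZED while getInteractionTypeValue() still reports the raw
     * wire number.
     *
     *   RecognitionMetadata.Builder b = RecognitionMetadata.newBuilder();
     *   b.setInteractionType(RecognitionMetadata.InteractionType.DISCUSSION); // enum form
     *   b.setInteractionTypeValue(1); // equivalent: DISCUSSION has wire number 1
     */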

    private int industryNaicsCodeOfAudio_;
    /**
     *
     *
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio.  Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @return The industryNaicsCodeOfAudio.
     */
    @java.lang.Override
    public int getIndustryNaicsCodeOfAudio() {
      return industryNaicsCodeOfAudio_;
    }
    /**
     *
     *
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio.  Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @param value The industryNaicsCodeOfAudio to set.
     * @return This builder for chaining.
     */
    public Builder setIndustryNaicsCodeOfAudio(int value) {
      industryNaicsCodeOfAudio_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The industry vertical to which this speech recognition request most
     * closely applies. This is most indicative of the topics contained
     * in the audio.  Use the 6-digit NAICS code to identify the industry
     * vertical - see https://www.naics.com/search/.
     * </pre>
     *
     * <code>uint32 industry_naics_code_of_audio = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearIndustryNaicsCodeOfAudio() {
      bitField0_ = (bitField0_ & ~0x00000002);
      industryNaicsCodeOfAudio_ = 0;
      onChanged();
      return this;
    }
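
    /*
     * Hedged usage sketch: industry_naics_code_of_audio is a plain uint32, so the
     * builder takes an int and clearIndustryNaicsCodeOfAudio() resets it to 0. The
     * NAICS code below is illustrative only; builder stands for any
     * RecognitionMetadata.Builder.
     *
     *   builder.setIndustryNaicsCodeOfAudio(541990);
     *   builder.clearIndustryNaicsCodeOfAudio(); // back to 0
     */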

    private int microphoneDistance_ = 0;
    /**
     *
     *
     * <pre>
     * The microphone distance that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
     * </code>
     *
     * @return The enum numeric value on the wire for microphoneDistance.
     */
    @java.lang.Override
    public int getMicrophoneDistanceValue() {
      return microphoneDistance_;
    }
    /**
     *
     *
     * <pre>
     * The microphone distance that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
     * </code>
     *
     * @param value The enum numeric value on the wire for microphoneDistance to set.
     * @return This builder for chaining.
     */
    public Builder setMicrophoneDistanceValue(int value) {
      microphoneDistance_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The microphone distance that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
     * </code>
     *
     * @return The microphoneDistance.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance
        getMicrophoneDistance() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance result =
          com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.forNumber(
              microphoneDistance_);
      return result == null
          ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The microphone distance that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
     * </code>
     *
     * @param value The microphoneDistance to set.
     * @return This builder for chaining.
     */
    public Builder setMicrophoneDistance(
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      microphoneDistance_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The microphone distance that most closely describes the audio being recognized.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance microphone_distance = 4;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMicrophoneDistance() {
      bitField0_ = (bitField0_ & ~0x00000004);
      microphoneDistance_ = 0;
      onChanged();
      return this;
    }
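
    /*
     * Hedged usage sketch: clearMicrophoneDistance() drops the field back to wire
     * value 0 (conventionally the *_UNSPECIFIED entry of the enum). The raw value 2
     * is illustrative and is not asserted to name any particular enum constant;
     * builder stands for any RecognitionMetadata.Builder.
     *
     *   builder.setMicrophoneDistanceValue(2);
     *   builder.clearMicrophoneDistance();
     *   // builder.getMicrophoneDistanceValue() -> 0
     */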

    private int originalMediaType_ = 0;
    /**
     *
     *
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
     * </code>
     *
     * @return The enum numeric value on the wire for originalMediaType.
     */
    @java.lang.Override
    public int getOriginalMediaTypeValue() {
      return originalMediaType_;
    }
    /**
     *
     *
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
     * </code>
     *
     * @param value The enum numeric value on the wire for originalMediaType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMediaTypeValue(int value) {
      originalMediaType_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
     * </code>
     *
     * @return The originalMediaType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType
        getOriginalMediaType() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType result =
          com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.forNumber(
              originalMediaType_);
      return result == null
          ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
     * </code>
     *
     * @param value The originalMediaType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMediaType(
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      originalMediaType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The original media the speech was recorded on.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType original_media_type = 5;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOriginalMediaType() {
      bitField0_ = (bitField0_ & ~0x00000008);
      originalMediaType_ = 0;
      onChanged();
      return this;
    }
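
    /*
     * Hedged usage sketch: because the builder stores this enum as its raw number,
     * a value set with setOriginalMediaTypeValue survives a serialize/parse round
     * trip even when this client's enum has no constant for it; the enum getter
     * then reports UNRECOGNIZED. The number 99 is illustrative only, and
     * parseFrom(byte[]) is the generated static parser declared elsewhere in this
     * class (it throws InvalidProtocolBufferException on malformed input).
     *
     *   RecognitionMetadata m =
     *       RecognitionMetadata.newBuilder().setOriginalMediaTypeValue(99).build();
     *   RecognitionMetadata reparsed = RecognitionMetadata.parseFrom(m.toByteArray());
     *   // reparsed.getOriginalMediaTypeValue() -> 99
     *   // reparsed.getOriginalMediaType() -> UNRECOGNIZED
     */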

    private int recordingDeviceType_ = 0;
    /**
     *
     *
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
     * </code>
     *
     * @return The enum numeric value on the wire for recordingDeviceType.
     */
    @java.lang.Override
    public int getRecordingDeviceTypeValue() {
      return recordingDeviceType_;
    }
    /**
     *
     *
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
     * </code>
     *
     * @param value The enum numeric value on the wire for recordingDeviceType to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceTypeValue(int value) {
      recordingDeviceType_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
     * </code>
     *
     * @return The recordingDeviceType.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType
        getRecordingDeviceType() {
      com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType result =
          com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.forNumber(
              recordingDeviceType_);
      return result == null
          ? com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
     * </code>
     *
     * @param value The recordingDeviceType to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceType(
        com.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000010;
      recordingDeviceType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The type of device the speech was recorded with.
     * </pre>
     *
     * <code>
     * .google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType recording_device_type = 6;
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRecordingDeviceType() {
      bitField0_ = (bitField0_ & ~0x00000010);
      recordingDeviceType_ = 0;
      onChanged();
      return this;
    }
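
    /*
     * Hedged usage sketch: getRecordingDeviceType() never returns null; numbers the
     * enum does not define map to UNRECOGNIZED. In protobuf-java generated enums,
     * UNRECOGNIZED.getNumber() throws IllegalArgumentException, so read unknown
     * numbers with getRecordingDeviceTypeValue() instead. 42 is illustrative;
     * builder stands for any RecognitionMetadata.Builder.
     *
     *   builder.setRecordingDeviceTypeValue(42);
     *   builder.getRecordingDeviceType();      // UNRECOGNIZED
     *   builder.getRecordingDeviceTypeValue(); // 42
     */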

    private java.lang.Object recordingDeviceName_ = "";
    /**
     *
     *
     * <pre>
     * The device used to make the recording.  Examples: 'Nexus 5X',
     * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return The recordingDeviceName.
     */
    public java.lang.String getRecordingDeviceName() {
      java.lang.Object ref = recordingDeviceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        recordingDeviceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The device used to make the recording.  Examples: 'Nexus 5X',
     * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return The bytes for recordingDeviceName.
     */
    public com.google.protobuf.ByteString getRecordingDeviceNameBytes() {
      java.lang.Object ref = recordingDeviceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        recordingDeviceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The device used to make the recording.  Examples: 'Nexus 5X',
     * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @param value The recordingDeviceName to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      recordingDeviceName_ = value;
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The device used to make the recording.  Examples: 'Nexus 5X',
     * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRecordingDeviceName() {
      recordingDeviceName_ = getDefaultInstance().getRecordingDeviceName();
      bitField0_ = (bitField0_ & ~0x00000020);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The device used to make the recording.  Examples: 'Nexus 5X',
     * 'Polycom SoundStation IP 6000', 'POTS', 'VoIP', or
     * 'Cardioid Microphone'.
     * </pre>
     *
     * <code>string recording_device_name = 7;</code>
     *
     * @param value The bytes for recordingDeviceName to set.
     * @return This builder for chaining.
     */
    public Builder setRecordingDeviceNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      recordingDeviceName_ = value;
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
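
    /*
     * Hedged usage sketch: the plain setter takes a java.lang.String, while the
     * *_Bytes variant takes a ByteString and rejects byte sequences that are not
     * valid UTF-8 (via checkByteStringIsUtf8). Both write the same field; the
     * device name comes from the field documentation above, and builder stands for
     * any RecognitionMetadata.Builder.
     *
     *   builder.setRecordingDeviceName("Nexus 5X");
     *   builder.setRecordingDeviceNameBytes(
     *       com.google.protobuf.ByteString.copyFromUtf8("Nexus 5X"));
     */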

    private java.lang.Object originalMimeType_ = "";
    /**
     *
     *
     * <pre>
     * MIME type of the original audio file.  For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio MIME types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return The originalMimeType.
     */
    public java.lang.String getOriginalMimeType() {
      java.lang.Object ref = originalMimeType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        originalMimeType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * MIME type of the original audio file.  For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio MIME types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return The bytes for originalMimeType.
     */
    public com.google.protobuf.ByteString getOriginalMimeTypeBytes() {
      java.lang.Object ref = originalMimeType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        originalMimeType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * MIME type of the original audio file.  For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio MIME types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @param value The originalMimeType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMimeType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      originalMimeType_ = value;
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * MIME type of the original audio file.  For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio MIME types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOriginalMimeType() {
      originalMimeType_ = getDefaultInstance().getOriginalMimeType();
      bitField0_ = (bitField0_ & ~0x00000040);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * MIME type of the original audio file.  For example `audio/m4a`,
     * `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`.
     * A list of possible audio MIME types is maintained at
     * http://www.iana.org/assignments/media-types/media-types.xhtml#audio
     * </pre>
     *
     * <code>string original_mime_type = 8;</code>
     *
     * @param value The bytes for originalMimeType to set.
     * @return This builder for chaining.
     */
    public Builder setOriginalMimeTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      originalMimeType_ = value;
      bitField0_ |= 0x00000040;
      onChanged();
      return this;
    }
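
    /*
     * Hedged usage sketch, using a MIME type listed in the field documentation
     * above; builder stands for any RecognitionMetadata.Builder.
     *
     *   builder.setOriginalMimeType("audio/m4a");
     *   builder.clearOriginalMimeType(); // back to the empty-string default
     */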

    private long obfuscatedId_;
    /**
     *
     *
     * <pre>
     * Obfuscated (privacy-protected) ID of the user, used to count the number
     * of unique users of the service.
     * </pre>
     *
     * <code>int64 obfuscated_id = 9 [deprecated = true];</code>
     *
     * @deprecated google.cloud.speech.v1p1beta1.RecognitionMetadata.obfuscated_id is deprecated.
     *     See google/cloud/speech/v1p1beta1/cloud_speech.proto;l=633
     * @return The obfuscatedId.
     */
    @java.lang.Override
    @java.lang.Deprecated
    public long getObfuscatedId() {
      return obfuscatedId_;
    }
    /**
     *
     *
     * <pre>
     * Obfuscated (privacy-protected) ID of the user, used to count the number
     * of unique users of the service.
     * </pre>
     *
     * <code>int64 obfuscated_id = 9 [deprecated = true];</code>
     *
     * @deprecated google.cloud.speech.v1p1beta1.RecognitionMetadata.obfuscated_id is deprecated.
     *     See google/cloud/speech/v1p1beta1/cloud_speech.proto;l=633
     * @param value The obfuscatedId to set.
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder setObfuscatedId(long value) {
      obfuscatedId_ = value;
      bitField0_ |= 0x00000080;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Obfuscated (privacy-protected) ID of the user, used to count the number
     * of unique users of the service.
     * </pre>
     *
     * <code>int64 obfuscated_id = 9 [deprecated = true];</code>
     *
     * @deprecated google.cloud.speech.v1p1beta1.RecognitionMetadata.obfuscated_id is deprecated.
     *     See google/cloud/speech/v1p1beta1/cloud_speech.proto;l=633
     * @return This builder for chaining.
     */
    @java.lang.Deprecated
    public Builder clearObfuscatedId() {
      bitField0_ = (bitField0_ & ~0x00000080);
      obfuscatedId_ = 0L;
      onChanged();
      return this;
    }
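
    /*
     * Hedged usage sketch: obfuscated_id is marked deprecated in the proto, so the
     * accessors above carry @java.lang.Deprecated and callers compile with a
     * deprecation warning. The id below is illustrative only.
     *
     *   builder.setObfuscatedId(123456789L); // deprecation warning at compile time
     */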

    private java.lang.Object audioTopic_ = "";
    /**
     *
     *
     * <pre>
     * Description of the content. For example, "Recordings of federal supreme
     * court hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return The audioTopic.
     */
    public java.lang.String getAudioTopic() {
      java.lang.Object ref = audioTopic_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        audioTopic_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Description of the content. For example, "Recordings of federal supreme
     * court hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return The bytes for audioTopic.
     */
    public com.google.protobuf.ByteString getAudioTopicBytes() {
      java.lang.Object ref = audioTopic_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        audioTopic_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Description of the content. For example, "Recordings of federal supreme
     * court hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @param value The audioTopic to set.
     * @return This builder for chaining.
     */
    public Builder setAudioTopic(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      audioTopic_ = value;
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Description of the content. For example, "Recordings of federal supreme
     * court hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAudioTopic() {
      audioTopic_ = getDefaultInstance().getAudioTopic();
      bitField0_ = (bitField0_ & ~0x00000100);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Description of the content. For example, "Recordings of federal supreme
     * court hearings from 2012".
     * </pre>
     *
     * <code>string audio_topic = 10;</code>
     *
     * @param value The bytes for audioTopic to set.
     * @return This builder for chaining.
     */
    public Builder setAudioTopicBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      audioTopic_ = value;
      bitField0_ |= 0x00000100;
      onChanged();
      return this;
    }
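
    /*
     * Hedged end-to-end sketch of this builder. DISCUSSION is the enum value shown
     * earlier in this file; the remaining values are taken from, or modeled on, the
     * field documentation above.
     *
     *   RecognitionMetadata metadata =
     *       RecognitionMetadata.newBuilder()
     *           .setInteractionType(RecognitionMetadata.InteractionType.DISCUSSION)
     *           .setRecordingDeviceName("Polycom SoundStation IP 6000")
     *           .setOriginalMimeType("audio/m4a")
     *           .setAudioTopic("Recordings of federal supreme court hearings from 2012")
     *           .build();
     */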

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.RecognitionMetadata)
  private static final com.google.cloud.speech.v1p1beta1.RecognitionMetadata DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.speech.v1p1beta1.RecognitionMetadata();
  }

  public static com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<RecognitionMetadata> PARSER =
      new com.google.protobuf.AbstractParser<RecognitionMetadata>() {
        @java.lang.Override
        public RecognitionMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<RecognitionMetadata> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RecognitionMetadata> getParserForType() {
    return PARSER;
  }
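
  /*
   * Hedged parsing sketch: PARSER delegates to Builder#mergeFrom(CodedInputStream,
   * ExtensionRegistryLite), so the two calls below are equivalent ways to decode
   * bytes produced by toByteArray(); both throw InvalidProtocolBufferException on
   * malformed input. parseFrom(byte[]) is the generated static parser declared
   * elsewhere in this class.
   *
   *   byte[] bytes = RecognitionMetadata.getDefaultInstance().toByteArray();
   *   RecognitionMetadata a = RecognitionMetadata.parseFrom(bytes);
   *   RecognitionMetadata b = RecognitionMetadata.parser().parseFrom(bytes);
   */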

  @java.lang.Override
  public com.google.cloud.speech.v1p1beta1.RecognitionMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
