/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/automl/v1beta1/io.proto

package com.google.cloud.automl.v1beta1;

/**
 *
 *
 * <pre>
 * Input configuration for BatchPredict Action.
 * The format of the input depends on the ML problem of the model used for
 * prediction. Unless specified otherwise, the
 * [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source]
 * is expected as the input source.
 * The formats are represented in EBNF with commas being literal and with
 * non-terminal symbols defined near the end of this comment. The formats
 * are:
 *  *  For Image Classification:
 *         CSV file(s) with each line having just a single column:
 *           GCS_FILE_PATH
 *           which leads to an image of up to 30MB in size. Supported
 *           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
 *           the Batch predict output.
 *         Three sample rows:
 *           gs://folder/image1.jpeg
 *           gs://folder/image2.gif
 *           gs://folder/image3.png
 *  *  For Image Object Detection:
 *         CSV file(s) with each line having just a single column:
 *           GCS_FILE_PATH
 *           which leads to an image of up to 30MB in size. Supported
 *           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
 *           the Batch predict output.
 *         Three sample rows:
 *           gs://folder/image1.jpeg
 *           gs://folder/image2.gif
 *           gs://folder/image3.png
 *  *  For Video Classification:
 *         CSV file(s) with each line in format:
 *           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
 *           GCS_FILE_PATH leads to a video of up to 50GB in size and up to 3h
 *           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
 *           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
 *           length of the video, and end has to be after the start.
 *         Three sample rows:
 *           gs://folder/video1.mp4,10,40
 *           gs://folder/video1.mp4,20,60
 *           gs://folder/vid2.mov,0,inf
 *  *  For Video Object Tracking:
 *         CSV file(s) with each line in format:
 *           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
 *           GCS_FILE_PATH leads to a video of up to 50GB in size and up to 3h
 *           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
 *           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
 *           length of the video, and end has to be after the start.
 *         Three sample rows:
 *           gs://folder/video1.mp4,10,240
 *           gs://folder/video1.mp4,300,360
 *           gs://folder/vid2.mov,0,inf
 *  *  For Text Classification:
 *         CSV file(s) with each line having just a single column:
 *           GCS_FILE_PATH | TEXT_SNIPPET
 *         Any given text file can have a size of up to 128kB.
 *         Any given text snippet content must have 60,000 characters or less.
 *         Three sample rows:
 *           gs://folder/text1.txt
 *           "Some text content to predict"
 *           gs://folder/text3.pdf
 *         Supported file extensions: .txt, .pdf
 *  *  For Text Sentiment:
 *         CSV file(s) with each line having just a single column:
 *           GCS_FILE_PATH | TEXT_SNIPPET
 *         Any given text file can have a size of up to 128kB.
 *         Any given text snippet content must have 500 characters or less.
 *         Three sample rows:
 *           gs://folder/text1.txt
 *           "Some text content to predict"
 *           gs://folder/text3.pdf
 *         Supported file extensions: .txt, .pdf
 *  *  For Text Extraction:
 *         .JSONL (i.e. JSON Lines) file(s) which either provide text in-line or
 *         as documents (for a single BatchPredict call only one of these
 *         formats may be used).
 *         The in-line .JSONL file(s) contain, per line, a proto that
 *           wraps a temporary user-assigned TextSnippet ID (string up to 2000
 *           characters long) called "id", a TextSnippet proto (in
 *           json representation) and zero or more TextFeature protos. Any given
 *           text snippet content must have 30,000 characters or less, and also
 *           be UTF-8 NFC encoded (ASCII already is). The IDs provided should be
 *           unique.
 *         The document .JSONL file(s) contain, per line, a proto that wraps a
 *           Document proto with input_config set. Only PDF documents are
 *           supported now, and each document may be up to 2MB in size.
 *         Any given .JSONL file must be 100MB or smaller, and no more than 20
 *         files may be given.
 *         Sample in-line JSON Lines file (presented here with artificial line
 *         breaks, but the only actual line break is denoted by &#92;n):
 *           {
 *             "id": "my_first_id",
 *             "text_snippet": { "content": "dog car cat"},
 *             "text_features": [
 *               {
 *                 "text_segment": {"start_offset": 4, "end_offset": 6},
 *                 "structural_type": PARAGRAPH,
 *                 "bounding_poly": {
 *                   "normalized_vertices": [
 *                     {"x": 0.1, "y": 0.1},
 *                     {"x": 0.1, "y": 0.3},
 *                     {"x": 0.3, "y": 0.3},
 *                     {"x": 0.3, "y": 0.1},
 *                   ]
 *                 },
 *               }
 *             ],
 *           }&#92;n
 *           {
 *             "id": "2",
 *             "text_snippet": {
 *               "content": "An elaborate content",
 *               "mime_type": "text/plain"
 *             }
 *           }
 *         Sample document JSON Lines file (presented here with artificial line
 *         breaks, but the only actual line break is denoted by &#92;n):
 *           {
 *             "document": {
 *               "input_config": {
 *                 "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
 *                 }
 *               }
 *             }
 *           }&#92;n
 *           {
 *             "document": {
 *               "input_config": {
 *                 "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ]
 *                 }
 *               }
 *             }
 *           }
 *  *  For Tables:
 *         Either
 *         [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or
 * [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source].
 *         GCS case:
 *           CSV file(s), each by itself 10GB or smaller and total size must be
 *           100GB or smaller, where the first file must have a header containing
 *           column names. If the first row of a subsequent file is the same as
 *           the header, then it is also treated as a header. All other rows
 *           contain values for the corresponding columns.
 *           The column names must contain the model's
 * [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
 * [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
 *           (order doesn't matter). The columns corresponding to the model's
 *           input feature column specs must contain values compatible with the
 *           column spec's data types. Prediction on all the rows, i.e. the CSV
 *           lines, will be attempted. For FORECASTING
 * [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
 *           all columns having
 * [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
 *           type will be ignored.
 *           First three sample rows of a CSV file:
 *             "First Name","Last Name","Dob","Addresses"
 * "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
 * "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]}
 *         BigQuery case:
 *           A URI of a BigQuery table. The user data size of the BigQuery
 *           table must be 100GB or smaller.
 *           The column names must contain the model's
 * [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
 * [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
 *           (order doesn't matter). The columns corresponding to the model's
 *           input feature column specs must contain values compatible with the
 *           column spec's data types. Prediction on all the rows of the table
 *           will be attempted. For FORECASTING
 * [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
 *           all columns having
 * [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
 *           type will be ignored.
 *  Definitions:
 *  GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/video.avi".
 *  TEXT_SNIPPET = The content of a text snippet, UTF-8 encoded, enclosed within
 *                 double quotes ("")
 *  TIME_SEGMENT_START = TIME_OFFSET
 *                       Expresses a beginning, inclusive, of a time segment
 *                       within an
 *                       example that has a time dimension (e.g. video).
 *  TIME_SEGMENT_END = TIME_OFFSET
 *                     Expresses an end, exclusive, of a time segment within
 *                     an example that has a time dimension (e.g. video).
 *  TIME_OFFSET = A number of seconds as measured from the start of an
 *                example (e.g. video). Fractions are allowed, up to a
 *                microsecond precision. "inf" is allowed and it means the end
 *                of the example.
 *  Errors:
 *  If any of the provided CSV files can't be parsed or if more than a certain
 *  percentage of CSV rows cannot be processed, then the operation fails and
 *  prediction does not happen. Regardless of overall success or failure, the
 *  per-row failures, up to a certain count cap, will be listed in
 *  Operation.metadata.partial_failures.
 * </pre>
 *
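 * <p>A minimal usage sketch (illustration only, not part of the generated message): building a
 * config whose source is a Cloud Storage CSV of GCS_FILE_PATH rows, assuming the companion
 * {@code GcsSource} message exposes the usual generated {@code addInputUris} builder method.
 *
 * <pre>{@code
 * // Hypothetical URI; substitute a CSV you actually uploaded.
 * GcsSource gcsSource =
 *     GcsSource.newBuilder().addInputUris("gs://folder/batch_predict_inputs.csv").build();
 * BatchPredictInputConfig inputConfig =
 *     BatchPredictInputConfig.newBuilder().setGcsSource(gcsSource).build();
 * }</pre>
 *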
 * Protobuf type {@code google.cloud.automl.v1beta1.BatchPredictInputConfig}
 */
public final class BatchPredictInputConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.BatchPredictInputConfig)
    BatchPredictInputConfigOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use BatchPredictInputConfig.newBuilder() to construct.
  private BatchPredictInputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BatchPredictInputConfig() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchPredictInputConfig();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.automl.v1beta1.Io
        .internal_static_google_cloud_automl_v1beta1_BatchPredictInputConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.automl.v1beta1.Io
        .internal_static_google_cloud_automl_v1beta1_BatchPredictInputConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.automl.v1beta1.BatchPredictInputConfig.class,
            com.google.cloud.automl.v1beta1.BatchPredictInputConfig.Builder.class);
  }

  private int sourceCase_ = 0;
  private java.lang.Object source_;

  public enum SourceCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    GCS_SOURCE(1),
    BIGQUERY_SOURCE(2),
    SOURCE_NOT_SET(0);
    private final int value;

    private SourceCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SourceCase valueOf(int value) {
      return forNumber(value);
    }

    public static SourceCase forNumber(int value) {
      switch (value) {
        case 1:
          return GCS_SOURCE;
        case 2:
          return BIGQUERY_SOURCE;
        case 0:
          return SOURCE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public SourceCase getSourceCase() {
    return SourceCase.forNumber(sourceCase_);
  }
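
  // A hedged reader-side sketch (illustration only, not generated API): callers typically
  // dispatch on the oneof case before calling a source accessor. "config" below is a
  // hypothetical, already-built BatchPredictInputConfig instance, and the handle* methods
  // are hypothetical helpers.
  //
  //   switch (config.getSourceCase()) {
  //     case GCS_SOURCE:
  //       handleGcs(config.getGcsSource());
  //       break;
  //     case BIGQUERY_SOURCE:
  //       handleBigQuery(config.getBigquerySource());
  //       break;
  //     case SOURCE_NOT_SET:
  //     default:
  //       break;
  //   }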

  public static final int GCS_SOURCE_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * The Google Cloud Storage location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
   *
   * @return Whether the gcsSource field is set.
   */
  @java.lang.Override
  public boolean hasGcsSource() {
    return sourceCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * The Google Cloud Storage location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
   *
   * @return The gcsSource.
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.GcsSource getGcsSource() {
    if (sourceCase_ == 1) {
      return (com.google.cloud.automl.v1beta1.GcsSource) source_;
    }
    return com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * The Google Cloud Storage location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
    if (sourceCase_ == 1) {
      return (com.google.cloud.automl.v1beta1.GcsSource) source_;
    }
    return com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
  }

  public static final int BIGQUERY_SOURCE_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * The BigQuery location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
   *
   * @return Whether the bigquerySource field is set.
   */
  @java.lang.Override
  public boolean hasBigquerySource() {
    return sourceCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * The BigQuery location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
   *
   * @return The bigquerySource.
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.BigQuerySource getBigquerySource() {
    if (sourceCase_ == 2) {
      return (com.google.cloud.automl.v1beta1.BigQuerySource) source_;
    }
    return com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * The BigQuery location for the input content.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.automl.v1beta1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder() {
    if (sourceCase_ == 2) {
      return (com.google.cloud.automl.v1beta1.BigQuerySource) source_;
    }
    return com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (sourceCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.automl.v1beta1.GcsSource) source_);
    }
    if (sourceCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.automl.v1beta1.BigQuerySource) source_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (sourceCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.automl.v1beta1.GcsSource) source_);
    }
    if (sourceCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.automl.v1beta1.BigQuerySource) source_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1beta1.BatchPredictInputConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1beta1.BatchPredictInputConfig other =
        (com.google.cloud.automl.v1beta1.BatchPredictInputConfig) obj;

    if (!getSourceCase().equals(other.getSourceCase())) return false;
    switch (sourceCase_) {
      case 1:
        if (!getGcsSource().equals(other.getGcsSource())) return false;
        break;
      case 2:
        if (!getBigquerySource().equals(other.getBigquerySource())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (sourceCase_) {
      case 1:
        hash = (37 * hash) + GCS_SOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getGcsSource().hashCode();
        break;
      case 2:
        hash = (37 * hash) + BIGQUERY_SOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getBigquerySource().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.automl.v1beta1.BatchPredictInputConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Input configuration for BatchPredict Action.
   * The format of the input depends on the ML problem of the model used for
   * prediction. Unless specified otherwise, the
   * [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source]
   * is expected as the input source.
   * The formats are represented in EBNF with commas being literal and with
   * non-terminal symbols defined near the end of this comment. The formats
   * are:
   *  *  For Image Classification:
   *         CSV file(s) with each line having just a single column:
   *           GCS_FILE_PATH
   *           which leads to an image of up to 30MB in size. Supported
   *           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
   *           the Batch predict output.
   *         Three sample rows:
   *           gs://folder/image1.jpeg
   *           gs://folder/image2.gif
   *           gs://folder/image3.png
   *  *  For Image Object Detection:
   *         CSV file(s) with each line having just a single column:
   *           GCS_FILE_PATH
   *           which leads to an image of up to 30MB in size. Supported
   *           extensions: .JPEG, .GIF, .PNG. This path is treated as the ID in
   *           the Batch predict output.
   *         Three sample rows:
   *           gs://folder/image1.jpeg
   *           gs://folder/image2.gif
   *           gs://folder/image3.png
   *  *  For Video Classification:
   *         CSV file(s) with each line in format:
   *           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
   *           GCS_FILE_PATH leads to a video of up to 50GB in size and up to 3h
   *           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
   *           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
   *           length of the video, and end has to be after the start.
   *         Three sample rows:
   *           gs://folder/video1.mp4,10,40
   *           gs://folder/video1.mp4,20,60
   *           gs://folder/vid2.mov,0,inf
   *  *  For Video Object Tracking:
   *         CSV file(s) with each line in format:
   *           GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END
   *           GCS_FILE_PATH leads to a video of up to 50GB in size and up to 3h
   *           duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI.
   *           TIME_SEGMENT_START and TIME_SEGMENT_END must be within the
   *           length of the video, and end has to be after the start.
   *         Three sample rows:
   *           gs://folder/video1.mp4,10,240
   *           gs://folder/video1.mp4,300,360
   *           gs://folder/vid2.mov,0,inf
   *  *  For Text Classification:
   *         CSV file(s) with each line having just a single column:
   *           GCS_FILE_PATH | TEXT_SNIPPET
   *         Any given text file can have a size of up to 128kB.
   *         Any given text snippet content must have 60,000 characters or less.
   *         Three sample rows:
   *           gs://folder/text1.txt
   *           "Some text content to predict"
   *           gs://folder/text3.pdf
   *         Supported file extensions: .txt, .pdf
   *  *  For Text Sentiment:
   *         CSV file(s) with each line having just a single column:
   *           GCS_FILE_PATH | TEXT_SNIPPET
   *         Any given text file can have a size of up to 128kB.
   *         Any given text snippet content must have 500 characters or less.
   *         Three sample rows:
   *           gs://folder/text1.txt
   *           "Some text content to predict"
   *           gs://folder/text3.pdf
   *         Supported file extensions: .txt, .pdf
   *  *  For Text Extraction:
   *         .JSONL (i.e. JSON Lines) file(s) which either provide text in-line or
   *         as documents (for a single BatchPredict call only one of these
   *         formats may be used).
   *         The in-line .JSONL file(s) contain, per line, a proto that
   *           wraps a temporary user-assigned TextSnippet ID (string up to 2000
   *           characters long) called "id", a TextSnippet proto (in
   *           json representation) and zero or more TextFeature protos. Any given
   *           text snippet content must have 30,000 characters or less, and also
   *           be UTF-8 NFC encoded (ASCII already is). The IDs provided should be
   *           unique.
   *         The document .JSONL file(s) contain, per line, a proto that wraps a
   *           Document proto with input_config set. Only PDF documents are
   *           supported now, and each document may be up to 2MB in size.
   *         Any given .JSONL file must be 100MB or smaller, and no more than 20
   *         files may be given.
   *         Sample in-line JSON Lines file (presented here with artificial line
   *         breaks, but the only actual line break is denoted by &#92;n):
   *           {
   *             "id": "my_first_id",
   *             "text_snippet": { "content": "dog car cat"},
   *             "text_features": [
   *               {
   *                 "text_segment": {"start_offset": 4, "end_offset": 6},
   *                 "structural_type": PARAGRAPH,
   *                 "bounding_poly": {
   *                   "normalized_vertices": [
   *                     {"x": 0.1, "y": 0.1},
   *                     {"x": 0.1, "y": 0.3},
   *                     {"x": 0.3, "y": 0.3},
   *                     {"x": 0.3, "y": 0.1},
   *                   ]
   *                 },
   *               }
   *             ],
   *           }&#92;n
   *           {
   *             "id": "2",
   *             "text_snippet": {
   *               "content": "An elaborate content",
   *               "mime_type": "text/plain"
   *             }
   *           }
   *         Sample document JSON Lines file (presented here with artificial line
   *         breaks, but the only actual line break is denoted by &#92;n):
   *           {
   *             "document": {
   *               "input_config": {
   *                 "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ]
   *                 }
   *               }
   *             }
   *           }&#92;n
   *           {
   *             "document": {
   *               "input_config": {
   *                 "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ]
   *                 }
   *               }
   *             }
   *           }
   *  *  For Tables:
   *         Either
   *         [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or
   * [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source].
   *         GCS case:
   *           CSV file(s), each by itself 10GB or smaller and total size must be
   *           100GB or smaller, where the first file must have a header containing
   *           column names. If the first row of a subsequent file is the same as
   *           the header, then it is also treated as a header. All other rows
   *           contain values for the corresponding columns.
   *           The column names must contain the model's
   * [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
   * [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
   *           (order doesn't matter). The columns corresponding to the model's
   *           input feature column specs must contain values compatible with the
   *           column spec's data types. Prediction on all the rows, i.e. the CSV
   *           lines, will be attempted. For FORECASTING
   * [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
   *           all columns having
   * [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
   *           type will be ignored.
   *           First three sample rows of a CSV file:
   *             "First Name","Last Name","Dob","Addresses"
   * "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
   * "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]}
   *         BigQuery case:
   *           A URI of a BigQuery table. The user data size of the BigQuery
   *           table must be 100GB or smaller.
   *           The column names must contain the model's
   * [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs]
   * [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name]
   *           (order doesn't matter). The columns corresponding to the model's
   *           input feature column specs must contain values compatible with the
   *           column spec's data types. Prediction on all the rows of the table
   *           will be attempted. For FORECASTING
   * [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]:
   *           all columns having
   * [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType]
   *           type will be ignored.
   *  Definitions:
   *  GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/video.avi".
   *  TEXT_SNIPPET = The content of a text snippet, UTF-8 encoded, enclosed within
   *                 double quotes ("")
   *  TIME_SEGMENT_START = TIME_OFFSET
   *                       Expresses a beginning, inclusive, of a time segment
   *                       within an
   *                       example that has a time dimension (e.g. video).
   *  TIME_SEGMENT_END = TIME_OFFSET
   *                     Expresses an end, exclusive, of a time segment within
   *                     an example that has a time dimension (e.g. video).
   *  TIME_OFFSET = A number of seconds as measured from the start of an
   *                example (e.g. video). Fractions are allowed, up to a
   *                microsecond precision. "inf" is allowed and it means the end
   *                of the example.
   *  Errors:
   *  If any of the provided CSV files can't be parsed or if more than a certain
   *  percentage of CSV rows cannot be processed, then the operation fails and
   *  prediction does not happen. Regardless of overall success or failure, the
   *  per-row failures, up to a certain count cap, will be listed in
   *  Operation.metadata.partial_failures.
   * </pre>
   *
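   * <p>A minimal builder-side sketch (illustration only): selecting the BigQuery source instead,
   * assuming the companion {@code BigQuerySource} message exposes the usual generated
   * {@code setInputUri} method. Substitute your own project, dataset and table in the URI.
   *
   * <pre>{@code
   * BigQuerySource bigquerySource =
   *     BigQuerySource.newBuilder().setInputUri("bq://my-project.my_dataset.my_table").build();
   * BatchPredictInputConfig inputConfig =
   *     BatchPredictInputConfig.newBuilder().setBigquerySource(bigquerySource).build();
   * }</pre>
   *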
   * Protobuf type {@code google.cloud.automl.v1beta1.BatchPredictInputConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.BatchPredictInputConfig)
      com.google.cloud.automl.v1beta1.BatchPredictInputConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.automl.v1beta1.Io
          .internal_static_google_cloud_automl_v1beta1_BatchPredictInputConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.automl.v1beta1.Io
          .internal_static_google_cloud_automl_v1beta1_BatchPredictInputConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.automl.v1beta1.BatchPredictInputConfig.class,
              com.google.cloud.automl.v1beta1.BatchPredictInputConfig.Builder.class);
    }

    // Construct using com.google.cloud.automl.v1beta1.BatchPredictInputConfig.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (gcsSourceBuilder_ != null) {
        gcsSourceBuilder_.clear();
      }
      if (bigquerySourceBuilder_ != null) {
        bigquerySourceBuilder_.clear();
      }
      sourceCase_ = 0;
      source_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.automl.v1beta1.Io
          .internal_static_google_cloud_automl_v1beta1_BatchPredictInputConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.BatchPredictInputConfig getDefaultInstanceForType() {
      return com.google.cloud.automl.v1beta1.BatchPredictInputConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.BatchPredictInputConfig build() {
      com.google.cloud.automl.v1beta1.BatchPredictInputConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.BatchPredictInputConfig buildPartial() {
      com.google.cloud.automl.v1beta1.BatchPredictInputConfig result =
          new com.google.cloud.automl.v1beta1.BatchPredictInputConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.cloud.automl.v1beta1.BatchPredictInputConfig result) {
      int from_bitField0_ = bitField0_;
    }

    private void buildPartialOneofs(
        com.google.cloud.automl.v1beta1.BatchPredictInputConfig result) {
      result.sourceCase_ = sourceCase_;
      result.source_ = this.source_;
      if (sourceCase_ == 1 && gcsSourceBuilder_ != null) {
        result.source_ = gcsSourceBuilder_.build();
      }
      if (sourceCase_ == 2 && bigquerySourceBuilder_ != null) {
        result.source_ = bigquerySourceBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.automl.v1beta1.BatchPredictInputConfig) {
        return mergeFrom((com.google.cloud.automl.v1beta1.BatchPredictInputConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.automl.v1beta1.BatchPredictInputConfig other) {
      if (other == com.google.cloud.automl.v1beta1.BatchPredictInputConfig.getDefaultInstance())
        return this;
      switch (other.getSourceCase()) {
        case GCS_SOURCE:
          {
            mergeGcsSource(other.getGcsSource());
            break;
          }
        case BIGQUERY_SOURCE:
          {
            mergeBigquerySource(other.getBigquerySource());
            break;
          }
        case SOURCE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getGcsSourceFieldBuilder().getBuilder(), extensionRegistry);
                sourceCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getBigquerySourceFieldBuilder().getBuilder(), extensionRegistry);
                sourceCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int sourceCase_ = 0;
    private java.lang.Object source_;

    public SourceCase getSourceCase() {
      return SourceCase.forNumber(sourceCase_);
    }

    public Builder clearSource() {
      sourceCase_ = 0;
      source_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.GcsSource,
            com.google.cloud.automl.v1beta1.GcsSource.Builder,
            com.google.cloud.automl.v1beta1.GcsSourceOrBuilder>
        gcsSourceBuilder_;
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     *
     * @return Whether the gcsSource field is set.
     */
    @java.lang.Override
    public boolean hasGcsSource() {
      return sourceCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     *
     * @return The gcsSource.
     */
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.GcsSource getGcsSource() {
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1) {
          return (com.google.cloud.automl.v1beta1.GcsSource) source_;
        }
        return com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
      } else {
        if (sourceCase_ == 1) {
          return gcsSourceBuilder_.getMessage();
        }
        return com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    public Builder setGcsSource(com.google.cloud.automl.v1beta1.GcsSource value) {
      if (gcsSourceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        source_ = value;
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(value);
      }
      sourceCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    public Builder setGcsSource(com.google.cloud.automl.v1beta1.GcsSource.Builder builderForValue) {
      if (gcsSourceBuilder_ == null) {
        source_ = builderForValue.build();
        onChanged();
      } else {
        gcsSourceBuilder_.setMessage(builderForValue.build());
      }
      sourceCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    public Builder mergeGcsSource(com.google.cloud.automl.v1beta1.GcsSource value) {
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1
            && source_ != com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance()) {
          source_ =
              com.google.cloud.automl.v1beta1.GcsSource.newBuilder(
                      (com.google.cloud.automl.v1beta1.GcsSource) source_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          source_ = value;
        }
        onChanged();
      } else {
        if (sourceCase_ == 1) {
          gcsSourceBuilder_.mergeFrom(value);
        } else {
          gcsSourceBuilder_.setMessage(value);
        }
      }
      sourceCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    public Builder clearGcsSource() {
      if (gcsSourceBuilder_ == null) {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
          onChanged();
        }
      } else {
        if (sourceCase_ == 1) {
          sourceCase_ = 0;
          source_ = null;
        }
        gcsSourceBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    public com.google.cloud.automl.v1beta1.GcsSource.Builder getGcsSourceBuilder() {
      return getGcsSourceFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.GcsSourceOrBuilder getGcsSourceOrBuilder() {
      if ((sourceCase_ == 1) && (gcsSourceBuilder_ != null)) {
        return gcsSourceBuilder_.getMessageOrBuilder();
      } else {
        if (sourceCase_ == 1) {
          return (com.google.cloud.automl.v1beta1.GcsSource) source_;
        }
        return com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * The Google Cloud Storage location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.GcsSource gcs_source = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.GcsSource,
            com.google.cloud.automl.v1beta1.GcsSource.Builder,
            com.google.cloud.automl.v1beta1.GcsSourceOrBuilder>
        getGcsSourceFieldBuilder() {
      if (gcsSourceBuilder_ == null) {
        if (!(sourceCase_ == 1)) {
          source_ = com.google.cloud.automl.v1beta1.GcsSource.getDefaultInstance();
        }
        gcsSourceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.automl.v1beta1.GcsSource,
                com.google.cloud.automl.v1beta1.GcsSource.Builder,
                com.google.cloud.automl.v1beta1.GcsSourceOrBuilder>(
                (com.google.cloud.automl.v1beta1.GcsSource) source_,
                getParentForChildren(),
                isClean());
        source_ = null;
      }
      sourceCase_ = 1;
      onChanged();
      return gcsSourceBuilder_;
    }

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.BigQuerySource,
            com.google.cloud.automl.v1beta1.BigQuerySource.Builder,
            com.google.cloud.automl.v1beta1.BigQuerySourceOrBuilder>
        bigquerySourceBuilder_;
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     *
     * @return Whether the bigquerySource field is set.
     */
    @java.lang.Override
    public boolean hasBigquerySource() {
      return sourceCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     *
     * @return The bigquerySource.
     */
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.BigQuerySource getBigquerySource() {
      if (bigquerySourceBuilder_ == null) {
        if (sourceCase_ == 2) {
          return (com.google.cloud.automl.v1beta1.BigQuerySource) source_;
        }
        return com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
      } else {
        if (sourceCase_ == 2) {
          return bigquerySourceBuilder_.getMessage();
        }
        return com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    public Builder setBigquerySource(com.google.cloud.automl.v1beta1.BigQuerySource value) {
      if (bigquerySourceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        source_ = value;
        onChanged();
      } else {
        bigquerySourceBuilder_.setMessage(value);
      }
      sourceCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    public Builder setBigquerySource(
        com.google.cloud.automl.v1beta1.BigQuerySource.Builder builderForValue) {
      if (bigquerySourceBuilder_ == null) {
        source_ = builderForValue.build();
        onChanged();
      } else {
        bigquerySourceBuilder_.setMessage(builderForValue.build());
      }
      sourceCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    public Builder mergeBigquerySource(com.google.cloud.automl.v1beta1.BigQuerySource value) {
      if (bigquerySourceBuilder_ == null) {
        if (sourceCase_ == 2
            && source_ != com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance()) {
          source_ =
              com.google.cloud.automl.v1beta1.BigQuerySource.newBuilder(
                      (com.google.cloud.automl.v1beta1.BigQuerySource) source_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          source_ = value;
        }
        onChanged();
      } else {
        if (sourceCase_ == 2) {
          bigquerySourceBuilder_.mergeFrom(value);
        } else {
          bigquerySourceBuilder_.setMessage(value);
        }
      }
      sourceCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    public Builder clearBigquerySource() {
      if (bigquerySourceBuilder_ == null) {
        if (sourceCase_ == 2) {
          sourceCase_ = 0;
          source_ = null;
          onChanged();
        }
      } else {
        if (sourceCase_ == 2) {
          sourceCase_ = 0;
          source_ = null;
        }
        bigquerySourceBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    public com.google.cloud.automl.v1beta1.BigQuerySource.Builder getBigquerySourceBuilder() {
      return getBigquerySourceFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    @java.lang.Override
    public com.google.cloud.automl.v1beta1.BigQuerySourceOrBuilder getBigquerySourceOrBuilder() {
      if ((sourceCase_ == 2) && (bigquerySourceBuilder_ != null)) {
        return bigquerySourceBuilder_.getMessageOrBuilder();
      } else {
        if (sourceCase_ == 2) {
          return (com.google.cloud.automl.v1beta1.BigQuerySource) source_;
        }
        return com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * The BigQuery location for the input content.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.BigQuerySource bigquery_source = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.BigQuerySource,
            com.google.cloud.automl.v1beta1.BigQuerySource.Builder,
            com.google.cloud.automl.v1beta1.BigQuerySourceOrBuilder>
        getBigquerySourceFieldBuilder() {
      if (bigquerySourceBuilder_ == null) {
        if (!(sourceCase_ == 2)) {
          source_ = com.google.cloud.automl.v1beta1.BigQuerySource.getDefaultInstance();
        }
        bigquerySourceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.automl.v1beta1.BigQuerySource,
                com.google.cloud.automl.v1beta1.BigQuerySource.Builder,
                com.google.cloud.automl.v1beta1.BigQuerySourceOrBuilder>(
                (com.google.cloud.automl.v1beta1.BigQuerySource) source_,
                getParentForChildren(),
                isClean());
        source_ = null;
      }
      sourceCase_ = 2;
      onChanged();
      return bigquerySourceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.BatchPredictInputConfig)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.BatchPredictInputConfig)
  private static final com.google.cloud.automl.v1beta1.BatchPredictInputConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.BatchPredictInputConfig();
  }

  public static com.google.cloud.automl.v1beta1.BatchPredictInputConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BatchPredictInputConfig> PARSER =
      new com.google.protobuf.AbstractParser<BatchPredictInputConfig>() {
        @java.lang.Override
        public BatchPredictInputConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BatchPredictInputConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BatchPredictInputConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.automl.v1beta1.BatchPredictInputConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
