/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dataproc/v1/workflow_templates.proto

package com.google.cloud.dataproc.v1;

/**
 *
 *
 * <pre>
 * A job executed by the workflow.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataproc.v1.OrderedJob}
 */
public final class OrderedJob extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.OrderedJob)
    OrderedJobOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use OrderedJob.newBuilder() to construct.
  private OrderedJob(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private OrderedJob() {
    stepId_ = "";
    prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }
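
  // Illustrative usage (hedged, not part of the generated API): a minimal
  // sketch of assembling an OrderedJob with the generated builder. The step id
  // "teragen" and the default HadoopJob instance are hypothetical placeholders.
  //
  //   com.google.cloud.dataproc.v1.OrderedJob job =
  //       com.google.cloud.dataproc.v1.OrderedJob.newBuilder()
  //           .setStepId("teragen") // 3-50 chars; letters, numbers, _ and - only
  //           .setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance())
  //           .build();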

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new OrderedJob();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
        .internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
  }

  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapField internalGetMapField(int number) {
    switch (number) {
      case 8:
        return internalGetLabels();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
        .internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.OrderedJob.class,
            com.google.cloud.dataproc.v1.OrderedJob.Builder.class);
  }

  private int jobTypeCase_ = 0;
  private java.lang.Object jobType_;

  public enum JobTypeCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    HADOOP_JOB(2),
    SPARK_JOB(3),
    PYSPARK_JOB(4),
    HIVE_JOB(5),
    PIG_JOB(6),
    SPARK_R_JOB(11),
    SPARK_SQL_JOB(7),
    PRESTO_JOB(12),
    JOBTYPE_NOT_SET(0);
    private final int value;

    private JobTypeCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static JobTypeCase valueOf(int value) {
      return forNumber(value);
    }

    public static JobTypeCase forNumber(int value) {
      switch (value) {
        case 2:
          return HADOOP_JOB;
        case 3:
          return SPARK_JOB;
        case 4:
          return PYSPARK_JOB;
        case 5:
          return HIVE_JOB;
        case 6:
          return PIG_JOB;
        case 11:
          return SPARK_R_JOB;
        case 7:
          return SPARK_SQL_JOB;
        case 12:
          return PRESTO_JOB;
        case 0:
          return JOBTYPE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public JobTypeCase getJobTypeCase() {
    return JobTypeCase.forNumber(jobTypeCase_);
  }
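
  // Illustrative usage (hedged): a minimal sketch of dispatching on the
  // job_type oneof; `job` is a hypothetical OrderedJob instance. At most one
  // case is populated at a time, otherwise JOBTYPE_NOT_SET is returned.
  //
  //   switch (job.getJobTypeCase()) {
  //     case HADOOP_JOB:
  //       com.google.cloud.dataproc.v1.HadoopJob hadoopJob = job.getHadoopJob();
  //       break;
  //     case JOBTYPE_NOT_SET:
  //       // No job type was set on this message.
  //       break;
  //     default:
  //       // Handle the remaining job types as needed.
  //       break;
  //   }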

  public static final int STEP_ID_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object stepId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The step id. The id must be unique among all jobs
   * within the template.
   * The step id is used as a prefix for the job id, as the job's
   * `goog-dataproc-workflow-step-id` label, and in the
   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
   * field from other steps.
   * The id must contain only letters (a-z, A-Z), numbers (0-9),
   * underscores (_), and hyphens (-). It cannot begin or end with an
   * underscore or hyphen, and must consist of between 3 and 50 characters.
   * </pre>
   *
   * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The stepId.
   */
  @java.lang.Override
  public java.lang.String getStepId() {
    java.lang.Object ref = stepId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      stepId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The step id. The id must be unique among all jobs
   * within the template.
   * The step id is used as a prefix for the job id, as the job's
   * `goog-dataproc-workflow-step-id` label, and in the
   * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
   * field from other steps.
   * The id must contain only letters (a-z, A-Z), numbers (0-9),
   * underscores (_), and hyphens (-). It cannot begin or end with an
   * underscore or hyphen, and must consist of between 3 and 50 characters.
   * </pre>
   *
   * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for stepId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getStepIdBytes() {
    java.lang.Object ref = stepId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      stepId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int HADOOP_JOB_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hadoop job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the hadoopJob field is set.
   */
  @java.lang.Override
  public boolean hasHadoopJob() {
    return jobTypeCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hadoop job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The hadoopJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
    if (jobTypeCase_ == 2) {
      return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hadoop job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
    if (jobTypeCase_ == 2) {
      return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
  }

  public static final int SPARK_JOB_FIELD_NUMBER = 3;
  /**
   *
   *
   * <pre>
   * Optional. Job is a Spark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkJob() {
    return jobTypeCase_ == 3;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Spark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The sparkJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
    if (jobTypeCase_ == 3) {
      return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Spark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
    if (jobTypeCase_ == 3) {
      return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
  }

  public static final int PYSPARK_JOB_FIELD_NUMBER = 4;
  /**
   *
   *
   * <pre>
   * Optional. Job is a PySpark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the pysparkJob field is set.
   */
  @java.lang.Override
  public boolean hasPysparkJob() {
    return jobTypeCase_ == 4;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a PySpark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The pysparkJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
    if (jobTypeCase_ == 4) {
      return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a PySpark job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
    if (jobTypeCase_ == 4) {
      return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
  }

  public static final int HIVE_JOB_FIELD_NUMBER = 5;
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hive job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the hiveJob field is set.
   */
  @java.lang.Override
  public boolean hasHiveJob() {
    return jobTypeCase_ == 5;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hive job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The hiveJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
    if (jobTypeCase_ == 5) {
      return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Hive job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
    if (jobTypeCase_ == 5) {
      return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
  }

  public static final int PIG_JOB_FIELD_NUMBER = 6;
  /**
   *
   *
   * <pre>
   * Optional. Job is a Pig job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the pigJob field is set.
   */
  @java.lang.Override
  public boolean hasPigJob() {
    return jobTypeCase_ == 6;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Pig job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The pigJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PigJob getPigJob() {
    if (jobTypeCase_ == 6) {
      return (com.google.cloud.dataproc.v1.PigJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Pig job.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
    if (jobTypeCase_ == 6) {
      return (com.google.cloud.dataproc.v1.PigJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
  }

  public static final int SPARK_R_JOB_FIELD_NUMBER = 11;
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkR job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkRJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkRJob() {
    return jobTypeCase_ == 11;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkR job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The sparkRJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
    if (jobTypeCase_ == 11) {
      return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkR job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
    if (jobTypeCase_ == 11) {
      return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
  }

  public static final int SPARK_SQL_JOB_FIELD_NUMBER = 7;
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkSql job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the sparkSqlJob field is set.
   */
  @java.lang.Override
  public boolean hasSparkSqlJob() {
    return jobTypeCase_ == 7;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkSql job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The sparkSqlJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
    if (jobTypeCase_ == 7) {
      return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a SparkSql job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
    if (jobTypeCase_ == 7) {
      return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
  }

  public static final int PRESTO_JOB_FIELD_NUMBER = 12;
  /**
   *
   *
   * <pre>
   * Optional. Job is a Presto job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the prestoJob field is set.
   */
  @java.lang.Override
  public boolean hasPrestoJob() {
    return jobTypeCase_ == 12;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Presto job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The prestoJob.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
    if (jobTypeCase_ == 12) {
      return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Optional. Job is a Presto job.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
    if (jobTypeCase_ == 12) {
      return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
    }
    return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
  }

  public static final int LABELS_FIELD_NUMBER = 8;

  private static final class LabelsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
            com.google.cloud.dataproc.v1.WorkflowTemplatesProto
                .internal_static_google_cloud_dataproc_v1_OrderedJob_LabelsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.STRING,
            "");
  }

  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

  private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
    if (labels_ == null) {
      return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
    }
    return labels_;
  }

  public int getLabelsCount() {
    return internalGetLabels().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Optional. The labels to associate with this job.
   * Label keys must be between 1 and 63 characters long, and must conform to
   * the following regular expression:
   * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
   * Label values must be between 1 and 63 characters long, and must conform to
   * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
   * No more than 32 labels can be associated with a given job.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public boolean containsLabels(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetLabels().getMap().containsKey(key);
  }
  /** Use {@link #getLabelsMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getLabels() {
    return getLabelsMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. The labels to associate with this job.
   * Label keys must be between 1 and 63 characters long, and must conform to
   * the following regular expression:
   * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
   * Label values must be between 1 and 63 characters long, and must conform to
   * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
   * No more than 32 labels can be associated with a given job.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
    return internalGetLabels().getMap();
  }
  /**
   *
   *
   * <pre>
   * Optional. The labels to associate with this job.
   * Label keys must be between 1 and 63 characters long, and must conform to
   * the following regular expression:
   * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
   * Label values must be between 1 and 63 characters long, and must conform to
   * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
   * No more than 32 labels can be associated with a given job.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getLabelsOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Optional. The labels to associate with this job.
   * Label keys must be between 1 and 63 characters long, and must conform to
   * the following regular expression:
   * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
   * Label values must be between 1 and 63 characters long, and must conform to
   * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
   * No more than 32 labels can be associated with a given job.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public java.lang.String getLabelsOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
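
  // Illustrative usage (hedged): a minimal sketch of reading labels without
  // risking the IllegalArgumentException thrown by getLabelsOrThrow; the
  // "environment" key is a hypothetical example.
  //
  //   String env = job.getLabelsOrDefault("environment", "unset");
  //   if (job.containsLabels("environment")) {
  //     env = job.getLabelsOrThrow("environment");
  //   }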

  public static final int SCHEDULING_FIELD_NUMBER = 9;
  private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
  /**
   *
   *
   * <pre>
   * Optional. Job scheduling configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the scheduling field is set.
   */
  @java.lang.Override
  public boolean hasScheduling() {
    return scheduling_ != null;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job scheduling configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The scheduling.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
    return scheduling_ == null
        ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
        : scheduling_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Job scheduling configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
    return scheduling_ == null
        ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
        : scheduling_;
  }
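
  // Illustrative usage (hedged): scheduling is a singular message field, so a
  // caller should check hasScheduling() before relying on getScheduling(),
  // which otherwise returns the default instance. `job` is a hypothetical
  // OrderedJob instance.
  //
  //   if (job.hasScheduling()) {
  //     com.google.cloud.dataproc.v1.JobScheduling scheduling = job.getScheduling();
  //   }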

  public static final int PREREQUISITE_STEP_IDS_FIELD_NUMBER = 10;

  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringList prerequisiteStepIds_;
  /**
   *
   *
   * <pre>
   * Optional. The list of prerequisite job step_ids.
   * If not specified, the job will start at the beginning of the workflow.
   * </pre>
   *
   * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return A list containing the prerequisiteStepIds.
   */
  public com.google.protobuf.ProtocolStringList getPrerequisiteStepIdsList() {
    return prerequisiteStepIds_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The list of prerequisite job step_ids.
   * If not specified, the job will start at the beginning of the workflow.
   * </pre>
   *
   * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The count of prerequisiteStepIds.
   */
  public int getPrerequisiteStepIdsCount() {
    return prerequisiteStepIds_.size();
  }
  /**
   *
   *
   * <pre>
   * Optional. The list of prerequisite job step_ids.
   * If not specified, the job will start at the beginning of the workflow.
   * </pre>
   *
   * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @param index The index of the element to return.
   * @return The prerequisiteStepIds at the given index.
   */
  public java.lang.String getPrerequisiteStepIds(int index) {
    return prerequisiteStepIds_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Optional. The list of prerequisite job step_ids.
   * If not specified, the job will start at the beginning of the workflow.
   * </pre>
   *
   * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the prerequisiteStepIds at the given index.
   */
  public com.google.protobuf.ByteString getPrerequisiteStepIdsBytes(int index) {
    return prerequisiteStepIds_.getByteString(index);
  }
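
  // Illustrative usage (hedged): prerequisite_step_ids entries reference the
  // step_id values of other OrderedJob entries, forming the workflow's
  // dependency graph. `job` is a hypothetical OrderedJob instance.
  //
  //   for (java.lang.String prerequisite : job.getPrerequisiteStepIdsList()) {
  //     // Each entry must match the step_id of another job in the template.
  //   }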

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stepId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, stepId_);
    }
    if (jobTypeCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_);
    }
    if (jobTypeCase_ == 3) {
      output.writeMessage(3, (com.google.cloud.dataproc.v1.SparkJob) jobType_);
    }
    if (jobTypeCase_ == 4) {
      output.writeMessage(4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_);
    }
    if (jobTypeCase_ == 5) {
      output.writeMessage(5, (com.google.cloud.dataproc.v1.HiveJob) jobType_);
    }
    if (jobTypeCase_ == 6) {
      output.writeMessage(6, (com.google.cloud.dataproc.v1.PigJob) jobType_);
    }
    if (jobTypeCase_ == 7) {
      output.writeMessage(7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_);
    }
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 8);
    if (scheduling_ != null) {
      output.writeMessage(9, getScheduling());
    }
    for (int i = 0; i < prerequisiteStepIds_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(
          output, 10, prerequisiteStepIds_.getRaw(i));
    }
    if (jobTypeCase_ == 11) {
      output.writeMessage(11, (com.google.cloud.dataproc.v1.SparkRJob) jobType_);
    }
    if (jobTypeCase_ == 12) {
      output.writeMessage(12, (com.google.cloud.dataproc.v1.PrestoJob) jobType_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stepId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, stepId_);
    }
    if (jobTypeCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.dataproc.v1.HadoopJob) jobType_);
    }
    if (jobTypeCase_ == 3) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              3, (com.google.cloud.dataproc.v1.SparkJob) jobType_);
    }
    if (jobTypeCase_ == 4) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              4, (com.google.cloud.dataproc.v1.PySparkJob) jobType_);
    }
    if (jobTypeCase_ == 5) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              5, (com.google.cloud.dataproc.v1.HiveJob) jobType_);
    }
    if (jobTypeCase_ == 6) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              6, (com.google.cloud.dataproc.v1.PigJob) jobType_);
    }
    if (jobTypeCase_ == 7) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              7, (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_);
    }
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
        internalGetLabels().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
          LabelsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(8, labels__);
    }
    if (scheduling_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(9, getScheduling());
    }
    {
      int dataSize = 0;
      for (int i = 0; i < prerequisiteStepIds_.size(); i++) {
        dataSize += computeStringSizeNoTag(prerequisiteStepIds_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getPrerequisiteStepIdsList().size();
    }
    if (jobTypeCase_ == 11) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              11, (com.google.cloud.dataproc.v1.SparkRJob) jobType_);
    }
    if (jobTypeCase_ == 12) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              12, (com.google.cloud.dataproc.v1.PrestoJob) jobType_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.OrderedJob)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.OrderedJob other = (com.google.cloud.dataproc.v1.OrderedJob) obj;

    if (!getStepId().equals(other.getStepId())) return false;
    if (!internalGetLabels().equals(other.internalGetLabels())) return false;
    if (hasScheduling() != other.hasScheduling()) return false;
    if (hasScheduling()) {
      if (!getScheduling().equals(other.getScheduling())) return false;
    }
    if (!getPrerequisiteStepIdsList().equals(other.getPrerequisiteStepIdsList())) return false;
    if (!getJobTypeCase().equals(other.getJobTypeCase())) return false;
    switch (jobTypeCase_) {
      case 2:
        if (!getHadoopJob().equals(other.getHadoopJob())) return false;
        break;
      case 3:
        if (!getSparkJob().equals(other.getSparkJob())) return false;
        break;
      case 4:
        if (!getPysparkJob().equals(other.getPysparkJob())) return false;
        break;
      case 5:
        if (!getHiveJob().equals(other.getHiveJob())) return false;
        break;
      case 6:
        if (!getPigJob().equals(other.getPigJob())) return false;
        break;
      case 11:
        if (!getSparkRJob().equals(other.getSparkRJob())) return false;
        break;
      case 7:
        if (!getSparkSqlJob().equals(other.getSparkSqlJob())) return false;
        break;
      case 12:
        if (!getPrestoJob().equals(other.getPrestoJob())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + STEP_ID_FIELD_NUMBER;
    hash = (53 * hash) + getStepId().hashCode();
    if (!internalGetLabels().getMap().isEmpty()) {
      hash = (37 * hash) + LABELS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetLabels().hashCode();
    }
    if (hasScheduling()) {
      hash = (37 * hash) + SCHEDULING_FIELD_NUMBER;
      hash = (53 * hash) + getScheduling().hashCode();
    }
    if (getPrerequisiteStepIdsCount() > 0) {
      hash = (37 * hash) + PREREQUISITE_STEP_IDS_FIELD_NUMBER;
      hash = (53 * hash) + getPrerequisiteStepIdsList().hashCode();
    }
    switch (jobTypeCase_) {
      case 2:
        hash = (37 * hash) + HADOOP_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getHadoopJob().hashCode();
        break;
      case 3:
        hash = (37 * hash) + SPARK_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkJob().hashCode();
        break;
      case 4:
        hash = (37 * hash) + PYSPARK_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPysparkJob().hashCode();
        break;
      case 5:
        hash = (37 * hash) + HIVE_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getHiveJob().hashCode();
        break;
      case 6:
        hash = (37 * hash) + PIG_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPigJob().hashCode();
        break;
      case 11:
        hash = (37 * hash) + SPARK_R_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkRJob().hashCode();
        break;
      case 7:
        hash = (37 * hash) + SPARK_SQL_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getSparkSqlJob().hashCode();
        break;
      case 12:
        hash = (37 * hash) + PRESTO_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getPrestoJob().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dataproc.v1.OrderedJob parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
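
  // Illustrative usage (hedged): a minimal serialize/parse round trip using
  // the byte[] overload above; `job` is a hypothetical OrderedJob instance.
  //
  //   byte[] bytes = job.toByteArray();
  //   com.google.cloud.dataproc.v1.OrderedJob parsed =
  //       com.google.cloud.dataproc.v1.OrderedJob.parseFrom(bytes);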

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dataproc.v1.OrderedJob prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
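
  // Illustrative usage (hedged): toBuilder() copies this message so a caller
  // can derive a modified instance; `job` and the new step id are hypothetical.
  //
  //   com.google.cloud.dataproc.v1.OrderedJob renamed =
  //       job.toBuilder().setStepId("teragen-v2").build();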

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A job executed by the workflow.
   * </pre>
   *
   * Protobuf type {@code google.cloud.dataproc.v1.OrderedJob}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.OrderedJob)
      com.google.cloud.dataproc.v1.OrderedJobOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
          .internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMapField(int number) {
      switch (number) {
        case 8:
          return internalGetLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
      switch (number) {
        case 8:
          return internalGetMutableLabels();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
          .internal_static_google_cloud_dataproc_v1_OrderedJob_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dataproc.v1.OrderedJob.class,
              com.google.cloud.dataproc.v1.OrderedJob.Builder.class);
    }

    // Construct using com.google.cloud.dataproc.v1.OrderedJob.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      stepId_ = "";
      if (hadoopJobBuilder_ != null) {
        hadoopJobBuilder_.clear();
      }
      if (sparkJobBuilder_ != null) {
        sparkJobBuilder_.clear();
      }
      if (pysparkJobBuilder_ != null) {
        pysparkJobBuilder_.clear();
      }
      if (hiveJobBuilder_ != null) {
        hiveJobBuilder_.clear();
      }
      if (pigJobBuilder_ != null) {
        pigJobBuilder_.clear();
      }
      if (sparkRJobBuilder_ != null) {
        sparkRJobBuilder_.clear();
      }
      if (sparkSqlJobBuilder_ != null) {
        sparkSqlJobBuilder_.clear();
      }
      if (prestoJobBuilder_ != null) {
        prestoJobBuilder_.clear();
      }
      internalGetMutableLabels().clear();
      scheduling_ = null;
      if (schedulingBuilder_ != null) {
        schedulingBuilder_.dispose();
        schedulingBuilder_ = null;
      }
      prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000800);
      jobTypeCase_ = 0;
      jobType_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
          .internal_static_google_cloud_dataproc_v1_OrderedJob_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() {
      return com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.OrderedJob build() {
      com.google.cloud.dataproc.v1.OrderedJob result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dataproc.v1.OrderedJob buildPartial() {
      com.google.cloud.dataproc.v1.OrderedJob result =
          new com.google.cloud.dataproc.v1.OrderedJob(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(com.google.cloud.dataproc.v1.OrderedJob result) {
      if (((bitField0_ & 0x00000800) != 0)) {
        prerequisiteStepIds_ = prerequisiteStepIds_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000800);
      }
      result.prerequisiteStepIds_ = prerequisiteStepIds_;
    }

    private void buildPartial0(com.google.cloud.dataproc.v1.OrderedJob result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.stepId_ = stepId_;
      }
      if (((from_bitField0_ & 0x00000200) != 0)) {
        result.labels_ = internalGetLabels();
        result.labels_.makeImmutable();
      }
      if (((from_bitField0_ & 0x00000400) != 0)) {
        result.scheduling_ = schedulingBuilder_ == null ? scheduling_ : schedulingBuilder_.build();
      }
    }

    private void buildPartialOneofs(com.google.cloud.dataproc.v1.OrderedJob result) {
      result.jobTypeCase_ = jobTypeCase_;
      result.jobType_ = this.jobType_;
      if (jobTypeCase_ == 2 && hadoopJobBuilder_ != null) {
        result.jobType_ = hadoopJobBuilder_.build();
      }
      if (jobTypeCase_ == 3 && sparkJobBuilder_ != null) {
        result.jobType_ = sparkJobBuilder_.build();
      }
      if (jobTypeCase_ == 4 && pysparkJobBuilder_ != null) {
        result.jobType_ = pysparkJobBuilder_.build();
      }
      if (jobTypeCase_ == 5 && hiveJobBuilder_ != null) {
        result.jobType_ = hiveJobBuilder_.build();
      }
      if (jobTypeCase_ == 6 && pigJobBuilder_ != null) {
        result.jobType_ = pigJobBuilder_.build();
      }
      if (jobTypeCase_ == 11 && sparkRJobBuilder_ != null) {
        result.jobType_ = sparkRJobBuilder_.build();
      }
      if (jobTypeCase_ == 7 && sparkSqlJobBuilder_ != null) {
        result.jobType_ = sparkSqlJobBuilder_.build();
      }
      if (jobTypeCase_ == 12 && prestoJobBuilder_ != null) {
        result.jobType_ = prestoJobBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dataproc.v1.OrderedJob) {
        return mergeFrom((com.google.cloud.dataproc.v1.OrderedJob) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dataproc.v1.OrderedJob other) {
      if (other == com.google.cloud.dataproc.v1.OrderedJob.getDefaultInstance()) return this;
      if (!other.getStepId().isEmpty()) {
        stepId_ = other.stepId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableLabels().mergeFrom(other.internalGetLabels());
      bitField0_ |= 0x00000200;
      if (other.hasScheduling()) {
        mergeScheduling(other.getScheduling());
      }
      if (!other.prerequisiteStepIds_.isEmpty()) {
        if (prerequisiteStepIds_.isEmpty()) {
          prerequisiteStepIds_ = other.prerequisiteStepIds_;
          bitField0_ = (bitField0_ & ~0x00000800);
        } else {
          ensurePrerequisiteStepIdsIsMutable();
          prerequisiteStepIds_.addAll(other.prerequisiteStepIds_);
        }
        onChanged();
      }
      switch (other.getJobTypeCase()) {
        case HADOOP_JOB:
          {
            mergeHadoopJob(other.getHadoopJob());
            break;
          }
        case SPARK_JOB:
          {
            mergeSparkJob(other.getSparkJob());
            break;
          }
        case PYSPARK_JOB:
          {
            mergePysparkJob(other.getPysparkJob());
            break;
          }
        case HIVE_JOB:
          {
            mergeHiveJob(other.getHiveJob());
            break;
          }
        case PIG_JOB:
          {
            mergePigJob(other.getPigJob());
            break;
          }
        case SPARK_R_JOB:
          {
            mergeSparkRJob(other.getSparkRJob());
            break;
          }
        case SPARK_SQL_JOB:
          {
            mergeSparkSqlJob(other.getSparkSqlJob());
            break;
          }
        case PRESTO_JOB:
          {
            mergePrestoJob(other.getPrestoJob());
            break;
          }
        case JOBTYPE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                stepId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getHadoopJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 2;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getSparkJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 3;
                break;
              } // case 26
            case 34:
              {
                input.readMessage(getPysparkJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 4;
                break;
              } // case 34
            case 42:
              {
                input.readMessage(getHiveJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 5;
                break;
              } // case 42
            case 50:
              {
                input.readMessage(getPigJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 6;
                break;
              } // case 50
            case 58:
              {
                input.readMessage(getSparkSqlJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 7;
                break;
              } // case 58
            case 66:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                    input.readMessage(
                        LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableLabels()
                    .getMutableMap()
                    .put(labels__.getKey(), labels__.getValue());
                bitField0_ |= 0x00000200;
                break;
              } // case 66
            case 74:
              {
                input.readMessage(getSchedulingFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000400;
                break;
              } // case 74
            case 82:
              {
                java.lang.String s = input.readStringRequireUtf8();
                ensurePrerequisiteStepIdsIsMutable();
                prerequisiteStepIds_.add(s);
                break;
              } // case 82
            case 90:
              {
                input.readMessage(getSparkRJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 11;
                break;
              } // case 90
            case 98:
              {
                input.readMessage(getPrestoJobFieldBuilder().getBuilder(), extensionRegistry);
                jobTypeCase_ = 12;
                break;
              } // case 98
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int jobTypeCase_ = 0;
    private java.lang.Object jobType_;

    public JobTypeCase getJobTypeCase() {
      return JobTypeCase.forNumber(jobTypeCase_);
    }

    public Builder clearJobType() {
      jobTypeCase_ = 0;
      jobType_ = null;
      onChanged();
      return this;
    }
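
    // Illustrative usage (hedged): setting any job type replaces whatever
    // oneof case was previously populated, so the last setter wins. The
    // setters and default instances below follow the generated pattern and
    // are used here as hypothetical examples.
    //
    //   builder
    //       .setSparkJob(com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance())
    //       .setHiveJob(com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance());
    //   // builder.getJobTypeCase() == JobTypeCase.HIVE_JOB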

    private int bitField0_;

    private java.lang.Object stepId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The step id. The id must be unique among all jobs
     * within the template.
     * The step id is used as a prefix for the job id, as the job's
     * `goog-dataproc-workflow-step-id` label, and in the
     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
     * field from other steps.
     * The id must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and hyphens (-). It cannot begin or end with an
     * underscore or hyphen, and must consist of between 3 and 50 characters.
     * </pre>
     *
     * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The stepId.
     */
    public java.lang.String getStepId() {
      java.lang.Object ref = stepId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        stepId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The step id. The id must be unique among all jobs
     * within the template.
     * The step id is used as a prefix for the job id, as the job's
     * `goog-dataproc-workflow-step-id` label, and in the
     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
     * field from other steps.
     * The id must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and hyphens (-). It cannot begin or end with an
     * underscore or hyphen, and must consist of between 3 and 50 characters.
     * </pre>
     *
     * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for stepId.
     */
    public com.google.protobuf.ByteString getStepIdBytes() {
      java.lang.Object ref = stepId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        stepId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The step id. The id must be unique among all jobs
     * within the template.
     * The step id is used as a prefix for the job id, as the job's
     * `goog-dataproc-workflow-step-id` label, and in the
     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
     * field from other steps.
     * The id must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and hyphens (-). It cannot begin or end with an
     * underscore or hyphen, and must consist of between 3 and 50 characters.
     * </pre>
     *
     * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The stepId to set.
     * @return This builder for chaining.
     */
    public Builder setStepId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      stepId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The step id. The id must be unique among all jobs
     * within the template.
     * The step id is used as a prefix for the job id, as the job's
     * `goog-dataproc-workflow-step-id` label, and in the
     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
     * field from other steps.
     * The id must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and hyphens (-). It cannot begin or end with an
     * underscore or hyphen, and must consist of between 3 and 50 characters.
     * </pre>
     *
     * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearStepId() {
      stepId_ = getDefaultInstance().getStepId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The step id. The id must be unique among all jobs
     * within the template.
     * The step id is used as a prefix for the job id, as the job's
     * `goog-dataproc-workflow-step-id` label, and in the
     * [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
     * field from other steps.
     * The id must contain only letters (a-z, A-Z), numbers (0-9),
     * underscores (_), and hyphens (-). It cannot begin or end with an
     * underscore or hyphen, and must consist of between 3 and 50 characters.
     * </pre>
     *
     * <code>string step_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for stepId to set.
     * @return This builder for chaining.
     */
    public Builder setStepIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      stepId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
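
    // Usage sketch (illustrative, not generated code): "teragen" below is a
    // hypothetical step id that satisfies the 3-50 character rule documented
    // above. The bytes overload additionally verifies that the payload is
    // valid UTF-8 before storing it.
    //
    //   OrderedJob.Builder step = OrderedJob.newBuilder();
    //   step.setStepId("teragen");
    //   // Equivalent byte-oriented form:
    //   step.setStepIdBytes(com.google.protobuf.ByteString.copyFromUtf8("teragen"));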

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HadoopJob,
            com.google.cloud.dataproc.v1.HadoopJob.Builder,
            com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
        hadoopJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the hadoopJob field is set.
     */
    @java.lang.Override
    public boolean hasHadoopJob() {
      return jobTypeCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The hadoopJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HadoopJob getHadoopJob() {
      if (hadoopJobBuilder_ == null) {
        if (jobTypeCase_ == 2) {
          return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 2) {
          return hadoopJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
      if (hadoopJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        hadoopJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setHadoopJob(com.google.cloud.dataproc.v1.HadoopJob.Builder builderForValue) {
      if (hadoopJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        hadoopJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeHadoopJob(com.google.cloud.dataproc.v1.HadoopJob value) {
      if (hadoopJobBuilder_ == null) {
        if (jobTypeCase_ == 2
            && jobType_ != com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.HadoopJob.newBuilder(
                      (com.google.cloud.dataproc.v1.HadoopJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 2) {
          hadoopJobBuilder_.mergeFrom(value);
        } else {
          hadoopJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearHadoopJob() {
      if (hadoopJobBuilder_ == null) {
        if (jobTypeCase_ == 2) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 2) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        hadoopJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.HadoopJob.Builder getHadoopJobBuilder() {
      return getHadoopJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HadoopJobOrBuilder getHadoopJobOrBuilder() {
      if ((jobTypeCase_ == 2) && (hadoopJobBuilder_ != null)) {
        return hadoopJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 2) {
          return (com.google.cloud.dataproc.v1.HadoopJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hadoop job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HadoopJob hadoop_job = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HadoopJob,
            com.google.cloud.dataproc.v1.HadoopJob.Builder,
            com.google.cloud.dataproc.v1.HadoopJobOrBuilder>
        getHadoopJobFieldBuilder() {
      if (hadoopJobBuilder_ == null) {
        if (!(jobTypeCase_ == 2)) {
          jobType_ = com.google.cloud.dataproc.v1.HadoopJob.getDefaultInstance();
        }
        hadoopJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.HadoopJob,
                com.google.cloud.dataproc.v1.HadoopJob.Builder,
                com.google.cloud.dataproc.v1.HadoopJobOrBuilder>(
                (com.google.cloud.dataproc.v1.HadoopJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 2;
      onChanged();
      return hadoopJobBuilder_;
    }
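
    // Usage sketch (assumed jar URI and step id, not generated code):
    // hadoop_job is one member of the job_type oneof, so setting it flips
    // getJobTypeCase() to HADOOP_JOB and makes hasHadoopJob() return true.
    //
    //   OrderedJob job =
    //       OrderedJob.newBuilder()
    //           .setStepId("teragen")
    //           .setHadoopJob(
    //               HadoopJob.newBuilder()
    //                   .setMainJarFileUri("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar"))
    //           .build();
    //   assert job.getJobTypeCase() == OrderedJob.JobTypeCase.HADOOP_JOB;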

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkJob,
            com.google.cloud.dataproc.v1.SparkJob.Builder,
            com.google.cloud.dataproc.v1.SparkJobOrBuilder>
        sparkJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the sparkJob field is set.
     */
    @java.lang.Override
    public boolean hasSparkJob() {
      return jobTypeCase_ == 3;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The sparkJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkJob getSparkJob() {
      if (sparkJobBuilder_ == null) {
        if (jobTypeCase_ == 3) {
          return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 3) {
          return sparkJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
      if (sparkJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        sparkJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 3;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkJob(com.google.cloud.dataproc.v1.SparkJob.Builder builderForValue) {
      if (sparkJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        sparkJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 3;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeSparkJob(com.google.cloud.dataproc.v1.SparkJob value) {
      if (sparkJobBuilder_ == null) {
        if (jobTypeCase_ == 3
            && jobType_ != com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.SparkJob.newBuilder(
                      (com.google.cloud.dataproc.v1.SparkJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 3) {
          sparkJobBuilder_.mergeFrom(value);
        } else {
          sparkJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 3;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearSparkJob() {
      if (sparkJobBuilder_ == null) {
        if (jobTypeCase_ == 3) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 3) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        sparkJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.SparkJob.Builder getSparkJobBuilder() {
      return getSparkJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkJobOrBuilder getSparkJobOrBuilder() {
      if ((jobTypeCase_ == 3) && (sparkJobBuilder_ != null)) {
        return sparkJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 3) {
          return (com.google.cloud.dataproc.v1.SparkJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Spark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkJob spark_job = 3 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkJob,
            com.google.cloud.dataproc.v1.SparkJob.Builder,
            com.google.cloud.dataproc.v1.SparkJobOrBuilder>
        getSparkJobFieldBuilder() {
      if (sparkJobBuilder_ == null) {
        if (!(jobTypeCase_ == 3)) {
          jobType_ = com.google.cloud.dataproc.v1.SparkJob.getDefaultInstance();
        }
        sparkJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.SparkJob,
                com.google.cloud.dataproc.v1.SparkJob.Builder,
                com.google.cloud.dataproc.v1.SparkJobOrBuilder>(
                (com.google.cloud.dataproc.v1.SparkJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 3;
      onChanged();
      return sparkJobBuilder_;
    }
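
    // Builder-overload sketch (the main class name is a hypothetical example):
    // passing a SparkJob.Builder lets the generated setter call build() for
    // you; both forms below leave the oneof in the SPARK_JOB case.
    //
    //   SparkJob.Builder spark = SparkJob.newBuilder().setMainClass("org.example.WordCount");
    //   OrderedJob viaBuilder = OrderedJob.newBuilder().setSparkJob(spark).build();
    //   OrderedJob viaMessage = OrderedJob.newBuilder().setSparkJob(spark.build()).build();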

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PySparkJob,
            com.google.cloud.dataproc.v1.PySparkJob.Builder,
            com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
        pysparkJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the pysparkJob field is set.
     */
    @java.lang.Override
    public boolean hasPysparkJob() {
      return jobTypeCase_ == 4;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The pysparkJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PySparkJob getPysparkJob() {
      if (pysparkJobBuilder_ == null) {
        if (jobTypeCase_ == 4) {
          return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 4) {
          return pysparkJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
      if (pysparkJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        pysparkJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 4;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPysparkJob(com.google.cloud.dataproc.v1.PySparkJob.Builder builderForValue) {
      if (pysparkJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        pysparkJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 4;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergePysparkJob(com.google.cloud.dataproc.v1.PySparkJob value) {
      if (pysparkJobBuilder_ == null) {
        if (jobTypeCase_ == 4
            && jobType_ != com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.PySparkJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PySparkJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 4) {
          pysparkJobBuilder_.mergeFrom(value);
        } else {
          pysparkJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 4;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearPysparkJob() {
      if (pysparkJobBuilder_ == null) {
        if (jobTypeCase_ == 4) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 4) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        pysparkJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.PySparkJob.Builder getPysparkJobBuilder() {
      return getPysparkJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PySparkJobOrBuilder getPysparkJobOrBuilder() {
      if ((jobTypeCase_ == 4) && (pysparkJobBuilder_ != null)) {
        return pysparkJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 4) {
          return (com.google.cloud.dataproc.v1.PySparkJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a PySpark job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PySparkJob pyspark_job = 4 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PySparkJob,
            com.google.cloud.dataproc.v1.PySparkJob.Builder,
            com.google.cloud.dataproc.v1.PySparkJobOrBuilder>
        getPysparkJobFieldBuilder() {
      if (pysparkJobBuilder_ == null) {
        if (!(jobTypeCase_ == 4)) {
          jobType_ = com.google.cloud.dataproc.v1.PySparkJob.getDefaultInstance();
        }
        pysparkJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PySparkJob,
                com.google.cloud.dataproc.v1.PySparkJob.Builder,
                com.google.cloud.dataproc.v1.PySparkJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PySparkJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 4;
      onChanged();
      return pysparkJobBuilder_;
    }
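
    // Merge-semantics sketch (hypothetical URI and arg): mergePysparkJob()
    // merges field by field when PYSPARK_JOB is already the active case, and
    // otherwise simply replaces whatever the oneof held before.
    //
    //   OrderedJob.Builder b =
    //       OrderedJob.newBuilder()
    //           .setPysparkJob(
    //               PySparkJob.newBuilder().setMainPythonFileUri("gs://bucket/main.py"));
    //   // Same case, so the arg is appended and the URI above is preserved.
    //   b.mergePysparkJob(PySparkJob.newBuilder().addArgs("--dry-run").build());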

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HiveJob,
            com.google.cloud.dataproc.v1.HiveJob.Builder,
            com.google.cloud.dataproc.v1.HiveJobOrBuilder>
        hiveJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the hiveJob field is set.
     */
    @java.lang.Override
    public boolean hasHiveJob() {
      return jobTypeCase_ == 5;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The hiveJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HiveJob getHiveJob() {
      if (hiveJobBuilder_ == null) {
        if (jobTypeCase_ == 5) {
          return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 5) {
          return hiveJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
      if (hiveJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        hiveJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 5;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setHiveJob(com.google.cloud.dataproc.v1.HiveJob.Builder builderForValue) {
      if (hiveJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        hiveJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 5;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeHiveJob(com.google.cloud.dataproc.v1.HiveJob value) {
      if (hiveJobBuilder_ == null) {
        if (jobTypeCase_ == 5
            && jobType_ != com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.HiveJob.newBuilder(
                      (com.google.cloud.dataproc.v1.HiveJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 5) {
          hiveJobBuilder_.mergeFrom(value);
        } else {
          hiveJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 5;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearHiveJob() {
      if (hiveJobBuilder_ == null) {
        if (jobTypeCase_ == 5) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 5) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        hiveJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.HiveJob.Builder getHiveJobBuilder() {
      return getHiveJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.HiveJobOrBuilder getHiveJobOrBuilder() {
      if ((jobTypeCase_ == 5) && (hiveJobBuilder_ != null)) {
        return hiveJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 5) {
          return (com.google.cloud.dataproc.v1.HiveJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Hive job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.HiveJob hive_job = 5 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.HiveJob,
            com.google.cloud.dataproc.v1.HiveJob.Builder,
            com.google.cloud.dataproc.v1.HiveJobOrBuilder>
        getHiveJobFieldBuilder() {
      if (hiveJobBuilder_ == null) {
        if (!(jobTypeCase_ == 5)) {
          jobType_ = com.google.cloud.dataproc.v1.HiveJob.getDefaultInstance();
        }
        hiveJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.HiveJob,
                com.google.cloud.dataproc.v1.HiveJob.Builder,
                com.google.cloud.dataproc.v1.HiveJobOrBuilder>(
                (com.google.cloud.dataproc.v1.HiveJob) jobType_, getParentForChildren(), isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 5;
      onChanged();
      return hiveJobBuilder_;
    }
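
    // Clear-semantics sketch (hypothetical jar URI): clearHiveJob() resets the
    // oneof only while HIVE_JOB is the active case, so it never clobbers a
    // different job type that was set afterwards.
    //
    //   OrderedJob.Builder b =
    //       OrderedJob.newBuilder()
    //           .setHiveJob(HiveJob.newBuilder().addJarFileUris("gs://bucket/hive-udfs.jar"));
    //   b.clearHiveJob();
    //   assert b.getJobTypeCase() == OrderedJob.JobTypeCase.JOBTYPE_NOT_SET;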

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PigJob,
            com.google.cloud.dataproc.v1.PigJob.Builder,
            com.google.cloud.dataproc.v1.PigJobOrBuilder>
        pigJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the pigJob field is set.
     */
    @java.lang.Override
    public boolean hasPigJob() {
      return jobTypeCase_ == 6;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The pigJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PigJob getPigJob() {
      if (pigJobBuilder_ == null) {
        if (jobTypeCase_ == 6) {
          return (com.google.cloud.dataproc.v1.PigJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 6) {
          return pigJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob value) {
      if (pigJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        pigJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPigJob(com.google.cloud.dataproc.v1.PigJob.Builder builderForValue) {
      if (pigJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        pigJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergePigJob(com.google.cloud.dataproc.v1.PigJob value) {
      if (pigJobBuilder_ == null) {
        if (jobTypeCase_ == 6
            && jobType_ != com.google.cloud.dataproc.v1.PigJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.PigJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PigJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 6) {
          pigJobBuilder_.mergeFrom(value);
        } else {
          pigJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 6;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearPigJob() {
      if (pigJobBuilder_ == null) {
        if (jobTypeCase_ == 6) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 6) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        pigJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.PigJob.Builder getPigJobBuilder() {
      return getPigJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PigJobOrBuilder getPigJobOrBuilder() {
      if ((jobTypeCase_ == 6) && (pigJobBuilder_ != null)) {
        return pigJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 6) {
          return (com.google.cloud.dataproc.v1.PigJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Pig job.
     * </pre>
     *
     * <code>.google.cloud.dataproc.v1.PigJob pig_job = 6 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PigJob,
            com.google.cloud.dataproc.v1.PigJob.Builder,
            com.google.cloud.dataproc.v1.PigJobOrBuilder>
        getPigJobFieldBuilder() {
      if (pigJobBuilder_ == null) {
        if (!(jobTypeCase_ == 6)) {
          jobType_ = com.google.cloud.dataproc.v1.PigJob.getDefaultInstance();
        }
        pigJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PigJob,
                com.google.cloud.dataproc.v1.PigJob.Builder,
                com.google.cloud.dataproc.v1.PigJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PigJob) jobType_, getParentForChildren(), isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 6;
      onChanged();
      return pigJobBuilder_;
    }
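
    // Read-path sketch (the variable names are illustrative):
    // getPigJobOrBuilder() returns the nested builder's view when one exists
    // and the plain message (or default instance) otherwise, so it suits
    // read-only inspection without forcing a builder allocation.
    //
    //   PigJobOrBuilder view = orderedJobBuilder.getPigJobOrBuilder();
    //   boolean continueOnFailure = view.getContinueOnFailure();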

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkRJob,
            com.google.cloud.dataproc.v1.SparkRJob.Builder,
            com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
        sparkRJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the sparkRJob field is set.
     */
    @java.lang.Override
    public boolean hasSparkRJob() {
      return jobTypeCase_ == 11;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The sparkRJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkRJob getSparkRJob() {
      if (sparkRJobBuilder_ == null) {
        if (jobTypeCase_ == 11) {
          return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 11) {
          return sparkRJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
      if (sparkRJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        sparkRJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 11;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkRJob(com.google.cloud.dataproc.v1.SparkRJob.Builder builderForValue) {
      if (sparkRJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        sparkRJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 11;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeSparkRJob(com.google.cloud.dataproc.v1.SparkRJob value) {
      if (sparkRJobBuilder_ == null) {
        if (jobTypeCase_ == 11
            && jobType_ != com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.SparkRJob.newBuilder(
                      (com.google.cloud.dataproc.v1.SparkRJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 11) {
          sparkRJobBuilder_.mergeFrom(value);
        } else {
          sparkRJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 11;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearSparkRJob() {
      if (sparkRJobBuilder_ == null) {
        if (jobTypeCase_ == 11) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 11) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        sparkRJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.SparkRJob.Builder getSparkRJobBuilder() {
      return getSparkRJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkRJobOrBuilder getSparkRJobOrBuilder() {
      if ((jobTypeCase_ == 11) && (sparkRJobBuilder_ != null)) {
        return sparkRJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 11) {
          return (com.google.cloud.dataproc.v1.SparkRJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkR job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkRJob spark_r_job = 11 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkRJob,
            com.google.cloud.dataproc.v1.SparkRJob.Builder,
            com.google.cloud.dataproc.v1.SparkRJobOrBuilder>
        getSparkRJobFieldBuilder() {
      if (sparkRJobBuilder_ == null) {
        if (!(jobTypeCase_ == 11)) {
          jobType_ = com.google.cloud.dataproc.v1.SparkRJob.getDefaultInstance();
        }
        sparkRJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.SparkRJob,
                com.google.cloud.dataproc.v1.SparkRJob.Builder,
                com.google.cloud.dataproc.v1.SparkRJobOrBuilder>(
                (com.google.cloud.dataproc.v1.SparkRJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 11;
      onChanged();
      return sparkRJobBuilder_;
    }
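
    // Caution sketch: unlike the has/get accessors, getSparkRJobBuilder()
    // (through getSparkRJobFieldBuilder() above) eagerly switches the oneof to
    // SPARK_R_JOB, even if nothing is ever set on the returned builder.
    //
    //   OrderedJob.Builder b = OrderedJob.newBuilder();
    //   b.getSparkRJobBuilder(); // side effect: the case becomes SPARK_R_JOB
    //   assert b.hasSparkRJob();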

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkSqlJob,
            com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
            com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
        sparkSqlJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the sparkSqlJob field is set.
     */
    @java.lang.Override
    public boolean hasSparkSqlJob() {
      return jobTypeCase_ == 7;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The sparkSqlJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJob getSparkSqlJob() {
      if (sparkSqlJobBuilder_ == null) {
        if (jobTypeCase_ == 7) {
          return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 7) {
          return sparkSqlJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
      if (sparkSqlJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        sparkSqlJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 7;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setSparkSqlJob(
        com.google.cloud.dataproc.v1.SparkSqlJob.Builder builderForValue) {
      if (sparkSqlJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        sparkSqlJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 7;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeSparkSqlJob(com.google.cloud.dataproc.v1.SparkSqlJob value) {
      if (sparkSqlJobBuilder_ == null) {
        if (jobTypeCase_ == 7
            && jobType_ != com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.SparkSqlJob.newBuilder(
                      (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 7) {
          sparkSqlJobBuilder_.mergeFrom(value);
        } else {
          sparkSqlJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 7;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearSparkSqlJob() {
      if (sparkSqlJobBuilder_ == null) {
        if (jobTypeCase_ == 7) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 7) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        sparkSqlJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.SparkSqlJob.Builder getSparkSqlJobBuilder() {
      return getSparkSqlJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder getSparkSqlJobOrBuilder() {
      if ((jobTypeCase_ == 7) && (sparkSqlJobBuilder_ != null)) {
        return sparkSqlJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 7) {
          return (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a SparkSql job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.SparkSqlJob spark_sql_job = 7 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.SparkSqlJob,
            com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
            com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>
        getSparkSqlJobFieldBuilder() {
      if (sparkSqlJobBuilder_ == null) {
        if (!(jobTypeCase_ == 7)) {
          jobType_ = com.google.cloud.dataproc.v1.SparkSqlJob.getDefaultInstance();
        }
        sparkSqlJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.SparkSqlJob,
                com.google.cloud.dataproc.v1.SparkSqlJob.Builder,
                com.google.cloud.dataproc.v1.SparkSqlJobOrBuilder>(
                (com.google.cloud.dataproc.v1.SparkSqlJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 7;
      onChanged();
      return sparkSqlJobBuilder_;
    }
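
    // Last-setter-wins sketch (hypothetical query URI): all eight job fields
    // share the job_type oneof, so setting a second job type silently discards
    // the first.
    //
    //   OrderedJob job =
    //       OrderedJob.newBuilder()
    //           .setSparkSqlJob(SparkSqlJob.newBuilder().setQueryFileUri("gs://bucket/q.sql"))
    //           .setPrestoJob(PrestoJob.getDefaultInstance())
    //           .build();
    //   assert !job.hasSparkSqlJob() && job.hasPrestoJob();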

    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PrestoJob,
            com.google.cloud.dataproc.v1.PrestoJob.Builder,
            com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
        prestoJobBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the prestoJob field is set.
     */
    @java.lang.Override
    public boolean hasPrestoJob() {
      return jobTypeCase_ == 12;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The prestoJob.
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PrestoJob getPrestoJob() {
      if (prestoJobBuilder_ == null) {
        if (jobTypeCase_ == 12) {
          return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      } else {
        if (jobTypeCase_ == 12) {
          return prestoJobBuilder_.getMessage();
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
      if (prestoJobBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        jobType_ = value;
        onChanged();
      } else {
        prestoJobBuilder_.setMessage(value);
      }
      jobTypeCase_ = 12;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setPrestoJob(com.google.cloud.dataproc.v1.PrestoJob.Builder builderForValue) {
      if (prestoJobBuilder_ == null) {
        jobType_ = builderForValue.build();
        onChanged();
      } else {
        prestoJobBuilder_.setMessage(builderForValue.build());
      }
      jobTypeCase_ = 12;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergePrestoJob(com.google.cloud.dataproc.v1.PrestoJob value) {
      if (prestoJobBuilder_ == null) {
        if (jobTypeCase_ == 12
            && jobType_ != com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance()) {
          jobType_ =
              com.google.cloud.dataproc.v1.PrestoJob.newBuilder(
                      (com.google.cloud.dataproc.v1.PrestoJob) jobType_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          jobType_ = value;
        }
        onChanged();
      } else {
        if (jobTypeCase_ == 12) {
          prestoJobBuilder_.mergeFrom(value);
        } else {
          prestoJobBuilder_.setMessage(value);
        }
      }
      jobTypeCase_ = 12;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearPrestoJob() {
      if (prestoJobBuilder_ == null) {
        if (jobTypeCase_ == 12) {
          jobTypeCase_ = 0;
          jobType_ = null;
          onChanged();
        }
      } else {
        if (jobTypeCase_ == 12) {
          jobTypeCase_ = 0;
          jobType_ = null;
        }
        prestoJobBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.PrestoJob.Builder getPrestoJobBuilder() {
      return getPrestoJobFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    @java.lang.Override
    public com.google.cloud.dataproc.v1.PrestoJobOrBuilder getPrestoJobOrBuilder() {
      if ((jobTypeCase_ == 12) && (prestoJobBuilder_ != null)) {
        return prestoJobBuilder_.getMessageOrBuilder();
      } else {
        if (jobTypeCase_ == 12) {
          return (com.google.cloud.dataproc.v1.PrestoJob) jobType_;
        }
        return com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job is a Presto job.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.PrestoJob presto_job = 12 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.PrestoJob,
            com.google.cloud.dataproc.v1.PrestoJob.Builder,
            com.google.cloud.dataproc.v1.PrestoJobOrBuilder>
        getPrestoJobFieldBuilder() {
      if (prestoJobBuilder_ == null) {
        if (!(jobTypeCase_ == 12)) {
          jobType_ = com.google.cloud.dataproc.v1.PrestoJob.getDefaultInstance();
        }
        prestoJobBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.PrestoJob,
                com.google.cloud.dataproc.v1.PrestoJob.Builder,
                com.google.cloud.dataproc.v1.PrestoJobOrBuilder>(
                (com.google.cloud.dataproc.v1.PrestoJob) jobType_,
                getParentForChildren(),
                isClean());
        jobType_ = null;
      }
      jobTypeCase_ = 12;
      onChanged();
      return prestoJobBuilder_;
    }
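
    // Default-value sketch: when PRESTO_JOB is not the active case,
    // getPrestoJob() returns PrestoJob.getDefaultInstance() rather than null,
    // so call hasPrestoJob() first when "unset" must be distinguished from
    // "set to all defaults".
    //
    //   OrderedJob empty = OrderedJob.getDefaultInstance();
    //   assert !empty.hasPrestoJob();
    //   assert empty.getPrestoJob() == PrestoJob.getDefaultInstance();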

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

    private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
      if (labels_ == null) {
        return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      return labels_;
    }

    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
        internalGetMutableLabels() {
      if (labels_ == null) {
        labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
      }
      if (!labels_.isMutable()) {
        labels_ = labels_.copy();
      }
      bitField0_ |= 0x00000200;
      onChanged();
      return labels_;
    }

    public int getLabelsCount() {
      return internalGetLabels().getMap().size();
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    @java.lang.Override
    public boolean containsLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetLabels().getMap().containsKey(key);
    }
    /** Use {@link #getLabelsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getLabels() {
      return getLabelsMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
      return internalGetLabels().getMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    @java.lang.Override
    public /* nullable */ java.lang.String getLabelsOrDefault(
        java.lang.String key,
        /* nullable */
        java.lang.String defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    @java.lang.Override
    public java.lang.String getLabelsOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }

    public Builder clearLabels() {
      bitField0_ = (bitField0_ & ~0x00000200);
      internalGetMutableLabels().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder removeLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableLabels().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      bitField0_ |= 0x00000200;
      return internalGetMutableLabels().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder putLabels(java.lang.String key, java.lang.String value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableLabels().getMutableMap().put(key, value);
      bitField0_ |= 0x00000200;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The labels to associate with this job.
     * Label keys must be between 1 and 63 characters long, and must conform to
     * the following regular expression:
     * [&#92;p{Ll}&#92;p{Lo}][&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,62}
     * Label values must be between 1 and 63 characters long, and must conform to
     * the following regular expression: [&#92;p{Ll}&#92;p{Lo}&#92;p{N}_-]{0,63}
     * No more than 32 labels can be associated with a given job.
     * </pre>
     *
     * <code>map&lt;string, string&gt; labels = 8 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableLabels().getMutableMap().putAll(values);
      bitField0_ |= 0x00000200;
      return this;
    }
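
    // Usage sketch (illustrative, not emitted by protoc): the mutators above
    // null-check keys and values before touching the underlying MapField and
    // record presence in bitField0_, so labels compose fluently on the builder.
    // The label names are hypothetical; putLabels/removeLabels are preferred
    // over the deprecated getMutableLabels view.
    //
    //   OrderedJob job =
    //       OrderedJob.newBuilder()
    //           .setStepId("step-1")
    //           .putLabels("env", "prod")
    //           .putLabels("team", "data-eng")
    //           .build();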

    private com.google.cloud.dataproc.v1.JobScheduling scheduling_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobScheduling,
            com.google.cloud.dataproc.v1.JobScheduling.Builder,
            com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
        schedulingBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the scheduling field is set.
     */
    public boolean hasScheduling() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The scheduling.
     */
    public com.google.cloud.dataproc.v1.JobScheduling getScheduling() {
      if (schedulingBuilder_ == null) {
        return scheduling_ == null
            ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
            : scheduling_;
      } else {
        return schedulingBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
      if (schedulingBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        scheduling_ = value;
      } else {
        schedulingBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000400;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setScheduling(
        com.google.cloud.dataproc.v1.JobScheduling.Builder builderForValue) {
      if (schedulingBuilder_ == null) {
        scheduling_ = builderForValue.build();
      } else {
        schedulingBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000400;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeScheduling(com.google.cloud.dataproc.v1.JobScheduling value) {
      if (schedulingBuilder_ == null) {
        if (((bitField0_ & 0x00000400) != 0)
            && scheduling_ != null
            && scheduling_ != com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()) {
          getSchedulingBuilder().mergeFrom(value);
        } else {
          scheduling_ = value;
        }
      } else {
        schedulingBuilder_.mergeFrom(value);
      }
      bitField0_ |= 0x00000400;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearScheduling() {
      bitField0_ = (bitField0_ & ~0x00000400);
      scheduling_ = null;
      if (schedulingBuilder_ != null) {
        schedulingBuilder_.dispose();
        schedulingBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.JobScheduling.Builder getSchedulingBuilder() {
      bitField0_ |= 0x00000400;
      onChanged();
      return getSchedulingFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.cloud.dataproc.v1.JobSchedulingOrBuilder getSchedulingOrBuilder() {
      if (schedulingBuilder_ != null) {
        return schedulingBuilder_.getMessageOrBuilder();
      } else {
        return scheduling_ == null
            ? com.google.cloud.dataproc.v1.JobScheduling.getDefaultInstance()
            : scheduling_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Job scheduling configuration.
     * </pre>
     *
     * <code>
     * .google.cloud.dataproc.v1.JobScheduling scheduling = 9 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dataproc.v1.JobScheduling,
            com.google.cloud.dataproc.v1.JobScheduling.Builder,
            com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>
        getSchedulingFieldBuilder() {
      if (schedulingBuilder_ == null) {
        schedulingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dataproc.v1.JobScheduling,
                com.google.cloud.dataproc.v1.JobScheduling.Builder,
                com.google.cloud.dataproc.v1.JobSchedulingOrBuilder>(
                getScheduling(), getParentForChildren(), isClean());
        scheduling_ = null;
      }
      return schedulingBuilder_;
    }
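
    // Usage sketch (illustrative): a singular message field can be replaced
    // wholesale with setScheduling, overlaid field-by-field with
    // mergeScheduling, or edited in place via the lazily created nested
    // builder. This assumes JobScheduling's generated setMaxFailuresPerHour
    // accessor for its max_failures_per_hour field.
    //
    //   OrderedJob.Builder b = OrderedJob.newBuilder();
    //   b.setScheduling(
    //       JobScheduling.newBuilder().setMaxFailuresPerHour(5).build());
    //   b.getSchedulingBuilder().setMaxFailuresPerHour(3); // edit in place
    //   JobScheduling s = b.getScheduling(); // maxFailuresPerHour == 3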

    private com.google.protobuf.LazyStringList prerequisiteStepIds_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;

    private void ensurePrerequisiteStepIdsIsMutable() {
      if (!((bitField0_ & 0x00000800) != 0)) {
        prerequisiteStepIds_ = new com.google.protobuf.LazyStringArrayList(prerequisiteStepIds_);
        bitField0_ |= 0x00000800;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return A list containing the prerequisiteStepIds.
     */
    public com.google.protobuf.ProtocolStringList getPrerequisiteStepIdsList() {
      return prerequisiteStepIds_.getUnmodifiableView();
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The count of prerequisiteStepIds.
     */
    public int getPrerequisiteStepIdsCount() {
      return prerequisiteStepIds_.size();
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param index The index of the element to return.
     * @return The prerequisiteStepIds at the given index.
     */
    public java.lang.String getPrerequisiteStepIds(int index) {
      return prerequisiteStepIds_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the prerequisiteStepIds at the given index.
     */
    public com.google.protobuf.ByteString getPrerequisiteStepIdsBytes(int index) {
      return prerequisiteStepIds_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The prerequisiteStepIds to set.
     * @return This builder for chaining.
     */
    public Builder setPrerequisiteStepIds(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensurePrerequisiteStepIdsIsMutable();
      prerequisiteStepIds_.set(index, value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The prerequisiteStepIds to add.
     * @return This builder for chaining.
     */
    public Builder addPrerequisiteStepIds(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensurePrerequisiteStepIdsIsMutable();
      prerequisiteStepIds_.add(value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param values The prerequisiteStepIds to add.
     * @return This builder for chaining.
     */
    public Builder addAllPrerequisiteStepIds(java.lang.Iterable<java.lang.String> values) {
      ensurePrerequisiteStepIdsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, prerequisiteStepIds_);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPrerequisiteStepIds() {
      prerequisiteStepIds_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000800);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The list of prerequisite job step_ids.
     * If not specified, the job will start at the beginning of the workflow.
     * </pre>
     *
     * <code>repeated string prerequisite_step_ids = 10 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The bytes of the prerequisiteStepIds to add.
     * @return This builder for chaining.
     */
    public Builder addPrerequisiteStepIdsBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensurePrerequisiteStepIdsIsMutable();
      prerequisiteStepIds_.add(value);
      onChanged();
      return this;
    }
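
    // Usage sketch (illustrative): prerequisite_step_ids are the DAG edges of a
    // workflow template; a step runs only after every named prerequisite
    // completes. The step ids below are hypothetical.
    //
    //   OrderedJob join =
    //       OrderedJob.newBuilder()
    //           .setStepId("join")
    //           .addPrerequisiteStepIds("extract")
    //           .addPrerequisiteStepIds("transform")
    //           .build();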

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.OrderedJob)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.OrderedJob)
  private static final com.google.cloud.dataproc.v1.OrderedJob DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.OrderedJob();
  }

  public static com.google.cloud.dataproc.v1.OrderedJob getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<OrderedJob> PARSER =
      new com.google.protobuf.AbstractParser<OrderedJob>() {
        @java.lang.Override
        public OrderedJob parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<OrderedJob> parser() {
    return PARSER;
  }
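
  // Usage sketch (illustrative): parsePartialFrom above funnels all wire-format
  // decoding through Builder.mergeFrom and preserves the partially built message
  // on failure, so a message round-trips through its serialized form:
  //
  //   byte[] bytes = job.toByteArray();
  //   OrderedJob copy = OrderedJob.parseFrom(bytes); // uses PARSER internally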

  @java.lang.Override
  public com.google.protobuf.Parser<OrderedJob> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.OrderedJob getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
