org.apache.hadoop.hive.ql.plan.api.Query.<init>()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(9.3k)|赞(0)|评价(0)|浏览(148)

本文整理了Java中org.apache.hadoop.hive.ql.plan.api.Query.<init>方法的一些代码示例,展示了Query.<init>的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Query.<init>方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.plan.api.Query
类名称:Query
方法名:<init>

Query.<init>介绍

[英]Performs a deep copy on other.
[中]基于 other 参数对象执行深度拷贝（即拷贝构造，并非“在其他计算机上”）。

代码示例

代码示例来源:origin: apache/hive

/** Returns a new {@link Query} that is a deep copy of this instance. */
public Query deepCopy() {
 Query cloned = new Query(this);
 return cloned;
}

代码示例来源:origin: apache/drill

/** Produces an independent deep copy of this Query via the copy constructor. */
public Query deepCopy() {
 return new Query(this); // copy constructor clones all nested state
}

代码示例来源:origin: apache/hive

/**
 * Copy constructor: performs a deep copy of {@code other}.
 * The queries list is cloned element by element so the new plan shares no
 * mutable state with the source; primitive flags are copied directly.
 */
public QueryPlan(QueryPlan other) {
 this.__isset_bitfield = other.__isset_bitfield;
 if (other.isSetQueries()) {
  List<Query> copiedQueries = new ArrayList<Query>(other.queries.size());
  for (Query q : other.queries) {
   copiedQueries.add(new Query(q));
  }
  this.queries = copiedQueries;
 }
 this.done = other.done;
 this.started = other.started;
}

代码示例来源:origin: apache/drill

/**
 * Deep-copy constructor. Clones each contained {@link Query}; the
 * isset bitfield and the boolean flags are plain value copies.
 */
public QueryPlan(QueryPlan other) {
 this.__isset_bitfield = other.__isset_bitfield;
 if (other.isSetQueries()) {
  List<Query> clonedList = new ArrayList<Query>(other.queries.size());
  for (Query sourceQuery : other.queries) {
   clonedList.add(new Query(sourceQuery));
  }
  this.queries = clonedList;
 }
 this.done = other.done;
 this.started = other.started;
}

代码示例来源:origin: apache/hive

// NOTE(review): fragment excerpted from generated Thrift deserialization code;
// braces around the loop body appear to have been lost in extraction — as written,
// only the first statement is inside the loop. In the original generated code all
// three statements run per iteration: allocate a Query, read it from the protocol,
// and append it to struct.queries. _list128, _elem129, iprot and struct are
// declared in surrounding code not shown here.
for (int _i130 = 0; _i130 < _list128.size; ++_i130)
 _elem129 = new Query();
 _elem129.read(iprot);
 struct.queries.add(_elem129);

代码示例来源:origin: apache/hive

@Override
 /**
  * Tuple-scheme decoder for QueryPlan: a 3-bit set announces which of the
  * optional fields (queries, done, started) follow on the wire.
  */
 public void read(org.apache.thrift.protocol.TProtocol prot, QueryPlan struct) throws org.apache.thrift.TException {
  TTupleProtocol iprot = (TTupleProtocol) prot;
  BitSet incoming = iprot.readBitSet(3);
  if (incoming.get(0)) {
   // Field 0: list of Query structs; the element count travels as a raw i32.
   org.apache.thrift.protocol.TList queryList = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
   struct.queries = new ArrayList<Query>(queryList.size);
   for (int idx = 0; idx < queryList.size; ++idx) {
    Query element = new Query();
    element.read(iprot);
    struct.queries.add(element);
   }
   struct.setQueriesIsSet(true);
  }
  if (incoming.get(1)) {
   // Field 1: done flag.
   struct.done = iprot.readBool();
   struct.setDoneIsSet(true);
  }
  if (incoming.get(2)) {
   // Field 2: started flag.
   struct.started = iprot.readBool();
   struct.setStartedIsSet(true);
  }
 }
}

代码示例来源:origin: apache/drill

@Override
 /**
  * Reads a QueryPlan in Thrift tuple encoding. The leading 3-bit set marks
  * which optional fields are present in the stream.
  */
 public void read(org.apache.thrift.protocol.TProtocol prot, QueryPlan struct) throws org.apache.thrift.TException {
  TTupleProtocol iprot = (TTupleProtocol) prot;
  BitSet presentFields = iprot.readBitSet(3);
  if (presentFields.get(0)) {
   // queries: STRUCT list whose length is sent as an i32.
   org.apache.thrift.protocol.TList listHeader = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
   struct.queries = new ArrayList<Query>(listHeader.size);
   for (int i = 0; i < listHeader.size; ++i) {
    Query q = new Query();
    q.read(iprot);
    struct.queries.add(q);
   }
   struct.setQueriesIsSet(true);
  }
  if (presentFields.get(1)) {
   struct.done = iprot.readBool();
   struct.setDoneIsSet(true);
  }
  if (presentFields.get(2)) {
   struct.started = iprot.readBool();
   struct.setStartedIsSet(true);
  }
 }
}

代码示例来源:origin: apache/drill

// NOTE(review): excerpt of generated Thrift code; the loop's braces were likely
// dropped by the page extraction, so syntactically only the first statement is in
// the loop body. The full generated source allocates, reads, and appends one
// Query per iteration. _list128, _elem129, iprot and struct come from enclosing
// code that is not shown here.
for (int _i130 = 0; _i130 < _list128.size; ++_i130)
 _elem129 = new Query();
 _elem129.read(iprot);
 struct.queries.add(_elem129);

代码示例来源:origin: apache/drill

/**
 * Captures the executable plan produced by semantic analysis.
 *
 * @param queryString  original SQL text of the query
 * @param sem          analyzer supplying root tasks, fetch task, lineage and access info
 * @param startTime    wall-clock start time recorded for the query
 * @param queryId      caller-supplied id; a fresh one is generated when {@code null}
 * @param operation    the Hive operation being executed
 * @param resultSchema schema of the result set
 */
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
        HiveOperation operation, Schema resultSchema) {
 this.queryString = queryString;
 this.rootTasks = new ArrayList<Task<? extends Serializable>>(sem.getAllRootTasks());
 this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
 this.fetchTask = sem.getFetchTask();
 // Inputs/outputs are live references: they can still change while the query executes.
 this.inputs = sem.getAllInputs();
 this.outputs = sem.getAllOutputs();
 this.linfo = sem.getLineageInfo();
 this.tableAccessInfo = sem.getTableAccessInfo();
 this.columnAccessInfo = sem.getColumnAccessInfo();
 this.idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
 this.queryId = (queryId == null) ? makeQueryId() : queryId;
 // Thrift-level descriptor mirroring the id and raw query text.
 this.query = new org.apache.hadoop.hive.ql.plan.api.Query();
 this.query.setQueryId(this.queryId);
 this.query.putToQueryAttributes("queryString", this.queryString);
 this.queryProperties = sem.getQueryProperties();
 this.queryStartTime = startTime;
 this.operation = operation;
 this.autoCommitValue = sem.getAutoCommitValue();
 this.resultSchema = resultSchema;
}

代码示例来源:origin: apache/hive

/**
 * Builds a query-plan snapshot from a completed semantic analysis, including
 * the ACID-related bookkeeping (transactional reads, acid sinks, DDL/analyze
 * descriptors) and CBO info.
 *
 * @param queryString  original SQL text of the query
 * @param sem          analyzer supplying tasks, lineage, access and ACID info
 * @param startTime    wall-clock start time recorded for the query
 * @param queryId      caller-supplied id; a fresh one is generated when {@code null}
 * @param operation    the Hive operation being executed
 * @param resultSchema schema of the result set
 */
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
        HiveOperation operation, Schema resultSchema) {
 this.queryString = queryString;
 this.rootTasks = new ArrayList<Task<? extends Serializable>>(sem.getAllRootTasks());
 this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
 this.fetchTask = sem.getFetchTask();
 // Inputs/outputs are live references: they can still change while the query executes.
 this.inputs = sem.getAllInputs();
 this.outputs = sem.getAllOutputs();
 this.linfo = sem.getLineageInfo();
 this.tableAccessInfo = sem.getTableAccessInfo();
 this.columnAccessInfo = sem.getColumnAccessInfo();
 this.idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
 this.queryId = (queryId == null) ? makeQueryId() : queryId;
 // Thrift-level descriptor mirroring the id and raw query text.
 this.query = new org.apache.hadoop.hive.ql.plan.api.Query();
 this.query.setQueryId(this.queryId);
 this.query.putToQueryAttributes("queryString", this.queryString);
 this.queryProperties = sem.getQueryProperties();
 this.queryStartTime = startTime;
 this.operation = operation;
 this.autoCommitValue = sem.getAutoCommitValue();
 this.resultSchema = resultSchema;
 // TODO: all this ACID stuff should be in some sub-object
 this.acidResourcesInQuery = sem.hasTransactionalInQuery();
 this.acidSinks = sem.getAcidFileSinks();
 this.acidDdlDesc = sem.getAcidDdlDesc();
 this.acidAnalyzeTable = sem.getAcidAnalyzeTable();
 this.cboInfo = sem.getCboInfo();
}

代码示例来源:origin: com.facebook.presto.hive/hive-apache

/** Clones this Query deeply; delegates to the copy constructor. */
public Query deepCopy() {
 Query duplicate = new Query(this);
 return duplicate;
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

/** Returns a deep copy of this instance (backed by the copy constructor). */
public Query deepCopy() {
 return new Query(this); // all nested fields are duplicated
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

/**
 * Copy constructor: deep-copies {@code other}, including every contained
 * {@link Query}. The isset bit vector is cleared then OR-ed with the
 * source's, so presence flags match exactly.
 */
public QueryPlan(QueryPlan other) {
 __isset_bit_vector.clear();
 __isset_bit_vector.or(other.__isset_bit_vector);
 if (other.isSetQueries()) {
  List<Query> copied = new ArrayList<Query>();
  for (Query q : other.queries) {
   copied.add(new Query(q));
  }
  this.queries = copied;
 }
 this.done = other.done;
 this.started = other.started;
}

代码示例来源:origin: com.facebook.presto.hive/hive-apache

/**
 * Deep-copy constructor. Each Query in {@code other.queries} is cloned so
 * the new plan owns its own list; flags and bitfield are value copies.
 */
public QueryPlan(QueryPlan other) {
 this.__isset_bitfield = other.__isset_bitfield;
 if (other.isSetQueries()) {
  List<Query> duplicated = new ArrayList<Query>();
  for (Query sourceQuery : other.queries) {
   duplicated.add(new Query(sourceQuery));
  }
  this.queries = duplicated;
 }
 this.done = other.done;
 this.started = other.started;
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

/**
 * Legacy two-argument constructor: builds a plan from a finished semantic
 * analysis, generating a fresh query id and empty progress-tracking state.
 *
 * @param queryString original SQL text of the query
 * @param sem         analyzer supplying root tasks, fetch task and lineage info
 */
public QueryPlan(String queryString, BaseSemanticAnalyzer sem) {
 this.queryString = queryString;
 this.rootTasks = new ArrayList<Task<? extends Serializable>>();
 this.rootTasks.addAll(sem.getRootTasks());
 this.fetchTask = sem.getFetchTask();
 // Inputs/outputs are live references: they can still change while the query executes.
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
 this.linfo = sem.getLineageInfo();
 this.idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
 this.queryId = makeQueryId();
 // Thrift-level descriptor mirroring the id and raw query text.
 this.query = new org.apache.hadoop.hive.ql.plan.api.Query();
 this.query.setQueryId(queryId);
 this.query.putToQueryAttributes("queryString", this.queryString);
 this.counters = new HashMap<String, HashMap<String, Long>>();
 this.done = new HashSet<String>();
 this.started = new HashSet<String>();
}

代码示例来源:origin: org.apache.hadoop.hive/hive-exec

// Fragment from generated Thrift deserialization: materialize one Query element
// from the input protocol and append it to this plan's queries list.
// NOTE(review): _elem66 and iprot are declared in surrounding code not shown here.
_elem66 = new Query();
_elem66.read(iprot);
this.queries.add(_elem66);

代码示例来源:origin: com.facebook.presto.hive/hive-apache

// Fragment from generated Thrift deserialization: read a single Query off the
// protocol and add it to struct.queries.
// NOTE(review): _elem130, iprot and struct come from enclosing code not shown here.
_elem130 = new Query();
_elem130.read(iprot);
struct.queries.add(_elem130);

代码示例来源:origin: com.facebook.presto.hive/hive-apache

@Override
 /**
  * Decodes a QueryPlan from the compact tuple encoding. A 3-bit presence set
  * precedes the optional fields queries, done and started.
  */
 public void read(org.apache.thrift.protocol.TProtocol prot, QueryPlan struct) throws org.apache.thrift.TException {
  TTupleProtocol iprot = (TTupleProtocol) prot;
  BitSet present = iprot.readBitSet(3);
  if (present.get(0)) {
   // queries: STRUCT list; the element count is transmitted as an i32.
   org.apache.thrift.protocol.TList header = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
   struct.queries = new ArrayList<Query>(header.size);
   for (int pos = 0; pos < header.size; ++pos) {
    Query decoded = new Query();
    decoded.read(iprot);
    struct.queries.add(decoded);
   }
   struct.setQueriesIsSet(true);
  }
  if (present.get(1)) {
   struct.done = iprot.readBool();
   struct.setDoneIsSet(true);
  }
  if (present.get(2)) {
   struct.started = iprot.readBool();
   struct.setStartedIsSet(true);
  }
 }
}

代码示例来源:origin: com.facebook.presto.hive/hive-apache

/**
 * Builds a query-plan snapshot from a completed semantic analysis,
 * recording the operation by name rather than by enum.
 *
 * @param queryString   original SQL text of the query
 * @param sem           analyzer supplying tasks, lineage and access info
 * @param startTime     wall-clock start time recorded for the query
 * @param queryId       caller-supplied id; a fresh one is generated when {@code null}
 * @param operationName textual name of the operation being executed
 */
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
  String operationName) {
 this.queryString = queryString;
 this.rootTasks = new ArrayList<Task<? extends Serializable>>();
 this.rootTasks.addAll(sem.getRootTasks());
 this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
 this.fetchTask = sem.getFetchTask();
 // Inputs/outputs are live references: they can still change while the query executes.
 this.inputs = sem.getInputs();
 this.outputs = sem.getOutputs();
 this.linfo = sem.getLineageInfo();
 this.tableAccessInfo = sem.getTableAccessInfo();
 this.columnAccessInfo = sem.getColumnAccessInfo();
 this.idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
 this.queryId = (queryId == null) ? makeQueryId() : queryId;
 // Thrift-level descriptor mirroring the id and raw query text.
 this.query = new org.apache.hadoop.hive.ql.plan.api.Query();
 this.query.setQueryId(this.queryId);
 this.query.putToQueryAttributes("queryString", this.queryString);
 this.queryProperties = sem.getQueryProperties();
 this.queryStartTime = startTime;
 this.operationName = operationName;
}

相关文章