This article collects Java code examples for the method org.apache.hadoop.hive.ql.plan.api.Query.setQueryId and shows how Query.setQueryId is used in practice. The examples were extracted from selected open-source projects hosted on platforms such as GitHub, Stack Overflow and Maven, so they offer solid reference value. The details of Query.setQueryId are as follows:
Package: org.apache.hadoop.hive.ql.plan.api
Class: Query
Method: setQueryId
Description: not available
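Before the project examples, here is a minimal, self-contained sketch of how the method is typically called. It mirrors what the QueryPlan constructors below do; the class name, query id string and SQL text are illustrative assumptions, not values produced by Hive.
import org.apache.hadoop.hive.ql.plan.api.Query;

public class SetQueryIdSketch {
  public static void main(String[] args) {
    // Build the Thrift-generated Query object and tag it with a query id,
    // the same way the QueryPlan constructors in the examples below do.
    Query query = new Query();
    String queryId = "hive_20240101000000_example";        // illustrative id; QueryPlan normally calls makeQueryId()
    query.setQueryId(queryId);
    query.putToQueryAttributes("queryString", "SELECT 1"); // attach the original SQL text
    System.out.println(query.getQueryId());                // prints the id that was just set
  }
}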
Code example source: apache/drill
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
    HiveOperation operation, Schema resultSchema) {
  this.queryString = queryString;
  rootTasks = new ArrayList<Task<? extends Serializable>>(sem.getAllRootTasks());
  reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
  fetchTask = sem.getFetchTask();
  // Note that inputs and outputs can be changed when the query gets executed
  inputs = sem.getAllInputs();
  outputs = sem.getAllOutputs();
  linfo = sem.getLineageInfo();
  tableAccessInfo = sem.getTableAccessInfo();
  columnAccessInfo = sem.getColumnAccessInfo();
  idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
  this.queryId = queryId == null ? makeQueryId() : queryId;
  query = new org.apache.hadoop.hive.ql.plan.api.Query();
  query.setQueryId(this.queryId);
  query.putToQueryAttributes("queryString", this.queryString);
  queryProperties = sem.getQueryProperties();
  queryStartTime = startTime;
  this.operation = operation;
  this.autoCommitValue = sem.getAutoCommitValue();
  this.resultSchema = resultSchema;
}
Code example source: apache/hive
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
    HiveOperation operation, Schema resultSchema) {
  this.queryString = queryString;
  rootTasks = new ArrayList<Task<? extends Serializable>>(sem.getAllRootTasks());
  reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
  fetchTask = sem.getFetchTask();
  // Note that inputs and outputs can be changed when the query gets executed
  inputs = sem.getAllInputs();
  outputs = sem.getAllOutputs();
  linfo = sem.getLineageInfo();
  tableAccessInfo = sem.getTableAccessInfo();
  columnAccessInfo = sem.getColumnAccessInfo();
  idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
  this.queryId = queryId == null ? makeQueryId() : queryId;
  query = new org.apache.hadoop.hive.ql.plan.api.Query();
  query.setQueryId(this.queryId);
  query.putToQueryAttributes("queryString", this.queryString);
  queryProperties = sem.getQueryProperties();
  queryStartTime = startTime;
  this.operation = operation;
  this.autoCommitValue = sem.getAutoCommitValue();
  this.resultSchema = resultSchema;
  // TODO: all this ACID stuff should be in some sub-object
  this.acidResourcesInQuery = sem.hasTransactionalInQuery();
  this.acidSinks = sem.getAcidFileSinks();
  this.acidDdlDesc = sem.getAcidDdlDesc();
  this.acidAnalyzeTable = sem.getAcidAnalyzeTable();
  this.cboInfo = sem.getCboInfo();
}
Code example source: apache/hive
// Fragment of the Thrift-generated setFieldValue(...) method (surrounding lines reconstructed):
if (value == null) {
  unsetQueryId();
} else {
  setQueryId((String)value);
}
Code example source: apache/drill
// Fragment of the Thrift-generated setFieldValue(...) method (surrounding lines reconstructed):
if (value == null) {
  unsetQueryId();
} else {
  setQueryId((String)value);
}
Code example source: org.apache.hadoop.hive/hive-exec
public QueryPlan(String queryString, BaseSemanticAnalyzer sem) {
  this.queryString = queryString;
  rootTasks = new ArrayList<Task<? extends Serializable>>();
  rootTasks.addAll(sem.getRootTasks());
  fetchTask = sem.getFetchTask();
  // Note that inputs and outputs can be changed when the query gets executed
  inputs = sem.getInputs();
  outputs = sem.getOutputs();
  linfo = sem.getLineageInfo();
  idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
  queryId = makeQueryId();
  query = new org.apache.hadoop.hive.ql.plan.api.Query();
  query.setQueryId(queryId);
  query.putToQueryAttributes("queryString", this.queryString);
  counters = new HashMap<String, HashMap<String, Long>>();
  done = new HashSet<String>();
  started = new HashSet<String>();
}
Code example source: com.facebook.presto.hive/hive-apache
public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long startTime, String queryId,
    String operationName) {
  this.queryString = queryString;
  rootTasks = new ArrayList<Task<? extends Serializable>>();
  this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
  rootTasks.addAll(sem.getRootTasks());
  fetchTask = sem.getFetchTask();
  // Note that inputs and outputs can be changed when the query gets executed
  inputs = sem.getInputs();
  outputs = sem.getOutputs();
  linfo = sem.getLineageInfo();
  tableAccessInfo = sem.getTableAccessInfo();
  columnAccessInfo = sem.getColumnAccessInfo();
  idToTableNameMap = new HashMap<String, String>(sem.getIdToTableNameMap());
  this.queryId = queryId == null ? makeQueryId() : queryId;
  query = new org.apache.hadoop.hive.ql.plan.api.Query();
  query.setQueryId(this.queryId);
  query.putToQueryAttributes("queryString", this.queryString);
  queryProperties = sem.getQueryProperties();
  queryStartTime = startTime;
  this.operationName = operationName;
}
Code example source: com.facebook.presto.hive/hive-apache
// Fragment of the Thrift-generated setFieldValue(...) method (surrounding lines reconstructed):
if (value == null) {
  unsetQueryId();
} else {
  setQueryId((String)value);
}
Code example source: org.apache.hadoop.hive/hive-exec
// Fragment of the Thrift-generated setFieldValue(...) method (surrounding lines reconstructed):
if (value == null) {
  unsetQueryId();
} else {
  setQueryId((String)value);
}
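The short fragments above appear to come from the Thrift-generated setFieldValue method of Query, which dispatches on the nested _Fields enum: a null value clears the field via unsetQueryId(), anything else is cast to String and forwarded to setQueryId. Below is a hedged sketch of driving that entry point directly, assuming the standard Thrift-generated _Fields.QUERY_ID constant and the setFieldValue/isSetQueryId accessors; the id string is illustrative.
import org.apache.hadoop.hive.ql.plan.api.Query;

public class SetFieldValueSketch {
  public static void main(String[] args) {
    Query query = new Query();
    // Generic Thrift accessor: ends up in the null-check branch shown in the fragments above.
    query.setFieldValue(Query._Fields.QUERY_ID, "query_20240101_example"); // illustrative id
    System.out.println(query.isSetQueryId()); // true once the id has been set
    query.setFieldValue(Query._Fields.QUERY_ID, null);                     // null clears the field
    System.out.println(query.isSetQueryId()); // false again
  }
}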