Usage and code examples of the org.apache.hadoop.hive.ql.metadata.Hive.getCurrentDatabase() method


This article collects Java code examples for the org.apache.hadoop.hive.ql.metadata.Hive.getCurrentDatabase() method and shows how Hive.getCurrentDatabase() is used in practice. The examples are drawn from selected open-source projects hosted on platforms such as GitHub, Stack Overflow, and Maven, so they are reasonably representative and should serve as useful references. Details of Hive.getCurrentDatabase() are as follows:
Package path: org.apache.hadoop.hive.ql.metadata.Hive
Class name: Hive
Method name: getCurrentDatabase

Introduction to Hive.getCurrentDatabase

Get the name of the current database.
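
Before looking at the project code below, a minimal usage sketch may help. It assumes a hive-site.xml is available on the classpath and an older Hive release on which Hive.getCurrentDatabase() still exists (newer releases track the current database in SessionState instead); the class name CurrentDatabaseExample is purely illustrative.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class CurrentDatabaseExample {
 public static void main(String[] args) throws HiveException {
  // Load the Hive configuration (hive-site.xml) from the classpath.
  HiveConf conf = new HiveConf();

  // Hive.get(conf) returns the thread-local Hive client, as in the CLI example below.
  Hive hive = Hive.get(conf);

  // The current database of the session; "default" unless it has been switched.
  String dbName = hive.getCurrentDatabase();
  System.out.println("Current database: " + dbName);
 }
}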

Code examples

Code example source: org.apache.hadoop.hive/hive-exec

/**
 * Get all table names for the current database.
 * @return List of table names
 * @throws HiveException
 */
public List<String> getAllTables() throws HiveException {
 return getAllTables(getCurrentDatabase());
}

Code example source: org.apache.hadoop.hive/hive-exec

/**
 * Returns all existing tables from the current database which match the given
 * pattern. The matching occurs as per Java regular expressions.
 *
 * @param tablePattern
 *          java re pattern
 * @return list of table names
 * @throws HiveException
 */
public List<String> getTablesByPattern(String tablePattern) throws HiveException {
 return getTablesByPattern(getCurrentDatabase(), tablePattern);
}
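
For orientation, here is a hedged usage sketch (not taken from any of the projects listed here) that combines getAllTables() from the previous example with getTablesByPattern(); the regular expression ".*_log" and the class name ListTablesExample are hypothetical.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ListTablesExample {
 public static void main(String[] args) throws HiveException {
  Hive hive = Hive.get(new HiveConf());

  // Every table in the current database (both calls delegate to getCurrentDatabase()).
  List<String> allTables = hive.getAllTables();

  // Only the tables whose names match the given Java regular expression.
  List<String> logTables = hive.getTablesByPattern(".*_log");

  System.out.println(allTables.size() + " tables in total, "
    + logTables.size() + " matching .*_log");
 }
}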

Code example source: edu.berkeley.cs.shark/hive-cli

/**
 * Return the name of the current database
 * @return the name of the current database or, if an error, null
 */
public String getCurrentDbName() {
 if (hive == null) {
  try {
   hive = Hive.get(conf);
  } catch (HiveException e) {
   return null;
  }
 }
 return hive.getCurrentDatabase();
}

Code example source: org.apache.hadoop.hive/hive-exec

private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException {
 db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(),
   dropIdx.getIndexName(), true);
 return 0;
}

Code example source: org.apache.hadoop.hive/hive-exec

private void addInputsOutputsAlterTable(String tableName, HashMap<String, String> partSpec)
 throws SemanticException {
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
   if ((partSpec == null) || (partSpec.isEmpty())) {
    outputs.add(new WriteEntity(tab));
   }
   else {
    Partition part = db.getPartition(tab, partSpec, false);
    if (part != null) {
     outputs.add(new WriteEntity(part));
    }
   }
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

public Index getIndex(String qualifiedIndexName) throws HiveException {
 String[] names = getQualifiedNames(qualifiedIndexName);
 switch (names.length) {
 case 3:
  return getIndex(names[0], names[1], names[2]);
 case 2:
  return getIndex(getCurrentDatabase(), names[0], names[1]);
 default:
  throw new HiveException("Invalid index name:" + qualifiedIndexName);
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

public Table newTable(String tableName) throws HiveException {
 String[] names = getQualifiedNames(tableName);
 switch (names.length) {
 case 2:
  return new Table(names[0], names[1]);
 case 1:
  return new Table(getCurrentDatabase(), names[0]);
 default:
  throw new HiveException("Invalid table name: " + tableName);
 }
}
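
To make the name resolution above concrete, a small sketch could look like the following; the database sales, the table events, and the class name NewTableExample are hypothetical. An unqualified name falls back to getCurrentDatabase().

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class NewTableExample {
 public static void main(String[] args) throws HiveException {
  Hive hive = Hive.get(new HiveConf());

  // Two name parts: the database is taken from the qualified name.
  Table qualified = hive.newTable("sales.events");

  // One name part: the database defaults to hive.getCurrentDatabase().
  Table unqualified = hive.newTable("events");

  System.out.println(qualified.getDbName() + " vs. " + unqualified.getDbName());
 }
}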

Code example source: org.apache.hadoop.hive/hive-exec

Table tab = db.getTable(db.getCurrentDatabase(), tblName, false);
if (tab != null) {
 inputs.add(new ReadEntity(tab));
}
if (partSpecs.isEmpty()) {
 // touch the table itself
 AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
   db.getCurrentDatabase(), tblName, null,
   AlterTableDesc.AlterTableTypes.TOUCH);
 outputs.add(new WriteEntity(tab));
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   touchDesc), conf));
} else {
 // touch each of the given partitions
 for (Map<String, String> partSpec : partSpecs) {
  AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(
    db.getCurrentDatabase(), tblName, partSpec,
    AlterTableDesc.AlterTableTypes.TOUCH);
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
    touchDesc), conf));
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeAlterTableRename(ASTNode ast) throws SemanticException {
 String tblName = getUnescapedName((ASTNode)ast.getChild(0));
 AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
   getUnescapedName((ASTNode)ast.getChild(1)));
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tblName, false);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
   outputs.add(new WriteEntity(tab));
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
 }
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   alterTblDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeAlterTableModifyCols(ASTNode ast,
  AlterTableTypes alterType) throws SemanticException {
 String tblName = getUnescapedName((ASTNode)ast.getChild(0));
 List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
 AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
   alterType);
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tblName, false);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
   outputs.add(new WriteEntity(tab));
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
 }
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   alterTblDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

Table tab = db.getTable(db.getCurrentDatabase(), tblName, false);
if (tab != null) {
 inputs.add(new ReadEntity(tab));
}
AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
  db.getCurrentDatabase(), tblName, partSpec,
  (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
  archiveDesc), conf));

Code example source: org.apache.hadoop.hive/hive-exec

Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
if (tab != null) {
 inputs.add(new ReadEntity(tab));
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeDropTable(ASTNode ast, boolean expectView)
  throws SemanticException {
 String tableName = getUnescapedName((ASTNode)ast.getChild(0));
 boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
 // we want to signal an error if the table/view doesn't exist and we're
 // configured not to fail silently
 boolean throwException =
  !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tableName, throwException);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
   outputs.add(new WriteEntity(tab));
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
 }
 DropTableDesc dropTblDesc = new DropTableDesc(tableName, expectView);
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   dropTblDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeAlterTableProps(ASTNode ast, boolean expectView)
 throws SemanticException {
 String tableName = getUnescapedName((ASTNode)ast.getChild(0));
 HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
   .getChild(0));
 AlterTableDesc alterTblDesc =
  new AlterTableDesc(AlterTableTypes.ADDPROPS, expectView);
 alterTblDesc.setProps(mapProp);
 alterTblDesc.setOldName(tableName);
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
   outputs.add(new WriteEntity(tab));
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
 }
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   alterTblDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeAlterIndexProps(ASTNode ast)
 throws SemanticException {
 String baseTableName = getUnescapedName((ASTNode)ast.getChild(0));
 String indexName = unescapeIdentifier(ast.getChild(1).getText());
 HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
   .getChild(0));
 AlterIndexDesc alterIdxDesc =
  new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
 alterIdxDesc.setProps(mapProp);
 alterIdxDesc.setIndexName(indexName);
 alterIdxDesc.setBaseTableName(baseTableName);
 alterIdxDesc.setDbName(db.getCurrentDatabase());
 rootTasks.add(TaskFactory.get(new DDLWork(alterIdxDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

private List<Task<?>> getIndexBuilderMapRed(String baseTableName, String indexName,
  HashMap<String, String> partSpec) throws SemanticException {
 try {
  String dbName = db.getCurrentDatabase();
  Index index = db.getIndex(dbName, baseTableName, indexName);
  Table indexTbl = db.getTable(dbName, index.getIndexTableName());
  String baseTblName = index.getOrigTableName();
  Table baseTbl = db.getTable(dbName, baseTblName);
  String handlerCls = index.getIndexHandlerClass();
  HiveIndexHandler handler = HiveUtils.getIndexHandler(conf, handlerCls);
  List<Partition> indexTblPartitions = null;
  List<Partition> baseTblPartitions = null;
  if(indexTbl != null) {
   indexTblPartitions = new ArrayList<Partition>();
   baseTblPartitions = preparePartitions(baseTbl, partSpec,
     indexTbl, db, indexTblPartitions);
  }
  List<Task<?>> ret = handler.generateIndexBuildTaskList(baseTbl,
    index, indexTblPartitions, baseTblPartitions, indexTbl, getInputs(), getOutputs());
  return ret;
 } catch (Exception e) {
  throw new SemanticException(e);
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView)
 throws SemanticException {
 String tblName = getUnescapedName((ASTNode)ast.getChild(0));
 // get table metadata
 List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 DropTableDesc dropTblDesc =
  new DropTableDesc(tblName, partSpecs, expectView);
 try {
  Table tab = db.getTable(db.getCurrentDatabase(), tblName, false);
  if (tab != null) {
   inputs.add(new ReadEntity(tab));
  }
 } catch (HiveException e) {
  throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
 }
 if (partSpecs != null) {
  boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
  // we want to signal an error if the partition doesn't exist and we're
  // configured not to fail silently
  boolean throwException =
   !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
  addTablePartsOutputs(tblName, partSpecs, throwException, ast);
 }
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   dropTblDesc), conf));
}

Code example source: org.apache.hadoop.hive/hive-exec

if (tbl.getDbName() == null || "".equals(tbl.getDbName().trim())) {
 tbl.setDbName(getCurrentDatabase());
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeShowTableStatus(ASTNode ast) throws SemanticException {
 ShowTableStatusDesc showTblStatusDesc;
 String tableNames = getUnescapedName((ASTNode)ast.getChild(0));
 String dbName = db.getCurrentDatabase();
 int children = ast.getChildCount();
 HashMap<String, String> partSpec = null;
 if (children >= 2) {
  if (children > 3) {
   throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
  }
  for (int i = 1; i < children; i++) {
   ASTNode child = (ASTNode) ast.getChild(i);
   if (child.getToken().getType() == HiveParser.Identifier) {
    dbName = unescapeIdentifier(child.getText());
   } else if (child.getToken().getType() == HiveParser.TOK_PARTSPEC) {
    partSpec = getPartSpec(child);
   } else {
    throw new SemanticException(ErrorMsg.GENERIC_ERROR.getMsg());
   }
  }
 }
 showTblStatusDesc = new ShowTableStatusDesc(ctx.getResFile().toString(), dbName,
   tableNames, partSpec);
 rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
   showTblStatusDesc), conf));
 setFetchTask(createFetchTask(showTblStatusDesc.getSchema()));
}

Code example source: org.apache.hadoop.hive/hive-exec

private void analyzeShowTables(ASTNode ast) throws SemanticException {
 ShowTablesDesc showTblsDesc;
 String dbName = db.getCurrentDatabase();
 String tableNames = null;
 // ... (remainder of this snippet is truncated in the source)
}
