Usage and code examples for the org.apache.hadoop.hive.ql.metadata.Hive class


This article collects Java code examples for the org.apache.hadoop.hive.ql.metadata.Hive class and shows how the class is used in practice. The examples come mainly from GitHub, Stack Overflow, and Maven, extracted from selected projects, and should serve as useful references. Details of the Hive class:
Package: org.apache.hadoop.hive.ql.metadata
Class name: Hive

About Hive

This class has functions that implement metadata/DDL operations using calls to the metastore. It has a metastore client instance that it uses to communicate with the metastore. It is a thread-local variable, and instances are accessed using the static get methods of this class.
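Because the instance is thread-local, callers typically obtain it through the static Hive.get(...) methods and release it with Hive.closeCurrent() when they are done, as several of the examples below do. A minimal, self-contained sketch of that pattern (the bare new HiveConf() and the printed database list are illustrative assumptions, not taken from the examples below):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class HiveHandleSketch {
  public static void main(String[] args) throws HiveException {
    // Assumed setup: a default HiveConf; real services usually take the conf from the session.
    HiveConf conf = new HiveConf();
    Hive db = Hive.get(conf);   // returns (or creates) the thread-local Hive instance
    try {
      // Any metadata call goes through the embedded metastore client.
      System.out.println(db.getAllDatabases());
    } finally {
      Hive.closeCurrent();      // release the thread-local instance and its metastore connection
    }
  }
}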

Code examples

Code example source: apache/hive

@Override
public void run() {
  try {
    // Encode the in-memory stats and persist them through the metastore client.
    RuntimeStat rec = encode(map);
    Hive.get().getMSC().addRuntimeStat(rec);
  } catch (TException | HiveException | IOException e) {
    logException("Exception while persisting runtime stat", e);
  }
}

Code example source: apache/hive

/**
 * Returns metadata for the table named tableName
 * @param tableName the name of the table
 * @return the table metadata
 * @throws HiveException if there's an internal error or if the
 * table doesn't exist
 */
public Table getTable(final String tableName) throws HiveException {
 return this.getTable(tableName, true);
}
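For reference, a hypothetical call site for this overload, written as a fragment in the same style as the snippets here; it assumes a HiveConf named conf is in scope (as in the sketch near the top of this article) and that a table named "web_logs" exists:

// Hypothetical usage: throws HiveException if "web_logs" does not exist.
Table webLogs = Hive.get(conf).getTable("web_logs");
System.out.println(webLogs.getDataLocation());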

Code example source: apache/hive

/**
 * Drop a database.
 * @param name name of the database to drop
 * @param deleteData if true, also delete the database's data from the warehouse
 * @param ignoreUnknownDb if true, will ignore NoSuchObjectException
 * @throws HiveException
 * @throws NoSuchObjectException
 */
public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
  throws HiveException, NoSuchObjectException {
 dropDatabase(name, deleteData, ignoreUnknownDb, false);
}
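A hedged usage sketch of this overload (the database name "scratch_db" is a placeholder; conf is assumed to be in scope as in the earlier sketch):

// Hypothetical call: drop "scratch_db" and its data, ignoring the case where it does not exist.
Hive.get(conf).dropDatabase("scratch_db", true, true);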

Code example source: apache/hive

HiveUpdater(HiveConf conf, boolean fileRename) throws HiveException {
 hive = Hive.get(conf);
 Hive.set(hive);
 doFileRename = fileRename;
}

Code example source: apache/hive

private ImmutableMap<String, Long> dumpMetaCallTimingWithoutEx(String phase) {
 try {
  return Hive.get().dumpAndClearMetaCallTiming(phase);
 } catch (HiveException he) {
  LOG.warn("Caught exception attempting to write metadata call information " + he, he);
 }
 return null;
}

Code example source: apache/hive

private static Hive getInternal(HiveConf c, boolean needsRefresh, boolean isFastCheck,
  boolean doRegisterAllFns) throws HiveException {
 Hive db = hiveDB.get();
 if (db == null || !db.isCurrentUserOwner() || needsRefresh
   || (c != null && !isCompatible(db, c, isFastCheck))) {
  if (db != null) {
   LOG.debug("Creating new db. db = " + db + ", needsRefresh = " + needsRefresh +
       ", db.isCurrentUserOwner = " + db.isCurrentUserOwner());
   closeCurrent();
  }
  db = create(c, doRegisterAllFns);
 }
 if (c != null) {
  db.conf = c;
 }
 return db;
}

Code example source: apache/hive

@Test
public void testDataDeletion() throws HiveException,
 IOException, TException {
 Database db = new Database();
 db.setName(dbName);
 hive.createDatabase(db);
 Table table = new Table(dbName, tableName);
 table.setDbName(dbName);
 table.setInputFormatClass(TextInputFormat.class);
 table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
 table.setPartCols(partCols);
 hive.createTable(table);
 table = hive.getTable(dbName, tableName);
 Path fakeTable = table.getPath().getParent().suffix(
   Path.SEPARATOR + "faketable");
 fs = fakeTable.getFileSystem(hive.getConf());
 fs.mkdirs(fakeTable);
 fs.deleteOnExit(fakeTable);
 Path fakePart = new Path(table.getDataLocation().toString(),
   "fakepartition=fakevalue");
 fs.mkdirs(fakePart);
 fs.deleteOnExit(fakePart);
 hive.dropTable(dbName, tableName, true, true);
 assertFalse(fs.exists(fakePart));
 hive.dropDatabase(dbName);
 assertFalse(fs.exists(fakeTable));
}

Code example source: apache/hive

@BeforeClass
public static void init() throws Exception {
 queryState = new QueryState.Builder().build();
 conf = queryState.getConf();
 conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
   "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
 SessionState.start(conf);
 // Create a table so we can work against it
 Hive h = Hive.get(conf);
 List<String> cols = new ArrayList<String>();
 cols.add("a");
 List<String> partCols = new ArrayList<String>();
 partCols.add("ds");
 h.createTable("foo", cols, partCols, OrcInputFormat.class, OrcOutputFormat.class);
 Table t = h.getTable("foo");
 Map<String, String> partSpec = new HashMap<String, String>();
 partSpec.put("ds", "today");
 h.createPartition(t, partSpec);
}

Code example source: apache/hive

@Override
public Hive getHive() throws HiveException {
 return Hive.get((HiveConf)conf);
}

Code example source: apache/hive

private IMetaStoreClient getMetaStoreClient(boolean retryInCaseOfTokenExpiration) throws HiveSQLException {
 try {
  return Hive.get(getHiveConf()).getMSC();
 } catch (HiveException e) {
  throw new HiveSQLException("Failed to get metastore connection: " + e, e);
 } catch(MetaException e1) {
  if (hmsDelegationTokenStr != null && retryInCaseOfTokenExpiration) {
   LOG.info("Retrying failed metastore connection: " + e1, e1);
   Hive.closeCurrent();
   try {
    setDelegationToken(Hive.get(getHiveConf()).getDelegationToken(sessionUgi.getUserName(), getUserName()));
   } catch (HiveException e2) {
    throw new HiveSQLException("Error connect metastore to setup impersonation: " + e2, e2);
   }
   return getMetaStoreClient(false);
  } else {
   throw new HiveSQLException("Failed to get metastore connection: " + e1, e1);
  }
 }
}

Code example source: apache/hive

/**
 * Removes all databases and tables from the metastore
 */
public static void cleanupHMS(Hive hive, Warehouse wh, FsPermission defaultPerm)
 throws HiveException, MetaException, NoSuchObjectException {
 for (String dbName : hive.getAllDatabases()) {
  if (dbName.equals("default")) {
   continue;
  }
  try {
   Path path = getDbPath(hive, wh, dbName);
   FileSystem whFs = path.getFileSystem(hive.getConf());
   whFs.setPermission(path, defaultPerm);
  } catch (IOException ex) {
   //ignore
  }
  hive.dropDatabase(dbName, true, true, true);
 }
 //clean tables in default db
 for (String tablename : hive.getAllTables("default")) {
  hive.dropTable("default", tablename, true, true);
 }
}

Code example source: apache/hive

public String getDelegationTokenFromMetaStore(String owner)
  throws HiveSQLException, UnsupportedOperationException, LoginException, IOException {
 HiveConf hiveConf = getHiveConf();
 if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL) ||
   !hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
  throw new UnsupportedOperationException(
    "delegation token is can only be obtained for a secure remote metastore");
 }
 try {
  Hive.closeCurrent();
  return Hive.get(hiveConf).getDelegationToken(owner, owner);
 } catch (HiveException e) {
  if (e.getCause() instanceof UnsupportedOperationException) {
   throw (UnsupportedOperationException)e.getCause();
  } else {
   throw new HiveSQLException("Error connect metastore to setup impersonation", e);
  }
 }
}

Code example source: apache/hive

@AfterClass
public static void deInit() throws Exception {
 Hive h = Hive.get(conf);
 h.dropTable("foo");
}

Code example source: apache/hive

/**
 * Test basic Hive class interaction, that:
 * - We can have different Hive objects throughout the lifetime of this thread.
 */
public void testHiveCloseCurrent() throws Throwable {
 Hive hive1 = Hive.get();
 Hive.closeCurrent();
 Hive hive2 = Hive.get();
 Hive.closeCurrent();
 assertTrue(hive1 != hive2);
}

Code example source: apache/hive

private Table createPartitionedTable(String dbName, String tableName) throws Exception {
 try {
  hm.dropTable(dbName, tableName);
  hm.createTable(tableName,
          Arrays.asList("key", "value"),   // Data columns.
          Arrays.asList("ds", "hr"),       // Partition columns.
          TextInputFormat.class,
          HiveIgnoreKeyTextOutputFormat.class);
  return hm.getTable(dbName, tableName);
 }
 catch (Exception exception) {
  fail("Unable to drop and create table " + StatsUtils.getFullyQualifiedTableName(dbName, tableName)
    + " because " + StringUtils.stringifyException(exception));
  throw exception;
 }
}

Code example source: apache/hive

/**
 * Drops table along with the data in it. If the table doesn't exist then it
 * is a no-op
 *
 * @param tableName
 *          table to drop
 * @throws HiveException
 *           thrown if the drop fails
 */
public void dropTable(String tableName) throws HiveException {
 dropTable(tableName, false);
}
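A hedged usage sketch (the table name "tmp_table" is a placeholder; conf is assumed to be in scope as in the earlier sketch):

// Hypothetical call: drops "tmp_table" along with its data; a no-op if the table does not exist.
Hive.get(conf).dropTable("tmp_table");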

Code example source: apache/hive

private Table createTestTable() throws HiveException, AlreadyExistsException {
 Database db = new Database();
 db.setName(dbName);
 hive.createDatabase(db, true);
 Table table = new Table(dbName, tableName);
 table.setDbName(dbName);
 table.setInputFormatClass(TextInputFormat.class);
 table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
 table.setPartCols(partCols);
 hive.createTable(table);
 table = hive.getTable(dbName, tableName);
 Assert.assertTrue(table.getTTable().isSetId());
 table.getTTable().unsetId();
 for (Map<String, String> partSpec : parts) {
  hive.createPartition(table, partSpec);
 }
 return table;
}

Code example source: apache/hive

private void cancelDelegationToken() throws HiveSQLException {
 if (hmsDelegationTokenStr != null) {
  try {
   Hive.get(getHiveConf()).cancelDelegationToken(hmsDelegationTokenStr);
   hmsDelegationTokenStr = null;
   getHiveConf().setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, "");
  } catch (HiveException e) {
   throw new HiveSQLException("Couldn't cancel delegation token", e);
  }
 }
}

Code example source: apache/hive

private static Set<Partition> getAllPartitions(Table tab) throws HiveException {
 PerfLogger perfLogger = SessionState.getPerfLogger();
 perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
 Set<Partition> result = Hive.get().getAllPartitionsOf(tab);
 perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARTITION_RETRIEVING);
 return result;
}

Code example source: apache/hive

private IMetaStoreClient getMSC() throws HiveException {
 try {
  return hive.getMSC();
 } catch (MetaException ex) {
  throw new HiveException(ex);
 }
}
