本文整理了Java中org.apache.hadoop.hive.ql.metadata.Hive.createDatabase()
方法的一些代码示例,展示了Hive.createDatabase()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Hive.createDatabase()
方法的具体详情如下:
包路径:org.apache.hadoop.hive.ql.metadata.Hive
类名称:Hive
方法名:createDatabase
[英]Create a Database. Raise an error if a database with the same name already exists.
[中]创建一个数据库。如果已存在同名数据库,则引发错误。
代码示例来源:origin: apache/hive
/**
 * Creates a new database in the metastore, failing if one with the same
 * name is already present.
 *
 * @param db descriptor of the database to create
 * @throws AlreadyExistsException if a database with that name exists
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // Delegate with ifNotExist=false so an existing database is an error.
  createDatabase(db, false);
}
代码示例来源:origin: apache/drill
/**
 * Registers a brand-new database with the metastore; an already-existing
 * database of the same name is treated as an error.
 *
 * @param db the database to register
 * @throws AlreadyExistsException when the name is already taken
 * @throws HiveException for any other metastore problem
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // ifNotExist=false: duplicates must raise rather than be ignored.
  createDatabase(db, false);
}
代码示例来源:origin: apache/hive
// Snippet: create the database, ignoring the case where it already exists
// (ifNotExist=true suppresses AlreadyExistsException).
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db, true);
代码示例来源:origin: apache/hive
/**
 * Verifies that dropping a table/database with data purge also removes
 * stray directories that were planted under the table and database paths.
 */
@Test
public void testDataDeletion() throws HiveException,
    IOException, TException {
  // Create a database and a partitioned text table inside it.
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db);
  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);
  tbl = hive.getTable(dbName, tableName);

  // Plant a directory mimicking a stray table next to the real one.
  Path strayTableDir = tbl.getPath().getParent().suffix(
      Path.SEPARATOR + "faketable");
  fs = strayTableDir.getFileSystem(hive.getConf());
  fs.mkdirs(strayTableDir);
  fs.deleteOnExit(strayTableDir);

  // Plant a directory mimicking a stray partition under the table.
  Path strayPartDir = new Path(tbl.getDataLocation().toString(),
      "fakepartition=fakevalue");
  fs.mkdirs(strayPartDir);
  fs.deleteOnExit(strayPartDir);

  // Dropping with purge must sweep the stray directories away too.
  hive.dropTable(dbName, tableName, true, true);
  assertFalse(fs.exists(strayPartDir));
  hive.dropDatabase(dbName);
  assertFalse(fs.exists(strayTableDir));
}
代码示例来源:origin: apache/hive
// Snippet: create the database; throws AlreadyExistsException if present.
hm.createDatabase(db);
代码示例来源:origin: apache/hive
// Snippet: create the database; throws AlreadyExistsException if present.
hm.createDatabase(db);
代码示例来源:origin: apache/hive
/**
 * Creates a database from a parsed CREATE DATABASE descriptor.
 *
 * @param db    handle to the Hive metastore client
 * @param crtDb descriptor holding name, comment, location and properties
 * @return always 0; failures are reported via exceptions
 * @throws HiveException if creation fails, or — with a proper error code —
 *         if the database already exists and IF NOT EXISTS was not given
 */
private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
    throws HiveException {
  Database newDb = new Database();
  newDb.setName(crtDb.getName());
  newDb.setDescription(crtDb.getComment());
  newDb.setLocationUri(crtDb.getLocationUri());
  newDb.setParameters(crtDb.getDatabaseProperties());
  // The current session user becomes the owning principal.
  newDb.setOwnerName(SessionState.getUserFromAuthenticator());
  newDb.setOwnerType(PrincipalType.USER);
  try {
    makeLocationQualified(newDb);
    db.createDatabase(newDb, crtDb.getIfNotExists());
  } catch (AlreadyExistsException ex) {
    // Re-wrap so callers get a HiveException carrying a real error code.
    throw new HiveException(ex, ErrorMsg.DATABASE_ALREADY_EXISTS, crtDb.getName());
  }
  return 0;
}
代码示例来源:origin: apache/drill
/**
 * Creates a database from a parsed CREATE DATABASE descriptor.
 *
 * @param db    handle to the Hive metastore client
 * @param crtDb descriptor holding name, comment, location and properties
 * @return Always returns 0
 * @throws HiveException if creation fails, or — with an explicit error
 *         code — if the database already exists
 */
private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
    throws HiveException {
  Database database = new Database();
  database.setName(crtDb.getName());
  database.setDescription(crtDb.getComment());
  database.setLocationUri(crtDb.getLocationUri());
  database.setParameters(crtDb.getDatabaseProperties());
  // The current session user becomes the owning principal.
  database.setOwnerName(SessionState.getUserFromAuthenticator());
  database.setOwnerType(PrincipalType.USER);
  try {
    // Only qualify the location when not on the default name node —
    // presumably to avoid rewriting URIs unnecessarily; TODO confirm intent.
    if (!Utilities.isDefaultNameNode(conf)) {
      makeLocationQualified(database);
    }
    db.createDatabase(database, crtDb.getIfNotExists());
  }
  catch (AlreadyExistsException ex) {
    //it would be better if AlreadyExistsException had an errorCode field....
    // NOTE(review): "DATABSAE_ALREADY_EXISTS" is misspelled, but it very
    // likely matches the enum constant declared in this project's ErrorMsg —
    // verify against ErrorMsg before renaming, or the build will break.
    throw new HiveException(ex, ErrorMsg.DATABSAE_ALREADY_EXISTS, crtDb.getName());
  }
  return 0;
}
代码示例来源:origin: apache/hive
/**
 * Builds and registers a partitioned text table for tests, creating the
 * database first if needed, and materializes every configured partition.
 *
 * @return the table as re-read from the metastore, with its id unset
 * @throws HiveException on metastore failure
 * @throws AlreadyExistsException if an entity being created already exists
 */
private Table createTestTable() throws HiveException, AlreadyExistsException {
  // Make sure the target database exists (ignore if already there).
  Database db = new Database();
  db.setName(dbName);
  hive.createDatabase(db, true);

  // Define a partitioned text table and register it.
  Table tbl = new Table(dbName, tableName);
  tbl.setDbName(dbName);
  tbl.setInputFormatClass(TextInputFormat.class);
  tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  tbl.setPartCols(partCols);
  hive.createTable(tbl);

  // Re-fetch to hold the metastore's view; verify and clear the assigned id.
  tbl = hive.getTable(dbName, tableName);
  Assert.assertTrue(tbl.getTTable().isSetId());
  tbl.getTTable().unsetId();

  // Materialize each requested partition.
  for (Map<String, String> spec : parts) {
    hive.createPartition(tbl, spec);
  }
  return tbl;
}
代码示例来源:origin: com.facebook.presto.hive/hive-apache
/**
 * Creates a database in the metastore. An existing database with the same
 * name is reported as an error rather than ignored.
 *
 * @param db the database descriptor to create
 * @throws AlreadyExistsException if that database name is already in use
 * @throws HiveException on any other metastore failure
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  // Forward with ifNotExist=false so duplicates raise an exception.
  createDatabase(db, false);
}
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
/**
 * Strict database creation: delegates to the two-argument overload and
 * treats a pre-existing database of the same name as a failure.
 *
 * @param db database descriptor
 * @throws AlreadyExistsException when the database already exists
 * @throws HiveException for other metastore errors
 */
public void createDatabase(Database db) throws AlreadyExistsException, HiveException {
  createDatabase(db, false); // false = do not tolerate duplicates
}
代码示例来源:origin: apache/lens
/**
 * Creates a database in the Hive metastore on behalf of the given session.
 *
 * @param sessionid session whose Hive configuration is used
 * @param database  name of the database to create
 * @param ignore    if true, silently ignore an already-existing database
 * @throws LensException wrapping any metastore failure
 */
@Override
public void createDatabase(LensSessionHandle sessionid, String database, boolean ignore) throws LensException {
  try (SessionContext ignored = new SessionContext(sessionid)) {
    Database db = new Database();
    db.setName(database);
    Hive.get(getSession(sessionid).getHiveConf()).createDatabase(db, ignore);
  } catch (AlreadyExistsException | HiveException e) {
    throw new LensException(e);
  }
  // SLF4J idiom: parameterized message instead of eager string concatenation.
  log.info("Database created {}", database);
}
代码示例来源:origin: apache/lens
/**
 * Test fixture setup: starts a Hive session, creates a scratch database
 * named after the test class, and switches the session to it.
 *
 * @throws AlreadyExistsException if the scratch database already exists
 * @throws HiveException on metastore errors
 * @throws IOException if session startup performs failing I/O
 */
@BeforeTest
public void setup() throws AlreadyExistsException, HiveException, IOException {
  SessionState.start(conf);
  final String scratchDbName = TestDBStorage.class.getSimpleName();
  Hive metastoreClient = Hive.get(conf);
  Database scratchDb = new Database();
  scratchDb.setName(scratchDbName);
  metastoreClient.createDatabase(scratchDb);
  // All subsequent statements in this session target the scratch database.
  SessionState.get().setCurrentDatabase(scratchDbName);
}
代码示例来源:origin: apache/lens
/**
 * Prepares the statistics database and its event tables for the tests:
 * drops any leftovers from previous runs, then recreates the database and
 * the event table from scratch.
 *
 * @return the Hive configuration used to reach the metastore
 */
private HiveConf configureHiveTables() {
  assertNotNull(System.getProperty("hadoop.bin.path"));
  HiveConf conf = LensServerConf.getHiveConf();
  try {
    Hive hive = getHiveClient(conf);
    Database database = new Database();
    database.setName(LensConfConstants.DEFAULT_STATISTICS_DATABASE);
    // Drop leftovers from previous runs before recreating from scratch.
    hive.dropTable(LensConfConstants.DEFAULT_STATISTICS_DATABASE, EVENT_NAME, true, true);
    hive.dropTable(LensConfConstants.DEFAULT_STATISTICS_DATABASE, QueryExecutionStatistics.class.getSimpleName(),
      true, true);
    hive.dropDatabase(LensConfConstants.DEFAULT_STATISTICS_DATABASE, true, true);
    hive.createDatabase(database);
    Table t = getHiveTable();
    hive.createTable(t);
  } catch (Exception e) {
    // Don't swallow the cause: a bare Assert.fail() hides what went wrong.
    Assert.fail("Failed to configure hive tables: " + e);
  }
  return conf;
}
代码示例来源:origin: apache/lens
/**
 * Before test: verifies the Hadoop binary path is configured, starts a
 * Hive session, creates the test database, and primes the driver
 * configuration for the test run.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void beforeTest() throws Exception {
  // Check if hadoop property set
  System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
  assertNotNull(System.getProperty("hadoop.bin.path"));
  createDriver();
  ss = new SessionState(hiveConf, "testuser");
  SessionState.start(ss);
  Hive client = Hive.get(hiveConf);
  Database database = new Database();
  database.setName(dataBase);
  // ifNotExist=true: reuse the database if a previous run left it behind.
  client.createDatabase(database, true);
  SessionState.get().setCurrentDatabase(dataBase);
  sessionid = SessionState.get().getSessionId();
  // Temporarily disable insert-overwrite rewriting while switching database
  // via the driver, then restore the flags for the actual test queries.
  // NOTE(review): "OVEWRITE" is misspelled but presumably matches the
  // constant declared in LensConfConstants — confirm before renaming.
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
  QueryContext context = createContext("USE " + dataBase, this.queryConf);
  driver.execute(context);
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
  driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
}
代码示例来源:origin: org.apache.hadoop.hive/hive-exec
/**
 * Creates a database from a CREATE DATABASE descriptor.
 *
 * @param db    metastore client handle
 * @param crtDb descriptor carrying name, comment, location and properties
 * @return always 0 — success is the only non-exceptional outcome
 * @throws HiveException on metastore failure
 * @throws AlreadyExistsException if the database exists and IF NOT EXISTS
 *         was not specified
 */
private int createDatabase(Hive db, CreateDatabaseDesc crtDb)
    throws HiveException, AlreadyExistsException {
  Database newDb = new Database();
  newDb.setName(crtDb.getName());
  newDb.setDescription(crtDb.getComment());
  newDb.setLocationUri(crtDb.getLocationUri());
  newDb.setParameters(crtDb.getDatabaseProperties());
  db.createDatabase(newDb, crtDb.getIfNotExists());
  return 0;
}
代码示例来源:origin: apache/lens
/**
 * Class-level fixture: starts a Hive session, installs a mock authorizer,
 * creates a scratch database named after the test class, and enables
 * group-based schema authorization before defining the test cubes.
 *
 * @throws HiveException on metastore errors
 * @throws AlreadyExistsException if the scratch database already exists
 * @throws LensException on Lens-side initialization failure
 */
@BeforeClass
public static void setup() throws HiveException, AlreadyExistsException, LensException {
  SessionState.start(conf);
  // Install the mock authorizer the tests use to verify auth checks.
  conf.set(LensConfConstants.AUTHORIZER_CLASS, "org.apache.lens.cube.parse.MockAuthorizer");
  LensAuthorizer.get().init(conf);
  Database database = new Database();
  database.setName(TestCubeMetastoreClient.class.getSimpleName());
  Hive.get(conf).createDatabase(database);
  SessionState.get().setCurrentDatabase(TestCubeMetastoreClient.class.getSimpleName());
  client = CubeMetastoreClient.getInstance(conf);
  // Turn on user-group-based schema authorization for this test run.
  client.getConf().setBoolean(LensConfConstants.ENABLE_METASTORE_SCHEMA_AUTHORIZATION_CHECK, true);
  client.getConf().setBoolean(LensConfConstants.USER_GROUPS_BASED_AUTHORIZATION, true);
  SessionState.getSessionConf().set(LensConfConstants.SESSION_USER_GROUPS, "lens-auth-test1");
  // Define the cubes and dimensions the test cases operate on.
  defineCube(CUBE_NAME, CUBE_NAME_WITH_PROPS, DERIVED_CUBE_NAME, DERIVED_CUBE_NAME_WITH_PROPS);
  defineUberDims();
}
代码示例来源:origin: apache/lens
/**
 * Per-test setup: ensures the test database exists and connects an ML
 * client to the local Lens server using that database.
 *
 * @throws Exception propagated from the superclass setup or client wiring
 */
@BeforeTest
public void setUp() throws Exception {
  super.setUp();
  // Idempotently create the test database (ignore if already present).
  Database testDb = new Database();
  testDb.setName(TEST_DB);
  Hive.get(new HiveConf()).createDatabase(testDb, true);

  // Point a Lens client at the locally running test server.
  LensClientConfig clientConfig = new LensClientConfig();
  clientConfig.setLensDatabase(TEST_DB);
  clientConfig.set(LensConfConstants.SERVER_BASE_URL,
      "http://localhost:" + getTestPort() + "/lensapi");
  mlClient = new LensMLClient(new LensClient(clientConfig));
}
代码示例来源:origin: apache/lens
/**
 * Per-test setup: makes sure the test database exists, then builds an ML
 * client against the local Lens server configured for that database.
 *
 * @throws Exception propagated from the superclass setup or client wiring
 */
@BeforeTest
public void setUp() throws Exception {
  super.setUp();
  // Create the test database if missing; true = tolerate duplicates.
  Database scratch = new Database();
  scratch.setName(TEST_DB);
  Hive.get(new HiveConf()).createDatabase(scratch, true);

  // Wire up the client configuration for the embedded test server.
  LensClientConfig cfg = new LensClientConfig();
  cfg.setLensDatabase(TEST_DB);
  cfg.set(LensConfConstants.SERVER_BASE_URL,
      "http://localhost:" + getTestPort() + "/lensapi");
  mlClient = new LensMLClient(new LensClient(cfg));
}
代码示例来源:origin: apache/lens
/**
 * (Re)creates database {@code dbName} and populates it with the cube,
 * fact and partition metadata used by the test suite.
 *
 * @param conf   Hive configuration for the metastore connection
 * @param dbName name of the database to recreate
 * @throws Exception rethrown after logging if any setup step fails
 */
public void createSources(HiveConf conf, String dbName) throws Exception {
  try {
    Database database = new Database();
    database.setName(dbName);
    // Cascade-drop any leftover database from a previous run, then recreate.
    Hive.get(conf).dropDatabase(dbName, true, true, true);
    Hive.get(conf).createDatabase(database);
    SessionState.get().setCurrentDatabase(dbName);
    CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
    // Populate schema first, then the various fact partitions.
    createFromXML(client);
    assertTestFactTimelineClass(client);
    createCubeCheapFactPartitions(client);
    // commenting this as the week date format throws IllegalPatternException
    // createCubeFactWeekly(client);
    createTestFact2Partitions(client);
    createTestFact2RawPartitions(client);
    createBaseCubeFactPartitions(client);
    createSummaryPartitions(client);
    // dump(client);
  } catch (Exception exc) {
    // Log with full stack trace, then propagate so callers see the failure.
    log.error("Exception while creating sources.", exc);
    throw exc;
  }
}
private static final StrSubstitutor GREGORIAN_SUBSTITUTOR = new StrSubstitutor(new StrLookup<String>() {
内容来源于网络,如有侵权,请联系作者删除!