本文整理了Java中org.apache.hive.service.server.HiveServer2.start()
方法的一些代码示例,展示了HiveServer2.start()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度帮助到你。HiveServer2.start()
方法的具体详情如下:
包路径:org.apache.hive.service.server.HiveServer2
类名称:HiveServer2
方法名:start
暂无
代码示例来源:origin: apache/hive
// Creates, initializes and starts a fresh HiveServer2 using the enclosing
// class's hiveConf, then sleeps to give the server time to come up.
// NOTE(review): start() returns before the Thrift endpoint is ready, hence
// the fixed wait; there is no readiness probe here.
private void startHiveServer() throws InterruptedException {
// Start hive server2
server = new HiveServer2();
server.init(hiveConf);
server.start();
// Fixed 5s wait is a heuristic, not a guarantee the server is serving.
Thread.sleep(5000);
System.out.println("## HiveServer started");
}
代码示例来源:origin: apache/hive
/**
 * Starts the shared {@code hiveServer2} instance with the supplied
 * configuration and fails the test immediately if startup throws.
 *
 * @param hiveConf configuration applied to the server before starting
 * @throws Exception if interrupted while waiting for startup to settle
 */
static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception {
  hiveServer2.init(hiveConf);
  // Start HiveServer2 with given config. Attach the cause to fail() so the
  // failure reason appears in the test report instead of only on stderr.
  try {
    hiveServer2.start();
  } catch (Throwable t) {
    t.printStackTrace();
    fail("HiveServer2 failed to start: " + t);
  }
  // Fixed wait: start() returns before the Thrift endpoint accepts connections.
  Thread.sleep(2000);
  System.out.println("HiveServer2 started on port " + port);
}
代码示例来源:origin: apache/hive
server.start();
代码示例来源:origin: apache/hive
/**
 * Starts the shared {@code hiveServer2}, retrying on a fresh free port when
 * the configured HTTP port is already taken, and fails the test if every
 * attempt throws.
 *
 * @param hiveConf configuration applied (and mutated with a new port on retry)
 * @throws Exception if interrupted while waiting for startup to settle
 */
protected static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception {
  Exception lastFailure = null; // most recent startup failure, reported if all retries fail
  boolean started = false;
  for (int tryCount = 0; tryCount < MetaStoreTestUtils.RETRY_COUNT; tryCount++) {
    try {
      hiveServer2.init(hiveConf);
      hiveServer2.start();
      started = true;
      break;
    } catch (Exception t) {
      lastFailure = t;
      // The port was likely in use: pick a new free one and rebuild the
      // server, since a failed instance cannot be re-initialized.
      port = MetaStoreTestUtils.findFreePort();
      hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, port);
      hiveServer2 = new HiveServer2();
    }
  }
  if (!started) {
    lastFailure.printStackTrace();
    // Attach the cause so the report shows why startup kept failing.
    fail("HiveServer2 failed to start after " + MetaStoreTestUtils.RETRY_COUNT
        + " attempts: " + lastFailure);
  }
  // Fixed wait: start() returns before the server is fully up.
  Thread.sleep(2000);
  System.out.println("HiveServer2 started on port " + port);
}
代码示例来源:origin: apache/hive
/**
 * Spins up a PAM-authenticated HiveServer2 with its Web UI on a free port
 * before any test in the class runs.
 *
 * @throws Exception if no free port can be found or startup is interrupted
 */
@BeforeClass
public static void beforeTests() throws Exception {
  // Pick a Web UI port that is free and distinct from the default value.
  webUIPort =
      MetaStoreTestUtils.findFreePortExcepting(Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
  hiveConf = new HiveConf();
  hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
  hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd);
  hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  // Authenticate the Web UI through the "sshd" PAM service.
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true);
  // (removed a duplicate HIVE_IN_TEST assignment -- it is already set above)
  hiveServer2 = new HiveServer2(new TestPamAuthenticator(hiveConf));
  hiveServer2.init(hiveConf);
  hiveServer2.start();
  // Fixed wait: start() returns before the server is fully up.
  Thread.sleep(5000);
}
代码示例来源:origin: apache/hive
hiveServer2 = new HiveServer2();
hiveServer2.init(hiveConf);
hiveServer2.start();
Thread.sleep(5000);
hs2Started = true;
代码示例来源:origin: org.springframework.data/spring-data-hadoop-hive
/**
 * Starts the wrapped HiveServer2, doing nothing if it is already running
 * (makes repeated start() calls idempotent).
 */
@Override
public void start() {
  if (isRunning()) {
    return;
  }
  server.start();
}
代码示例来源:origin: kite-sdk/kite
/**
 * Builds, initializes and starts a HiveServer2 using the given configuration.
 *
 * @param serverConf configuration applied before startup
 * @return the started server instance
 */
private HiveServer2 startHiveServer(HiveConf serverConf) {
  final HiveServer2 server = new HiveServer2();
  server.init(serverConf);
  server.start();
  return server;
}
代码示例来源:origin: com.github.sakserv/hadoop-mini-clusters-hiveserver2
/**
 * Creates, configures and starts an embedded HiveServer2 on
 * {@code hiveServer2Port}.
 *
 * @throws Exception if configuration or startup fails
 */
@Override
public void start() throws Exception {
hiveServer2 = new HiveServer2();
LOG.info("HIVESERVER2: Starting HiveServer2 on port: {}", hiveServer2Port);
// NOTE(review): configure() presumably populates hiveConf before init --
// confirm against the rest of the class.
configure();
hiveServer2.init(hiveConf);
hiveServer2.start();
}
代码示例来源:origin: apache/sqoop
// Initializes and starts the enclosing hiveServer2 inside a privileged
// action; returns null because the action produces no value.
@Override
public Void run() {
hiveServer2.init(config);
hiveServer2.start();
return null;
}
});
代码示例来源:origin: com.hotels/beeju
/**
 * Boots HiveServer2 before each test: installs the relaxed authorizer so
 * tests are not blocked by SQL-standard authorization checks, starts the
 * server, waits for it, and records the JDBC URL clients should use.
 */
@Override
protected void beforeTest() throws Throwable {
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, RelaxedSQLStdHiveAuthorizerFactory.class.getName());
  hiveServer2 = new HiveServer2();
  hiveServer2.init(conf);
  hiveServer2.start();
  waitForHiveServer2StartUp();
  jdbcConnectionUrl = String.format("jdbc:hive2://localhost:%s/%s", port, databaseName());
}
代码示例来源:origin: org.apache.spark/spark-hive-thriftserver_2.11
server = new HiveServer2();
server.init(hiveConf);
server.start();
ShimLoader.getHadoopShims().startPauseMonitor(hiveConf);
break;
代码示例来源:origin: org.apache.spark/spark-hive-thriftserver
server = new HiveServer2();
server.init(hiveConf);
server.start();
ShimLoader.getHadoopShims().startPauseMonitor(hiveConf);
break;
代码示例来源:origin: org.springframework.data/spring-data-hadoop-hive
/**
 * Builds the HiveConf from the injected Hadoop configuration/properties,
 * applies the Thrift endpoint settings, cleans the scratch directory, and
 * creates (and optionally starts) the embedded HiveServer2.
 *
 * @throws Exception if configuration merging or server startup fails
 */
@Override
public void afterPropertiesSet() throws Exception {
// Merge the injected base configuration with override properties.
Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);
hiveConf = new HiveConf(cfg, HiveServer2.class);
// Thrift endpoint and worker-pool sizing come from bean properties.
hiveConf.set("hive.server2.thrift.bind.host", host);
hiveConf.set("hive.server2.thrift.port", String.valueOf(port));
hiveConf.set("hive.server2.thrift.min.worker.threads", String.valueOf(minThreads));
hiveConf.set("hive.server2.thrift.max.worker.threads", String.valueOf(maxThreads));
// Remove leftover scratch data from previous runs before starting.
ServerUtils.cleanUpScratchDir(hiveConf);
server = new HiveServer2();
server.init(hiveConf);
// Honor the Spring lifecycle flag: only start eagerly when configured to.
if (autoStartup) {
server.start();
}
}
代码示例来源:origin: uber/hudi
/**
 * Creates a HiveServer2, applies the supplied configuration, starts it and
 * hands it back to the caller.
 *
 * @param serverConf configuration applied before startup
 * @return the running server instance
 */
private HiveServer2 startHiveServer(HiveConf serverConf) {
  final HiveServer2 hs2 = new HiveServer2();
  hs2.init(serverConf);
  hs2.start();
  return hs2;
}
代码示例来源:origin: apache/oozie
/**
 * Creates and starts a fresh HiveServer2, applying the supplied
 * configuration overrides before initialization, then blocks until startup
 * completes and marks this component as started.
 *
 * @param confOverlay configuration properties applied on top of the defaults
 * @throws Exception if initialization or startup fails
 */
public void start(Map<String, String> confOverlay) throws Exception {
  hiveServer2 = new HiveServer2();
  // Push every override into the configuration before init.
  for (String key : confOverlay.keySet()) {
    setConfProperty(key, confOverlay.get(key));
  }
  hiveServer2.init(getHiveConf());
  hiveServer2.start();
  waitForStartup();
  setStarted(true);
}
代码示例来源:origin: com.github.hyukjinkwon/hive-service
server = new HiveServer2();
server.init(hiveConf);
server.start();
ShimLoader.getHadoopShims().startPauseMonitor(hiveConf);
代码示例来源:origin: org.spark-project.hive/hive-service
server = new HiveServer2();
server.init(hiveConf);
server.start();
ShimLoader.getHadoopShims().startPauseMonitor(hiveConf);
代码示例来源:origin: com.presidentio.but/hive2-unit
/**
 * Provisions an in-process HiveServer2 backed by an in-memory HSQLDB
 * metastore for unit testing: creates the Hive home directory in the test
 * HDFS, disables Hive optimizations that need real cluster services, starts
 * the server and opens a JDBC connection.
 *
 * @throws InitUnitException if the HDFS home directory cannot be created
 */
@Override
public void init() throws InitUnitException {
try {
hdfsUnit.getFileSystem().mkdirs(new Path(HIVE_HOME));
hdfsUnit.getFileSystem().setOwner(new Path(HIVE_HOME), "hive", "hive");
} catch (IOException e) {
throw new InitUnitException("Failed to create hive home directory: " + HIVE_HOME, e);
}
HiveConf hiveConf = new HiveConf(yarnUnit.getConfig(), Hive2Unit.class);
// Back the metastore with HSQLDB and no connection pooling for test speed.
hiveConf.set("datanucleus.connectiondrivername", "org.hsqldb.jdbc.JDBCDriver");
hiveConf.set("datanucleus.connectionPoolingType", "None");
hiveConf.set("javax.jdo.option.ConnectionDriverName", "org.hsqldb.jdbc.JDBCDriver");
// NOTE(review): sentinel value -- presumably ensures the hadoop binary is
// never invoked in-process; confirm against Hive2Unit usage.
hiveConf.setVar(HiveConf.ConfVars.HADOOPBIN, "NO_BIN!");
// Disable concurrency and query optimizations that rely on services the
// mini cluster does not provide.
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_INFER_BUCKET_SORT, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTINDEXFILTER, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVECONVERTJOIN, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVESKEWJOIN, false);
hiveConf.setBoolVar(HiveConf.ConfVars.LOCALMODEAUTO, false);
hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, "localhost");
// Random database name gives each test run an isolated in-memory metastore.
String metaStorageUrl = "jdbc:hsqldb:mem:" + UUID.randomUUID().toString() + ";create=true";
hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStorageUrl);
// hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "http");
hiveServer = new HiveServer2();
hiveServer.init(hiveConf);
hiveServer.start();
initConnection();
}
代码示例来源:origin: apache/lens
hiveConf.addResource(remoteConf);
server.init(hiveConf);
server.start();
while (true) {
try {
内容来源于网络,如有侵权,请联系作者删除!