本文整理了Java中org.apache.hive.service.server.HiveServer2.<init>()
方法的一些代码示例,展示了HiveServer2.<init>()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。HiveServer2.<init>()
方法的具体详情如下:
包路径:org.apache.hive.service.server.HiveServer2
类名称:HiveServer2
方法名:<init>
暂无
代码示例来源:origin: apache/hive
// Boots the embedded HiveServer2 used by this test and waits for it to come up.
// Throws InterruptedException if the startup sleep is interrupted.
private void startHiveServer() throws InterruptedException {
// Start hive server2
server = new HiveServer2();
server.init(hiveConf);
server.start();
// Fixed wait — there is no startup-completion callback to listen for here.
Thread.sleep(5000);
System.out.println("## HiveServer started");
}
代码示例来源:origin: apache/hive
// Minimal HiveServer2 bootstrap sequence: construct, initialize with the conf, start.
server = new HiveServer2();
server.init(hiveConf);
server.start();
代码示例来源:origin: apache/hive
/**
 * Allocates a free port and pre-creates the server and configuration shared by all tests.
 *
 * @throws java.lang.Exception if port discovery fails
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Find a free port
port = MetaStoreTestUtils.findFreePort();
hiveServer2 = new HiveServer2();
hiveConf = new HiveConf();
}
代码示例来源:origin: apache/hive
/**
 * Starts the shared {@code hiveServer2} instance with the given configuration,
 * retrying on a fresh port when startup fails (e.g. due to a port clash).
 *
 * @param hiveConf configuration applied to the server before each attempt
 * @throws Exception if interrupted while waiting for the server to come up
 */
protected static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception {
  // Idiom fix: locals renamed from PascalCase (HS2Exception/HS2Started) to
  // conventional lowerCamelCase; exception variable renamed from 't' to 'e'.
  Exception lastException = null;
  boolean started = false;
  for (int tryCount = 0; tryCount < MetaStoreTestUtils.RETRY_COUNT; tryCount++) {
    try {
      hiveServer2.init(hiveConf);
      hiveServer2.start();
      started = true;
      break;
    } catch (Exception e) {
      lastException = e;
      // Pick a new free port and retry with a freshly constructed server;
      // a failed HiveServer2 instance cannot be re-initialized.
      port = MetaStoreTestUtils.findFreePort();
      hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, port);
      hiveServer2 = new HiveServer2();
    }
  }
  if (!started) {
    lastException.printStackTrace();
    fail();
  }
  // Wait for startup to complete
  Thread.sleep(2000);
  System.out.println("HiveServer2 started on port " + port);
}
代码示例来源:origin: apache/hive
// Verifies that enabling PAM while the Web UI has SSL disabled is rejected at init time.
@Test
public void testSslIsFalse() {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(is(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname
+ " has false value. It is recommended to set to true when PAM is used."));
hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, false);
hiveServer2 = new HiveServer2();
// init(...) is expected to throw; the ExpectedException rule asserts the type and message.
hiveServer2.init(hiveConf);
}
代码示例来源:origin: apache/hive
// Verifies that an empty PAM services list is rejected at init time even when SSL is on.
@Test
public void testPamServicesAreNotConfigured() {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage(is(ConfVars.HIVE_SERVER2_PAM_SERVICES.varname + " are not configured."));
hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "");
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, true);
hiveConf.setVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH, sslKeyStorePath);
hiveConf.setVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD, keyStorePassword);
hiveServer2 = new HiveServer2();
// init(...) is expected to throw; the ExpectedException rule asserts the type and message.
hiveServer2.init(hiveConf);
}
代码示例来源:origin: apache/hive
// Happy path: PAM services configured and SSL enabled with a keystore — init must succeed.
@Test
public void testPamCorrectConfiguration() {
hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, true);
hiveConf.setVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PATH, sslKeyStorePath);
hiveConf.setVar(ConfVars.HIVE_SERVER2_WEBUI_SSL_KEYSTORE_PASSWORD, keyStorePassword);
hiveServer2 = new HiveServer2();
// No ExpectedException set up: any throw from init(...) fails the test.
hiveServer2.init(hiveConf);
}
代码示例来源:origin: apache/hive
/**
 * Starts a PAM-protected HiveServer2 Web UI on a free port for the test class.
 *
 * @throws Exception if no free port is found or the startup sleep is interrupted
 */
@BeforeClass
public static void beforeTests() throws Exception {
  // Avoid the default Web UI port so a locally running server does not clash.
  webUIPort =
      MetaStoreTestUtils.findFreePortExcepting(Integer.valueOf(ConfVars.HIVE_SERVER2_WEBUI_PORT.getDefaultValue()));
  hiveConf = new HiveConf();
  // Fix: HIVE_IN_TEST was previously set twice; the redundant second call was removed.
  hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
  hiveConf.set(ConfVars.METASTOREPWD.varname, metastorePasswd);
  hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_PAM, true);
  // Inject a test authenticator so PAM checks do not hit the real system services.
  hiveServer2 = new HiveServer2(new TestPamAuthenticator(hiveConf));
  hiveServer2.init(hiveConf);
  hiveServer2.start();
  // Fixed wait for the Web UI to finish binding; no startup callback is available.
  Thread.sleep(5000);
}
代码示例来源:origin: apache/hive
// Retry loop: attempt to bring up a fresh HiveServer2 up to RETRY_COUNT times
// (catch/recovery portion of the loop lies outside this excerpt).
for (int tryCount = 0; (tryCount < MetaStoreTestUtils.RETRY_COUNT); tryCount++) {
try {
hiveServer2 = new HiveServer2();
hiveServer2.init(hiveConf);
hiveServer2.start();
代码示例来源:origin: apache/hive
/**
 * Configures and starts an HTTP-mode HiveServer2 with cookie auth enabled
 * and a very short cookie lifetime, then opens a service client.
 *
 * @throws java.lang.Exception if server startup or client creation fails
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Find a free port
port = MetaStoreTestUtils.findFreePort();
hiveServer2 = new HiveServer2();
hiveConf = new HiveConf();
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_AUTH_ENABLED, true);
// Set the cookie max age to a very low value so that
// the server sends 401 very frequently
hiveConf.setTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE, 1, TimeUnit.SECONDS);
hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "http");
hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH, "cliservice");
// NOTE(review): if 'port' is a primitive int, assertNotNull autoboxes it and can
// never fail — confirm the field's declared type before relying on these checks.
assertNotNull(port);
assertNotNull(hiveServer2);
assertNotNull(hiveConf);
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, host);
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, port);
hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
startHiveServer2WithConf(hiveConf);
client = getServiceClientInternal();
}
代码示例来源:origin: apache/sqoop
/**
 * Creates a mini-cluster wrapper holding connection settings and an
 * uninitialized HiveServer2 instance (started later by the cluster lifecycle).
 *
 * @param hostname host the server will bind to
 * @param port port the server will listen on
 * @param tempFolderPath scratch directory for the cluster
 * @param authenticationConfiguration authentication setup to apply
 */
public HiveMiniCluster(String hostname, int port, String tempFolderPath, AuthenticationConfiguration authenticationConfiguration) {
this.hostName = hostname;
this.port = port;
this.tempFolderPath = tempFolderPath;
this.authenticationConfiguration = authenticationConfiguration;
this.hiveServer2 = new HiveServer2();
}
代码示例来源:origin: kite-sdk/kite
/**
 * Constructs, initializes, and starts a {@link HiveServer2} with the supplied
 * configuration.
 *
 * @param serverConf configuration the server is initialized with
 * @return the started server instance
 */
private HiveServer2 startHiveServer(HiveConf serverConf) {
  final HiveServer2 server = new HiveServer2();
  server.init(serverConf);
  server.start();
  return server;
}
代码示例来源:origin: com.github.sakserv/hadoop-mini-clusters-hiveserver2
// Lifecycle hook: configures and launches the embedded HiveServer2 on the configured port.
@Override
public void start() throws Exception {
hiveServer2 = new HiveServer2();
LOG.info("HIVESERVER2: Starting HiveServer2 on port: {}", hiveServer2Port);
// configure() populates hiveConf before the server reads it in init(...).
configure();
hiveServer2.init(hiveConf);
hiveServer2.start();
}
代码示例来源:origin: linkedin/transport
// Creates an in-process Hive server, grabs its CLIService for direct client calls,
// opens a session, and prepares reflective access to the function registry so
// pre-constructed GenericUDF objects can be registered later.
private void createHiveServer() {
HiveServer2 server = new HiveServer2();
server.init(new HiveConf());
// Locate the CLIService among the server's child services; it is the
// programmatic entry point used instead of a JDBC connection.
for (Service service : server.getServices()) {
if (service instanceof CLIService) {
_client = (CLIService) service;
}
}
Preconditions.checkNotNull(_client, "CLI service not found in local Hive server");
try {
_sessionHandle = _client.openSession(null, null, null);
_functionRegistry = SessionState.getRegistryForWrite();
// "map_from_entries" UDF is required to create maps with non-primitive key types
_functionRegistry.registerGenericUDF("map_from_entries", MapFromEntriesWrapper.class);
// TODO: This is a hack. Hive's public API does not have a way to register an already created GenericUDF object
// It only accepts a class name after which the parameterless constructor of the class is called to create a
// GenericUDF object. This does not work for HiveTestStdUDFWrapper as it accepts the UDF classes as parameters.
// However, Hive has an internal method which does allow passing GenericUDF objects instead of classes.
_functionRegistryAddFunctionMethod =
_functionRegistry.getClass().getDeclaredMethod("addFunction", String.class, FunctionInfo.class);
// setAccessible(true) bypasses the method's non-public visibility.
_functionRegistryAddFunctionMethod.setAccessible(true);
} catch (HiveSQLException | NoSuchMethodException e) {
throw new RuntimeException(e);
}
}
代码示例来源:origin: com.hotels/beeju
// Test fixture setup: starts HiveServer2 with a relaxed authorizer, waits until
// it is reachable, and builds the JDBC URL tests connect with.
@Override
protected void beforeTest() throws Throwable {
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, RelaxedSQLStdHiveAuthorizerFactory.class.getName());
hiveServer2 = new HiveServer2();
hiveServer2.init(conf);
hiveServer2.start();
// Blocks until the server accepts connections (start() returns before that).
waitForHiveServer2StartUp();
jdbcConnectionUrl = "jdbc:hive2://localhost:" + port + "/" + databaseName();
}
代码示例来源:origin: uber/hudi
/**
 * Brings up a new {@link HiveServer2} driven by the given configuration and
 * hands it back to the caller.
 *
 * @param serverConf configuration used to initialize the server
 * @return the running server
 */
private HiveServer2 startHiveServer(HiveConf serverConf) {
  final HiveServer2 hs2 = new HiveServer2();
  hs2.init(serverConf);
  hs2.start();
  return hs2;
}
代码示例来源:origin: org.springframework.data/spring-data-hadoop-hive
// Spring lifecycle hook: builds the HiveConf from the injected configuration and
// thread/host/port properties, then creates the server (and starts it if autoStartup).
@Override
public void afterPropertiesSet() throws Exception {
Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);
hiveConf = new HiveConf(cfg, HiveServer2.class);
hiveConf.set("hive.server2.thrift.bind.host", host);
hiveConf.set("hive.server2.thrift.port", String.valueOf(port));
hiveConf.set("hive.server2.thrift.min.worker.threads", String.valueOf(minThreads));
hiveConf.set("hive.server2.thrift.max.worker.threads", String.valueOf(maxThreads));
// Remove stale scratch-dir contents from previous runs before starting.
ServerUtils.cleanUpScratchDir(hiveConf);
server = new HiveServer2();
server.init(hiveConf);
// Honor the bean's autoStartup flag; callers may start the server manually later.
if (autoStartup) {
server.start();
}
}
代码示例来源:origin: apache/oozie
/**
 * Creates a HiveServer2, applies every overlay property on top of the base
 * configuration, starts the server, and blocks until it is up.
 *
 * @param confOverlay properties layered over the default Hive configuration
 * @throws Exception if the server fails to start
 */
public void start(Map<String, String> confOverlay) throws Exception {
  hiveServer2 = new HiveServer2();
  // Apply every overlay entry before the server reads its configuration.
  confOverlay.forEach(this::setConfProperty);
  hiveServer2.init(getHiveConf());
  hiveServer2.start();
  waitForStartup();
  setStarted(true);
}
代码示例来源:origin: com.presidentio.but/hive2-unit
// Unit-framework hook: prepares the Hive home directory on the mini-HDFS, builds
// an in-memory (HSQLDB) metastore configuration tuned for tests, then starts
// HiveServer2 and opens the JDBC connection used by the unit.
@Override
public void init() throws InitUnitException {
try {
hdfsUnit.getFileSystem().mkdirs(new Path(HIVE_HOME));
hdfsUnit.getFileSystem().setOwner(new Path(HIVE_HOME), "hive", "hive");
} catch (IOException e) {
throw new InitUnitException("Failed to create hive home directory: " + HIVE_HOME, e);
}
HiveConf hiveConf = new HiveConf(yarnUnit.getConfig(), Hive2Unit.class);
// Use the in-process HSQLDB driver for the metastore instead of a real database.
hiveConf.set("datanucleus.connectiondrivername", "org.hsqldb.jdbc.JDBCDriver");
hiveConf.set("datanucleus.connectionPoolingType", "None");
hiveConf.set("javax.jdo.option.ConnectionDriverName", "org.hsqldb.jdbc.JDBCDriver");
// No real Hadoop binary exists in the test environment.
hiveConf.setVar(HiveConf.ConfVars.HADOOPBIN, "NO_BIN!");
// Disable optimizations/features that need external services or full cluster state.
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_INFER_BUCKET_SORT, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVEOPTINDEXFILTER, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVECONVERTJOIN, false);
hiveConf.setBoolVar(HiveConf.ConfVars.HIVESKEWJOIN, false);
hiveConf.setBoolVar(HiveConf.ConfVars.LOCALMODEAUTO, false);
hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, "localhost");
// Random database name gives each test run an isolated in-memory metastore.
String metaStorageUrl = "jdbc:hsqldb:mem:" + UUID.randomUUID().toString() + ";create=true";
hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStorageUrl);
// hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "http");
hiveServer = new HiveServer2();
hiveServer.init(hiveConf);
hiveServer.start();
initConnection();
}
代码示例来源:origin: apache/lens
// Fragment: tunes the remote-connection timeouts, then builds the server with a
// HiveConf layered on top of the remote configuration resource.
HiveConf.setVar(remoteConf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, "60000s");
remoteConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
server = new HiveServer2();
hiveConf = new HiveConf();
hiveConf.addResource(remoteConf);
内容来源于网络,如有侵权,请联系作者删除!