我对大数据/Hadoop 还不熟悉。我正在尝试通过 JDBC 从 Eclipse 连接到 Hive,但一直收到以下错误,不清楚问题出在哪里,请帮忙。
ERROR StatusLogger:未找到 log4j2 配置文件,使用默认配置(仅将错误输出到控制台)。
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/Users/eragank/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/eragank/.m2/repository/org/slf4j/slf4j-log4j12/1.6.1/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Exception in thread "main" java.io.IOException: Login failure for hive/mksd1dllytj001.c.iron-potion-771.internal@IHGEXT.GLOBAL from keytab /etc/security/keytab/hive.service.keytab: javax.security.auth.login.LoginException: Unable to obtain password from user
    at org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab(UserGroupInformation.java:935)
    at com.ihg.test.Test.main(Test.java:32)
Caused by: javax.security.auth.login.LoginException: Unable to obtain password from user
    at com.sun.security.auth.module.Krb5LoginModule.promptForPass(Krb5LoginModule.java:897)
    at com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:760)
    at com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at javax.security.auth.login.LoginContext.invoke(LoginContext.java:755)
    at javax.security.auth.login.LoginContext.access$000(LoginContext.java:195)
    at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682)
    at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680)
    at javax.security.auth.login.LoginContext.login(LoginContext.java:587)
    at org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab(UserGroupInformation.java:926)
    ... 1 more
Test.java
package com.test.Test;
import java.sql.SQLException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Properties;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.jdbc.HiveDriver;
import java.sql.DriverManager;
public class Test {

    /** Fully-qualified class name of the HiveServer2 JDBC driver. */
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    /**
     * Logs in to a Kerberos-secured cluster from a keytab, opens a JDBC
     * connection to HiveServer2 (HTTP transport), runs {@code show tables}
     * and prints every table name in the current database.
     *
     * @param args unused
     * @throws SQLException if the connection or query fails
     * @throws IOException  if the Kerberos keytab login fails
     */
    public static void main(String[] args) throws SQLException, IOException {
        try {
            Class.forName(driverName);
            System.out.println("Class FOUND!!");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }

        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        // Hadoop expects the lowercase token "kerberos" here; older releases
        // reject other casings outright.
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // "Unable to obtain password from user" from Krb5LoginModule means the
        // keytab could not be read or does not contain this exact principal.
        // NOTE(review): the stack trace shows host "mksd1dllytj001" and path
        // "/etc/security/keytab/" while this code uses "mksd1dlllytj001" and
        // "/etc/security/keytabs/" — verify the principal spelling and keytab
        // contents with `klist -kt <keytab>`, and confirm the file is readable
        // from the JVM running this client (a Windows Eclipse process cannot
        // read a Linux-local /etc/security path).
        UserGroupInformation.loginUserFromKeytab(
                "hive/mksd1dlllytj001.c.iron-potion-771.internal@IHGEXT.GLOBAL",
                "/etc/security/keytabs/hive.service.keytab");

        String url = "jdbc:hive2://mksd1dlllytj001.c.iron-potion-771.internal:10000/grs_core?transportMode=http;httpPath=cliservice;auth=kerberos;sasl.qop=auth-int;principal=hive/mksd1dlllytj001.c.iron-potion-771.internal@IHGEXT.GLOBAL";
        // try-with-resources guarantees the connection, statement and result
        // set are closed even when the query throws.
        try (Connection con = DriverManager.getConnection(url, "hive", "");
             Statement stmt = con.createStatement();
             // HiveServer2 rejects a trailing ';' inside the statement text,
             // so the semicolon must not be part of the SQL string.
             ResultSet res = stmt.executeQuery("show tables")) {
            // Iterate the whole result set — an `if` would print only the
            // first table.
            while (res.next()) {
                System.out.println(res.getString(1));
            }
        }
    }
}
pom.xml文件
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.ihg.ptfwork</groupId>
<artifactId>ptfUpdateTest</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>ptfUpdateTest</name>
<url>http://maven.apache.org</url>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- NOTE(review): these properties declare the HDP cluster's Hive build
     (1.2.1000.x) and its client config location, but nothing below uses
     them - the hive-jdbc/hive-beeline dependencies are pinned to Apache
     2.0.0. Confirm the client driver version matches the server. -->
<env.hiveVersion>1.2.1000.2.6.0.3-8</env.hiveVersion>
<env.hivecoreDir>/usr/hdp/current/hive-client/conf</env.hivecoreDir>
<env.hivecoreFile>hive-site.xml</env.hivecoreFile>
</properties>
<dependencies>
<dependency>
<!-- JUnit 3.8.1 is ancient (2002); consider upgrading when tests are added. -->
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<!-- Transitively pulls BOTH log4j-slf4j-impl 2.4.1 and slf4j-log4j12 1.6.1,
     which is exactly the "multiple SLF4J bindings" warning in the log.
     Excluding one of the two bindings here would silence it. -->
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-beeline</artifactId>
<version>2.0.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
<!-- System-scope hack to put tools.jar on the classpath; depends on
     JAVA_HOME pointing at a JDK (not a JRE) on the build machine and
     breaks on JDK 9+, where tools.jar no longer exists. -->
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.7.0_05</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
</dependencies>
</project>
1条答案
按热度按时间ncgqoxb01#
我会先尝试不使用 UserGroupInformation,直接用 DriverManager.getConnection() 建立连接——我在一个 H2 数据库上做过同样的操作,效果很好。这听起来更像是你正在使用的 Kerberos 认证流程中的问题,对此我并不熟悉。