How do I set the configuration in the hive-site.xml file for the Hive metastore connection?

icnyk63a posted on 2021-05-30 in Hadoop

I want to connect to the metastore from Java code. I don't know how to set the configuration settings in the hive-site.xml file, nor where to place the hive-site.xml file. Please help.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class HiveMetastoreJDBCTest {

    public static void main(String[] args) throws Exception {

        Connection conn = null;
        try {
            // Load the metastore connection settings from hive-site.xml
            HiveConf conf = new HiveConf();
            conf.addResource(new Path("file:///path/to/hive-site.xml"));
            Class.forName(conf.getVar(ConfVars.METASTORE_CONNECTION_DRIVER));
            conn = DriverManager.getConnection(
                    conf.getVar(ConfVars.METASTORECONNECTURLKEY),
                    conf.getVar(ConfVars.METASTORE_CONNECTION_USER_NAME),
                    conf.getVar(ConfVars.METASTOREPWD));

            Statement st = conn.createStatement();
            ResultSet rs = st.executeQuery(
                "select t.tbl_name, s.location from tbls t " +
                "join sds s on t.sd_id = s.sd_id");
            while (rs.next()) {
                System.out.println(rs.getString(1) + " : " + rs.getString(2));
            }
        } finally {
            // a try block needs a catch or finally clause to compile;
            // close the JDBC connection here
            if (conn != null) {
                conn.close();
            }
        }
    }
}

ukxgm1gy1#

Add the following lines to hive-site.xml:

<property>
  <name>hive.metastore.local</name>
  <value>true</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://localhost:3306/hive</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hiveuser</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>hivepass</value>
</property>

In jdbc:mysql://localhost:3306/hive, 3306 is the default MySQL port and hive is the name of the MySQL database used for the Hive metastore. Change hiveuser to your MySQL Hive username and hivepass to your MySQL Hive password (a quick JDBC check of these values is sketched at the end of this answer).
If you have not yet created the database for the Hive metastore in MySQL, do this step first. In a terminal, run mysql -u root -p and enter the MySQL root password, then:

mysql> create database hive;
mysql> create user 'hiveuser'@'%' IDENTIFIED BY 'hivepass';
mysql> GRANT all on *.* to 'hiveuser'@localhost identified by 'hivepass';
mysql> flush privileges;

Here, hiveuser and hivepass are the username and password you chose for the Hive metastore.
Note: you need to put the MySQL JDBC connector jar (mysql-connector-java) into $HIVE_HOME/lib and $HADOOP_HOME/lib.
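As a quick, optional sanity check (not part of the original answer; the class name and values below simply mirror the sample hive-site.xml above, and your own host, database, and credentials may differ), you can verify that the metastore database is reachable with plain JDBC:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class MetastoreDbCheck {
    public static void main(String[] args) throws Exception {
        // Values mirror the hive-site.xml above; adjust host, port, database,
        // user and password to match your own metastore setup.
        String url = "jdbc:mysql://localhost:3306/hive";
        String user = "hiveuser";
        String password = "hivepass";

        // Driver class provided by the MySQL JDBC connector jar
        Class.forName("com.mysql.jdbc.Driver");

        try (Connection conn = DriverManager.getConnection(url, user, password);
             Statement st = conn.createStatement();
             // TBLS is part of the metastore schema, so it only exists once the
             // metastore schema has been initialized
             ResultSet rs = st.executeQuery("select tbl_name from TBLS")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}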


z2acfund2#

Regarding hive-site.xml, here is a sample from my test machine. This is for setting up the Hive metastore with a MySQL server installed on localhost.

<configuration>
<property>
 <name>javax.jdo.option.ConnectionURL</name>
 <value>jdbc:mysql://localhost/metastore?createDatabaseIfNotExist=true</value>
 <description>metadata is stored in a MySQL server</description>
</property>
<property>
 <name>javax.jdo.option.ConnectionDriverName</name>
 <value>com.mysql.jdbc.Driver</value>
 <description>MySQL JDBC driver class</description>
</property>
<property>
 <name>javax.jdo.option.ConnectionUserName</name>
 <value>hive</value>
 <description>user name for connecting to mysql server </description>
</property>
<property>
 <name>javax.jdo.option.ConnectionPassword</name>
 <value>123456</value>
 <description>password for connecting to mysql server </description>
</property>
</configuration>

You need to put this file into the <system_path>/apache-hive-x.xx.x-bin/conf directory.
I am not sure how to consume this file from Java, but by specifying the connection string directly in the Java code you can do it as shown below (a sketch that loads hive-site.xml through HiveConf instead follows this code).

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class WriteToHive {
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    static Connection con;
    static Statement stmt;

    public WriteToHive() throws SQLException, ClassNotFoundException, Exception {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e){
            e.printStackTrace();
            throw new ClassNotFoundException("No JDBC Hive Driver found");
            //System.exit(1);
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception(e);
            //System.exit(1);
        }

        con = DriverManager.getConnection("jdbc:hive://localhost:10000/rajen","","");
        stmt = con.createStatement();
    }

    public static void main(String[] args) throws SQLException {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e){
            e.printStackTrace();
            System.exit(1);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
        con = DriverManager.getConnection("jdbc:hive://localhost:10000/rajen","","");
        stmt = con.createStatement();
        //Connection con = DriverManager.getConnection("jdbc:hive://","","");
        String tableName = "company_mas_hive_eclipse_trial";

        ResultSet res = stmt.executeQuery("use rajen");

        String sql = "DROP TABLE IF EXISTS " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        sql = "CREATE TABLE IF NOT EXISTS rajen.company_mas_hive_eclipse_trial (" +
              "Name string," + 
              "dateofincorporation string," + 
              "country string)" +
              "ROW FORMAT DELIMITED FIELDS TERMINATED BY \",\"";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        sql = "show tables '" + tableName + "'";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        if (res.next()){
            System.out.println(res.getString(1));
        }

        sql = "describe " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("=========================================");
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
        System.out.println("=========================================");

        // load data into table
        // NOTE: filepath has to be local to the hive server
        // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
        String filepath = "/home/seo/Refrence_Doc/sampledata/companymas"; //"/rajen/companymas";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        // load data into table
        // NOTE: filepath has to be local to the hive server
        // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
        filepath = "/rajen/companymas";
        sql = "load data inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running: " + sql);
        //res = stmt.executeQuery(sql);

        // select * query
        sql = "select * from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(String.valueOf(res.getString(1)) + "\t" + res.getString(2));
        }

        // regular hive query
        sql = "select count(*) from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    public void createTable(String def, String dbname) throws SQLException{
        @SuppressWarnings("unused")
        ResultSet res = stmt.executeQuery("use " + dbname);
        stmt.executeQuery(def);
    }

    public static void loadData(String filepath, String tableName) throws SQLException{
        stmt.executeQuery("load data local inpath '" + filepath + "' into table " + tableName);
    }
}
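To come back to the part of the question about using hive-site.xml from Java: a minimal sketch, assuming the file is readable at the path you pass in (the path and class name below are placeholders) and the Hive metastore classes are on the classpath, is to load it into a HiveConf and go through HiveMetaStoreClient instead of hard-coding a JDBC connection string:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class MetastoreFromHiveSite {
    public static void main(String[] args) throws Exception {
        // Read the metastore connection settings from hive-site.xml instead of
        // hard-coding them; the path below is a placeholder for your conf directory.
        HiveConf conf = new HiveConf();
        conf.addResource(new Path("file:///path/to/conf/hive-site.xml"));

        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            // Walk databases and tables through the metastore API
            for (String db : client.getAllDatabases()) {
                for (String table : client.getAllTables(db)) {
                    System.out.println(db + "." + table);
                }
            }
        } finally {
            client.close();
        }
    }
}

If hive.metastore.uris is set in hive-site.xml, the client talks to a remote metastore service over Thrift; otherwise it uses the javax.jdo.* settings and connects to the metastore database directly.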
