Query against a remote HiveServer2 using Spring hangs

hlswsv35 · posted 2021-05-30 in Hadoop

I am trying to connect to Hive using Spring and a JDBC connection. I am using the same sample as in the Spring manual (https://github.com/spring-projects/spring-hadoop-samples/tree/master/hive). The connection part succeeds, but when I run a query it hangs and I never get a result.
I have seen several posts describing a similar problem, but none with a concrete/working answer.
Can anyone help me? I am starting to think there is an incompatibility between HiveServer2 and the Spring framework.
Here is my code:

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.data.hadoop.hive.HiveTemplate;

public class HiveApp {

    private static final Log log = LogFactory.getLog(HiveApp.class);

    public static void main(String[] args) throws Exception {
        try {
            AbstractApplicationContext context = new ClassPathXmlApplicationContext(
                    "/META-INF/spring/hive-context.xml", HiveApp.class);
            log.info("Hive Application Running");
            System.out.println("Hive Application Running");
            context.registerShutdownHook();

            HiveTemplate template = context.getBean(HiveTemplate.class);
            System.out.println("Hive Template = " + template);
            List<String> results = template.query("show tables");
            for (String result : results) {
                System.out.println(result);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

Here is my hive-context.xml:

<?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans        http://www.springframework.org/schema/beans/spring-beans.xsd
  http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
  http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">

<beans:import resource="jdbc-context.xml"/>

<context:property-placeholder location="hadoop.properties,hive.properties"/>

<context:component-scan base-package="com.oreilly.springdata.hadoop.hive" />

<configuration>
    fs.default.name=${hd.fs}
</configuration>

<!-- This will throw a NPE at the end of running the app that should be ignored

     To avoid this, run against the stand-alone server, use the command line  

     hive -hiveconf fs.default.name=hdfs://localhost:9000 -hiveconf mapred.job.tracker=localhost:9001

     -->

<hive-server port="${hive.port}" auto-startup="true"
             properties-location="hive-server.properties"/>          

<hive-client-factory host="${hive.host}" port="${hive.port}"/>  

<hive-template id="hiveTemplate"/>      

</beans:beans>

Here is my jdbc-context.xml:

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:hdp="http://www.springframework.org/schema/hadoop"
xmlns:batch="http://www.springframework.org/schema/batch"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
  http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
  http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
  http://www.springframework.org/schema/batch   http://www.springframework.org/schema/batch/spring-batch.xsd">

<bean id="hiveDriver" class="org.apache.hadoop.hive.jdbc.HiveDriver"/>

<bean id="dataSource" class="org.springframework.jdbc.datasource.SimpleDriverDataSource">
  <constructor-arg name="driver" ref="hiveDriver"/>
  <constructor-arg name="url" value="${hive.url}"/>
</bean>

<bean id="jdbcTemplate" class="org.springframework.jdbc.core.JdbcTemplate">
  <constructor-arg ref="dataSource"/>
</bean>

</beans>

Here is my hive.properties:

hive.host=somehost.int
hive.port=10000
hive.url=jdbc:hive2://${hive.host}:${hive.port}/
hive.table=tablename

Here is my hive-server.properties:

hive.exec.drop.ignorenonexistent=true

Here is my pom.xml:

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<artifactId>spring-data-book-hadoop-hive</artifactId>

<name>Spring Data Book - Hadoop Hive</name>

<parent>
    <groupId>com.oreilly.springdata</groupId>
    <artifactId>spring-data-book</artifactId>
    <version>1.0.0.BUILD-SNAPSHOT</version>
    <relativePath>../../pom.xml</relativePath>
</parent>

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <spring.hadoop.version>1.0.0.RELEASE</spring.hadoop.version>
    <spring.integration.version>2.1.3.RELEASE</spring.integration.version>      
    <hadoop.version>1.0.1</hadoop.version>
    <hive.version>0.8.1</hive.version>
    <thrift.version>0.7.0</thrift.version>
    <log4j.version>1.2.17</log4j.version>
</properties>

<dependencies>

    <dependency>
        <groupId>org.springframework.data</groupId>
        <artifactId>spring-data-hadoop</artifactId>
        <version>${spring.hadoop.version}</version>
        <exclusions>
            <exclusion>
                <groupId>org.springframework</groupId>
                <artifactId>spring-context-support</artifactId>
            </exclusion>
            <exclusion>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
        </exclusions>                
    </dependency> 

    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-jdbc</artifactId>
        <version>${spring.version}</version>
    </dependency>
    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-test</artifactId>
        <version>${spring.version}</version>
    </dependency>       

    <dependency>
        <groupId>org.springframework</groupId>
        <artifactId>spring-tx</artifactId>
        <version>${spring.version}</version>
    </dependency>

    <dependency>
        <groupId>org.springframework.integration</groupId>
        <artifactId>spring-integration-core</artifactId>
        <version>${spring.integration.version}</version>
    </dependency>       

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>${hadoop.version}</version>
        <scope>compile</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-metastore</artifactId>
        <version>${hive.version}</version>
        <exclusions>
            <exclusion> 
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
            <exclusion> 
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>            
    </dependency>

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-service</artifactId>
        <version>${hive.version}</version>      
        <exclusions>
            <exclusion>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
            <exclusion> 
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>           
    </dependency>   

    <!-- thrift only needed as compile time to support 'native' implementation of HivePasswordRepository -->
    <dependency>
        <groupId>org.apache.thrift</groupId>
        <artifactId>libthrift</artifactId>
        <version>${thrift.version}</version>
    </dependency>           

    <dependency>
        <groupId>org.apache.thrift</groupId>
        <artifactId>libfb303</artifactId>
        <version>${thrift.version}</version>
    </dependency>               

    <!-- runtime Hive deps start -->

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-common</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
    </dependency>       

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-builtins</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
    </dependency>           

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
    </dependency>           

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-shims</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
    </dependency>           

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-serde</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
        <exclusions>
            <exclusion> 
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
            <exclusion> 
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>               
    </dependency>               

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-contrib</artifactId>
        <version>${hive.version}</version>
        <scope>runtime</scope>
    </dependency>
    <!-- runtime Hive deps end -->      

    <dependency>
        <groupId>org.codehaus.groovy</groupId>
        <artifactId>groovy</artifactId>
        <version>1.8.5</version>
        <scope>runtime</scope>
    </dependency>                   

    <dependency>
        <groupId>ch.qos.logback</groupId>
        <artifactId>logback-classic</artifactId>
        <version>${logback.version}</version>
    </dependency>

</dependencies>

<repositories>
    <repository>
        <id>spring-milestone</id>
        <url>http://repo.springsource.org/libs-milestone</url>
    </repository>
</repositories>

<build>
    <plugins>
        <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>appassembler-maven-plugin</artifactId>
            <version>1.2.2</version>
            <configuration>
                <repositoryLayout>flat</repositoryLayout>
                <!-- Extra JVM arguments that will be included in the bin scripts -->
                <extraJvmArguments>-Xms512m -Xmx1024m</extraJvmArguments>
                <programs>
                    <program>
                        <mainClass>com.oreilly.springdata.hadoop.hive.HiveApp</mainClass>
                        <name>hiveApp</name>
                    </program>
                    <program>
                        <mainClass>com.oreilly.springdata.hadoop.hive.HiveAppWithApacheLogs</mainClass>
                        <name>hiveAppWithApacheLogs</name>
                    </program>          
                </programs>
            </configuration>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-antrun-plugin</artifactId>
            <executions>
                <execution>
                    <id>config</id>
                    <phase>package</phase>
                    <configuration>
                        <tasks>
                            <copy todir="target/appassembler/data">
                                <fileset dir="data"/>
                            </copy>
                        </tasks>
                    </configuration>
                    <goals>
                        <goal>run</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>       
    </plugins>
</build>
</project>

Answer #1 (by 1cosmwyk):

The problem is the driver class (org.apache.hadoop.hive.jdbc.HiveDriver): that is the driver for the original HiveServer, not HiveServer2. Change it to org.apache.hive.jdbc.HiveDriver, which matches your jdbc:hive2:// URL.
See the example below:

package org.springframework.samples.hadoop.hive;

import org.apache.hive.jdbc.HiveDriver;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;

public class TestHive {

    public static void main(String[] args) {
        // HiveServer2 JDBC driver: org.apache.hive.jdbc.HiveDriver
        java.sql.Driver driver = new HiveDriver();
        // URL layout is jdbc:hive2://<host>:<port>/<database>
        SimpleDriverDataSource ds = new SimpleDriverDataSource(driver, "jdbc:hive2://$host:10000/$DB");
        JdbcTemplate hiveTemplate = new JdbcTemplate(ds);
        System.out.println(hiveTemplate.queryForList("show tables"));
        System.out.println("Done");
    }
}
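
Since the question wires the driver through Spring XML rather than code, the same fix can be made directly in jdbc-context.xml. This is only a sketch of the question's own dataSource wiring with the driver class swapped to the HiveServer2 driver; it assumes the hive-jdbc jar providing org.apache.hive.jdbc.HiveDriver is on the classpath:

<!-- same beans as in the question's jdbc-context.xml, only the driver class changes -->
<bean id="hiveDriver" class="org.apache.hive.jdbc.HiveDriver"/>

<bean id="dataSource" class="org.springframework.jdbc.datasource.SimpleDriverDataSource">
  <constructor-arg name="driver" ref="hiveDriver"/>
  <constructor-arg name="url" value="${hive.url}"/>
</bean>

<bean id="jdbcTemplate" class="org.springframework.jdbc.core.JdbcTemplate">
  <constructor-arg ref="dataSource"/>
</bean>

With this wiring, hive.url keeps the jdbc:hive2://host:port/ form already defined in hive.properties, and queries go through the jdbcTemplate bean.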
