java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext

h6my8fg2  于 2021-06-27  发布在  Hive
关注(0)|答案(1)|浏览(356)

我正在尝试使用 HiveContext，但出现以下错误：

exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext

有什么建议吗?

package com.mapr.examples;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

import org.apache.spark.sql.Row;
import org.apache.spark.sql.hive.HiveContext;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.*;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import static org.apache.spark.sql.functions.*;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * Minimal Spark application demonstrating an attempt to create a {@code HiveContext}.
 *
 * <p>The HiveContext construction attempts are kept commented out because each one
 * failed at runtime with {@code java.lang.NoClassDefFoundError:
 * org/apache/spark/sql/hive/HiveContext} — presumably the spark-hive jar is missing
 * from the runtime classpath (TODO: confirm against the submit/packaging setup).
 */
public class SparkSQLHiveContextExample {

    /**
     * Application entry point: creates a Spark context and shuts it down cleanly.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if Spark initialization fails
     */
    public static void main(String[] args) throws Exception {
        SparkConf sparkConf = new SparkConf().setAppName("SparkSQLHiveContextExample");

        // try-with-resources: JavaSparkContext implements java.io.Closeable, and
        // close() delegates to stop(). The original called stop() AND close()
        // (redundant) and would have leaked the context on an exception thrown
        // in between; this form guarantees exactly one shutdown on every path.
        try (JavaSparkContext ctx = new JavaSparkContext(sparkConf)) {
            // SQLContext sqlContext = new HiveContext(ctx); // exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext
            // HiveContext hiveContext = new HiveContext(ctx.sc()); // exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext
            // HiveContext hiveContext = new org.apache.spark.sql.hive.HiveContext(ctx.sc()); // exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/hive/HiveContext
        }
    }
}

pom.xml文件

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.mapr.sample</groupId>
    <artifactId>SparkSQLExample</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <!-- Spark 1.6 on Scala 2.10; all Spark artifacts must share the same
             Scala suffix and version. -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.10</artifactId>
            <version>1.6.0</version>
        </dependency>
        <!-- Provides org.apache.spark.sql.hive.HiveContext. It must also be
             present on the RUNTIME classpath (e.g. packaged into the job jar or
             passed via spark-submit - -jars), otherwise HiveContext fails with
             NoClassDefFoundError even though compilation succeeds. -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.10</artifactId>
            <version>1.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-catalyst_2.10</artifactId>
            <version>1.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>1.2.2</version>
        </dependency>
        <!-- NOTE: maven-compiler-plugin was previously (incorrectly) listed here
             as a <dependency>. It is a build plugin, not a library, and is
             already declared under <build><plugins> below, so the bogus
             dependency entry has been removed. -->
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
nnsrf1az

nnsrf1az1#

你能在添加这个依赖项之后再试一次吗?
应该有用。
Maven 链接（原文中的链接在转载时丢失）

相关问题