Error caused by adding Mahout dependencies to Gradle

ar7v8xwq · posted 2021-05-27 in Hadoop

I'm trying to run a Hadoop job, using Gradle to build my project. When I add the Mahout dependencies

apply plugin: 'java'

repositories {
    mavenCentral()
}

dependencies {
    compile group: 'org.apache.hadoop', name: 'hadoop-mapreduce-client-core', version: '3.1.2'

    compile group: 'org.apache.hadoop', name: 'hadoop-common', version: '3.1.2'

    compile group: 'org.apache.mahout', name: 'mahout-hdfs', version: '0.13.0'
    compile group: 'org.apache.mahout', name: 'mahout-mr', version: '0.13.0'
    compile group: 'org.apache.mahout', name: 'mahout-math', version: '0.13.0'
}

jar {
    from configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
}

ext.hadoopVersion = "3.1.2"

to the build file, I get the following error:

Exception in thread "main" java.lang.IllegalAccessError: class org.apache.hadoop.hdfs.web.HftpFileSystem cannot access its superinterface org.apache.hadoop.hdfs.web.TokenAspect$TokenManagementDelegator
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:760)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
    at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:370)
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
    at java.util.ServiceLoader$1.next(ServiceLoader.java:480)
    at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:3217)
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3262)
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3301)
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352)
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:227)
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:463)
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361)
    at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.addInputPath(FileInputFormat.java:542)
    at ConvertText.ConvertTextJob.main(ConvertTextJob.java:25)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:318)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:232)

But without this dependency, the job runs fine.
Here is the code where I use the library:

package ConvertText;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.mahout.math.VectorWritable;

public class ConvertTextJob {
  public static void main(String[] args){
    try {
      //Setup for the first job
      Configuration conf = new Configuration();

      //Setup for jar of class
      Job job = Job.getInstance(conf, "Convert Text");
      job.setJarByClass(ConvertTextJob.class);

      // path to input/output in HDFS
      FileInputFormat.addInputPath(job, new Path(args[0]));
      FileOutputFormat.setOutputPath(job, new Path(args[1]));

      //Set Mapper class
      job.setMapperClass(ConvertTextMapper.class);

      // Outputs from the Mapper
      job.setOutputKeyClass(NullWritable.class);
      job.setOutputValueClass(VectorWritable.class);

      //Set format of the key/value format
      job.setOutputFormatClass(SequenceFileOutputFormat.class);

      job.setNumReduceTasks(0);

      // Block until the job is completed.
      System.exit(job.waitForCompletion(true) ? 0 : 1);

    } catch (IOException | InterruptedException | ClassNotFoundException e) {
      System.err.println(e.getMessage());
    }
  }

}

Does anyone know what the problem is, and how I can fix it so that I can use the dependency? I'm working on a project that involves Mahout and needs it.


tjjdgumg #1

You need to find and remove/exclude hadoop-hdfs-2.x.x.jar from the classpath. It conflicts with the newer HDFS version that Mahout pulls in.
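
A minimal sketch of what that exclusion could look like in this build script. Exactly which transitive module drags in the old jar is an assumption here (check with gradle dependencies); the idea is to keep the Hadoop 3.1.2 artifacts declared above and stop the Mahout artifacts from pulling in their own Hadoop 2.x jars:

// Hedged sketch: the excluded module names are assumptions; confirm which
// Hadoop 2.x artifacts actually appear in your dependency tree first.
dependencies {
    compile(group: 'org.apache.mahout', name: 'mahout-mr', version: '0.13.0') {
        // keep Mahout's MapReduce code but drop its bundled Hadoop 2.x HDFS classes
        exclude group: 'org.apache.hadoop', module: 'hadoop-hdfs'
    }
    compile(group: 'org.apache.mahout', name: 'mahout-hdfs', version: '0.13.0') {
        exclude group: 'org.apache.hadoop', module: 'hadoop-hdfs'
    }
}

A broader variant is to exclude the whole group 'org.apache.hadoop' from the Mahout dependencies, since Hadoop 3.1.2 is already declared explicitly. Because the jar task above packs configurations.compile into a fat jar, excluding the old jar at the dependency level also keeps those classes out of the jar that the Hadoop 3.1.2 runtime loads.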


cuxqih21 #2

The hdfs module was split out of the Mahout core in mahout-0.13.0:
https://github.com/apache/mahout/tree/mahout-0.13.0

~/mahout/mahout-0.13.0 $ ls
  100-interpreter-spec.yaml       LICENSE.txt           NOTICE.txt
  README.md                       bin/                  buildtools/
  community/                      conf/                 core/
  distribution/                   doap_Mahout.rdf       docs/
  dry_run.sh                      engine/               examples/
  experimental/                   flink/                h2o/
  hdfs/                           integration/          issuse
  lib/                            mahout.iml            math/
  math-scala/                     mr/                   pom.xml
  resource-managers/              runtests.sh           scratch
  spark/                          src/                  target/
  viennacl/                       viennacl-omp/

You have to add the mahout-hdfs artifact built for 0.13.0:

// https://mvnrepository.com/artifact/org.apache.mahout/mahout-hdfs
compile group: 'org.apache.mahout', name: 'mahout-hdfs', version: '0.13.0'

https://mvnrepository.com/artifact/org.apache.mahout/mahout-hdfs/0.13.0
