I'm running into a problem with a Hadoop program that uses JNI. Can you help me? Thank you.
I'm trying to run a Java program on Hadoop 2.4.0 that calls an OpenGL program through JNI.
Here is my MapReduce program.
1. FakeSegmentForJni.java
public class FakeSegmentForJni
{
    public static native boolean Init (String file);
    public static native boolean write ();
    public static native String SegmentALine (String line);
    public static native int runproject ();
    static
    {
        System.load("/usr/lib/i386-linux-gnu/libglut.so.3");
        System.load("/usr/lib/i386-linux-gnu/mesa/libGL.so.1");
        System.load("/usr/lib/i386-linux-gnu/libGLU.so.1");
        System.load("/usr/lib/i386-linux-gnu/libstdc++.so.6");
        System.load("/lib/i386-linux-gnu/libgcc_s.so.1");
        System.load("/lib/i386-linux-gnu/libc.so.6");
        System.load("/usr/lib/i386-linux-gnu/libX11.so.6");
        System.load("/lib/i386-linux-gnu/libm.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXi.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXxf86vm.so.1");
        System.load("/lib/i386-linux-gnu/libexpat.so.1");
        System.load("/usr/lib/i386-linux-gnu/libglapi.so.0");
        System.load("/usr/lib/i386-linux-gnu/libXext.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXdamage.so.1");
        System.load("/usr/lib/i386-linux-gnu/libXfixes.so.3");
        System.load("/usr/lib/i386-linux-gnu/libX11-xcb.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxcb-glx.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-dri2.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-dri3.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-present.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-sync.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxcb.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxshmfence.so.1");
        System.load("/usr/lib/i386-linux-gnu/libdrm.so.2");
        System.load("/lib/i386-linux-gnu/libpthread.so.0");
        System.load("/lib/i386-linux-gnu/libdl.so.2");
        System.load("/lib/ld-linux.so.2");
        System.load("/usr/lib/i386-linux-gnu/libXau.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXdmcp.so.6");
        System.load("/home/grid/MR/facksegement/easyopengl/libFakeSegmentForJni.so");
    }
}
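For comparison, here is a minimal sketch of the same class that loads only the JNI library itself and leaves the dependency chain to the dynamic linker. System.loadLibrary resolves the base name against java.library.path, and the OS linker then resolves the transitive dependencies (libGL, libglut, ...) through LD_LIBRARY_PATH; this sketch assumes both paths are set up as described later in this post.

public class FakeSegmentForJni
{
    public static native boolean Init (String file);
    public static native String SegmentALine (String line);
    public static native int runproject ();
    static
    {
        // Resolved to libFakeSegmentForJni.so somewhere on java.library.path;
        // its own dependencies are then resolved by the OS dynamic linker.
        System.loadLibrary("FakeSegmentForJni");
    }
}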
2. TestFakeSegmentForJni.java
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import java.net.URI;
public class TestFakeSegmentForJni {
    public static class MapTestJni extends Mapper<Writable, Text, Text, Text> {
        protected String s;
        protected void setup(Context context) throws IOException, InterruptedException
        {
            System.load("/usr/lib/i386-linux-gnu/libglut.so.3");
            System.load("/usr/lib/i386-linux-gnu/mesa/libGL.so.1");
            System.load("/usr/lib/i386-linux-gnu/libGLU.so.1");
            System.load("/usr/lib/i386-linux-gnu/libstdc++.so.6");
            System.load("/lib/i386-linux-gnu/libgcc_s.so.1");
            System.load("/lib/i386-linux-gnu/libc.so.6");
            System.load("/usr/lib/i386-linux-gnu/libX11.so.6");
            System.load("/lib/i386-linux-gnu/libm.so.6");
            System.load("/usr/lib/i386-linux-gnu/libXi.so.6");
            System.load("/usr/lib/i386-linux-gnu/libXxf86vm.so.1");
            System.load("/lib/i386-linux-gnu/libexpat.so.1");
            System.load("/usr/lib/i386-linux-gnu/libglapi.so.0");
            System.load("/usr/lib/i386-linux-gnu/libXext.so.6");
            System.load("/usr/lib/i386-linux-gnu/libXdamage.so.1");
            System.load("/usr/lib/i386-linux-gnu/libXfixes.so.3");
            System.load("/usr/lib/i386-linux-gnu/libX11-xcb.so.1");
            System.load("/usr/lib/i386-linux-gnu/libxcb-glx.so.0");
            System.load("/usr/lib/i386-linux-gnu/libxcb-dri2.so.0");
            System.load("/usr/lib/i386-linux-gnu/libxcb-dri3.so.0");
            System.load("/usr/lib/i386-linux-gnu/libxcb-present.so.0");
            System.load("/usr/lib/i386-linux-gnu/libxcb-sync.so.1");
            System.load("/usr/lib/i386-linux-gnu/libxcb.so.1");
            System.load("/usr/lib/i386-linux-gnu/libxshmfence.so.1");
            System.load("/usr/lib/i386-linux-gnu/libdrm.so.2");
            System.load("/lib/i386-linux-gnu/libpthread.so.0");
            System.load("/lib/i386-linux-gnu/libdl.so.2");
            System.load("/lib/ld-linux.so.2");
            System.load("/usr/lib/i386-linux-gnu/libXau.so.6");
            System.load("/usr/lib/i386-linux-gnu/libXdmcp.so.6");
            System.load("/home/grid/MR/facksegement/easyopengl/libFakeSegmentForJni.so");
            FakeSegmentForJni.runproject();
            FakeSegmentForJni.Init("Lex.txt");
            s = FakeSegmentForJni.SegmentALine("jni-value");
        }
        protected void map(Writable key, Text value, Context context)
                throws IOException, InterruptedException {
            // the format of the input value is:
            // mcid totaltimes item1 item2(itemkey=itemvalue)
            context.write(new Text("key"), new Text(s.toString()));
        }
    }
    public static class ReduceTestJni extends Reducer<Text, Text, Text, Text> {
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String outString = "";
            for (Text value : values)
            {
                outString = value.toString();
            }
            //FakeSegmentForJni.runproject();
            context.write(key, new Text(outString));
        }
    }
    @SuppressWarnings("deprecation")
    public void runTestJni (String[] args) throws Exception {
        // the configuration
        Configuration conf = new Configuration();
        GenericOptionsParser goparser = new GenericOptionsParser(conf, args);
        String otherargs[] = goparser.getRemainingArgs();
        // add the library directories and the current dir to the java library path
        conf.set("mapred.child.java.opts", "-Djava.library.path=/usr/lib/i386-linux-gnu:/lib/i386-linux-gnu:/usr/lib/i386-linux-gnu/mesa:/home/grid/hadoop/lib/native:/home/grid/jdk1.7.0_21/jre/lib/i386:/home/grid/jdk1.7.0_21/jre/lib/i386/client:/home/grid/jdk1.7.0_21/jre/lib/i386/server:./");
        //DistributedCache.addCacheArchive(new URI("hdfs://192.168.1.231:9000/lib/lib.jar#./"), conf);
        // the job
        Job job;
        job = new Job(conf, "@here-TestFakeSegmentForJni-hadoopJni");
        job.setJarByClass(TestFakeSegmentForJni.class);
        // the mapper
        job.setMapperClass(MapTestJni.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        // the reducer
        job.setReducerClass(ReduceTestJni.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setNumReduceTasks(1);
        // the paths
        FileInputFormat.addInputPath(job, new Path(otherargs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
        job.waitForCompletion(true);
    }
    public static void main(String[] args) throws Exception {
        System.out.println ("In this project, we test jni!\n");
        System.load("/usr/lib/i386-linux-gnu/libglut.so.3");
        System.load("/usr/lib/i386-linux-gnu/mesa/libGL.so.1");
        System.load("/usr/lib/i386-linux-gnu/libGLU.so.1");
        System.load("/usr/lib/i386-linux-gnu/libstdc++.so.6");
        System.load("/lib/i386-linux-gnu/libgcc_s.so.1");
        System.load("/lib/i386-linux-gnu/libc.so.6");
        System.load("/usr/lib/i386-linux-gnu/libX11.so.6");
        System.load("/lib/i386-linux-gnu/libm.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXi.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXxf86vm.so.1");
        System.load("/lib/i386-linux-gnu/libexpat.so.1");
        System.load("/usr/lib/i386-linux-gnu/libglapi.so.0");
        System.load("/usr/lib/i386-linux-gnu/libXext.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXdamage.so.1");
        System.load("/usr/lib/i386-linux-gnu/libXfixes.so.3");
        System.load("/usr/lib/i386-linux-gnu/libX11-xcb.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxcb-glx.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-dri2.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-dri3.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-present.so.0");
        System.load("/usr/lib/i386-linux-gnu/libxcb-sync.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxcb.so.1");
        System.load("/usr/lib/i386-linux-gnu/libxshmfence.so.1");
        System.load("/usr/lib/i386-linux-gnu/libdrm.so.2");
        System.load("/lib/i386-linux-gnu/libpthread.so.0");
        System.load("/lib/i386-linux-gnu/libdl.so.2");
        System.load("/lib/ld-linux.so.2");
        System.load("/usr/lib/i386-linux-gnu/libXau.so.6");
        System.load("/usr/lib/i386-linux-gnu/libXdmcp.so.6");
        System.load("/home/grid/MR/facksegement/easyopengl/libFakeSegmentForJni.so");
        // test jni on linux local
        /*String s = FakeSegmentForJni.SegmentALine("now we test FakeSegmentForJni");
        System.out.print(s);*/
        System.out.println(System.getProperty("java.library.path"));
        // test jni on hadoop
        new TestFakeSegmentForJni().runTestJni(args);
    } // main
}
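On Hadoop 2.x, the mapred.child.java.opts property used above is deprecated. Here is a minimal sketch, with the same directories, of the per-task equivalents. Note that -Djava.library.path only controls where System.loadLibrary looks; the dependencies of an already-loaded .so are resolved by the OS dynamic linker, which reads LD_LIBRARY_PATH, and on YARN that variable can be passed to each task through mapreduce.map.env.

// sketch only: same directories as in runTestJni above
conf.set("mapreduce.map.java.opts",
        "-Djava.library.path=/usr/lib/i386-linux-gnu:/lib/i386-linux-gnu:/usr/lib/i386-linux-gnu/mesa:./");
// environment of the map task process itself, so the OS linker can
// resolve the dependencies of libFakeSegmentForJni.so (libGL, libglut, ...)
conf.set("mapreduce.map.env",
        "LD_LIBRARY_PATH=/usr/lib/i386-linux-gnu:/lib/i386-linux-gnu:/usr/lib/i386-linux-gnu/mesa:./");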
Here is my C++ OpenGL code.
FakeSegmentForJni.cpp
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <string>
#include <vector>
#include <fstream>
#include <iostream>
#include <cmath>
#include <cctype>
#include <algorithm>
#include <GL/glut.h>
#include <GL/freeglut.h>
#include "FakeSegmentForJni.h"
using namespace std;
vector<string> WordVec;
string sWordt;
vector<string> WordVect;
void init();
void display();
int runpro();
bool write(string, const char*);
void init()
{
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glMatrixMode(GL_PROJECTION);
    glOrtho(-5, 5, -5, 5, 5, 15);
    glMatrixMode(GL_MODELVIEW);
    gluLookAt(0, 0, 10, 0, 0, 0, 0, 1, 0);
}
void display()
{
    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(1.0, 0, 0);
    glutWireTeapot(3);
    glFlush();
}
bool write(string ss, const char *ff)
{
    string hh = " open f1 fail";
    string vv = " write f3 fail";
    ofstream outfile(ff);
    if (!outfile)
    {
        WordVect.push_back(vv);
        printf("write f3 fail");
        return false;
    }
    else
    {
        outfile << ss << endl;
    }
    outfile.close();
    return true;
}
JNIEXPORT jint JNICALL Java_FakeSegmentForJni_runproject(JNIEnv *env, jclass obj)
{
    write("runpro()", "/home/grid/f3/runpro().dat");
    int c = 2;
    int angleX = 0;
    // string literals are const; copy them into writable buffers for glutInit
    char arg0[] = "hello";
    char arg1[] = "world";
    char *v[] = {arg0, arg1};
    glutInit(&c, v);
    write(" glutInit;", "/home/grid/f3/glutInit.dat");
    glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE);
    write(" glutInit;", "/home/grid/f3/glutInitDisplayMode.dat");
    glutInitWindowPosition(0, 0);
    write(" glutInit;", "/home/grid/f3/glutInitWindowPosition.dat");
    glutInitWindowSize(300, 300);
    write(" glutInit;", "/home/grid/f3/glutInitWindowSize.dat");
    glutCreateWindow("OpenGL 3D View");
    write(" glutInit;", "/home/grid/f3/glutCreateWindow.dat");
    init();
    glutDisplayFunc(display);
    write(" glutInit;", "/home/grid/f3/glutDisplayFunc.dat");
    while (true)  // never returns; spins the freeglut event loop manually
    {
        glutMainLoopEvent();
        write(" glutInit;", "/home/grid/f3/glutMainLoopEvent.dat");
        angleX = angleX + 1;
        glutPostRedisplay();
        write(" glutInit;", "/home/grid/f3/glutPostRedisplay.dat");
    }
    return 0;
}
JNIEXPORT jboolean JNICALL Java_FakeSegmentForJni_Init(JNIEnv *env, jclass obj, jstring line)
{
    write(" open f3 success", "/home/grid/f3/fs.dat");
    const char *pFileName = env->GetStringUTFChars(line, NULL);
    if (pFileName == NULL)
        return JNI_FALSE;
    ifstream in(pFileName);
    if (!in)
    {
        cerr << "Can not open the file " << pFileName << endl;
        env->ReleaseStringUTFChars(line, pFileName);
        return JNI_FALSE;
    }
    string sWord;
    while (getline(in, sWord))
    {
        WordVec.push_back(sWord);
    }
    env->ReleaseStringUTFChars(line, pFileName);
    return JNI_TRUE;
}
JNIEXPORT jstring JNICALL Java_FakeSegmentForJni_SegmentALine(JNIEnv *env, jclass obj, jstring line)
{
    char buf[128];
    buf[0] = 0;
    const char *str = env->GetStringUTFChars(line, NULL);
    if (str == NULL)
        return NULL;
    // bounded copy/concat so a long input line cannot overflow buf
    strncpy(buf, str, sizeof(buf) - 1);
    buf[sizeof(buf) - 1] = 0;
    if (!WordVec.empty())
        strncat(buf, WordVec.at(0).c_str(), sizeof(buf) - strlen(buf) - 1);
    env->ReleaseStringUTFChars(line, str);
    return env->NewStringUTF(buf);
}
I compiled the OpenGL program into a shared library (libFakeSegmentForJni.so) and packaged the Hadoop program into TestFakeSegmentForJniHadoop.jar. libFakeSegmentForJni.so depends on other shared libraries, which I uploaded to HDFS. I run the Hadoop job with the following command:
bin/hadoop jar /home/grid/MR/facksegement/easyopengl/TestFakeSegmentForJniHadoop.jar \
-files /home/grid/MR/facksegement/easyopengl/libFakeSegmentForJni.so,\
hdfs://slave1:9000/input/Lex.txt,\
hdfs://slave1:9000/lib/libjvm.so,\
hdfs://slave1:9000/lib/ld-2.19.so,\
hdfs://slave1:9000/lib/libglut.so.3.9.0,\
hdfs://slave1:9000/lib/libxcb-present.so.0.0.0,\
hdfs://slave1:9000/lib/ld-linux.so.2,\
hdfs://slave1:9000/lib/libm-2.19.so,\
hdfs://slave1:9000/lib/libxcb.so.1,\
hdfs://slave1:9000/lib/lib,\
hdfs://slave1:9000/lib/libm.so.6,\
hdfs://slave1:9000/lib/libxcb.so.1.1.0,\
hdfs://slave1:9000/lib/libc-2.19.so,\
hdfs://slave1:9000/lib/libpthread-2.19.so,\
hdfs://slave1:9000/lib/libxcb-sync.so.1,\
hdfs://slave1:9000/lib/libc.so.6,\
hdfs://slave1:9000/lib/libpthread.so.0,\
hdfs://slave1:9000/lib/libxcb-sync.so.1.0.0,\
hdfs://slave1:9000/lib/libdl-2.19.so,\
hdfs://slave1:9000/lib/libstdc++.so.6,\
hdfs://slave1:9000/lib/libXdamage.so.1,\
hdfs://slave1:9000/lib/libdl.so.2,\
hdfs://slave1:9000/lib/libstdc++.so.6.0.19,\
hdfs://slave1:9000/lib/libXdamage.so.1.1.0,\
hdfs://slave1:9000/lib/libdrm.so.2,\
hdfs://slave1:9000/lib/libX11.so.6,\
hdfs://slave1:9000/lib/libXdmcp.so.6,\
hdfs://slave1:9000/lib/libdrm.so.2.4.0,\
hdfs://slave1:9000/lib/libX11.so.6.3.0,\
hdfs://slave1:9000/lib/libXdmcp.so.6.0.0,\
hdfs://slave1:9000/lib/libexpat.so.1,\
hdfs://slave1:9000/lib/libX11-xcb.so.1,\
hdfs://slave1:9000/lib/libXext.so.6,\
hdfs://slave1:9000/lib/libexpat.so.1.6.0,\
hdfs://slave1:9000/lib/libX11-xcb.so.1.0.0,\
hdfs://slave1:9000/lib/libXext.so.6.4.0,\
hdfs://slave1:9000/lib/libXau.so.6,\
hdfs://slave1:9000/lib/libXfixes.so.3,\
hdfs://slave1:9000/lib/libgcc_s.so.1,\
hdfs://slave1:9000/lib/libXau.so.6.0.0,\
hdfs://slave1:9000/lib/libXfixes.so.3.1.0,\
hdfs://slave1:9000/lib/libglapi.so.0,\
hdfs://slave1:9000/lib/libxcb-dri2.so.0,\
hdfs://slave1:9000/lib/libXi.so.6,\
hdfs://slave1:9000/lib/libglapi.so.0.0.0,\
hdfs://slave1:9000/lib/libxcb-dri2.so.0.0.0,\
hdfs://slave1:9000/lib/libXi.so.6.1.0,\
hdfs://slave1:9000/lib/libGL.so.1,\
hdfs://slave1:9000/lib/libxcb-dri3.so.0,\
hdfs://slave1:9000/lib/libxshmfence.so.1,\
hdfs://slave1:9000/lib/libGL.so.1.2.0,\
hdfs://slave1:9000/lib/libxcb-dri3.so.0.0.0,\
hdfs://slave1:9000/lib/libxshmfence.so.1.0.0,\
hdfs://slave1:9000/lib/libGLU.so.1,\
hdfs://slave1:9000/lib/libxcb-glx.so.0,\
hdfs://slave1:9000/lib/libXxf86vm.so.1,\
hdfs://slave1:9000/lib/libGLU.so.1.3.1,\
hdfs://slave1:9000/lib/libxcb-glx.so.0.0.0,\
hdfs://slave1:9000/lib/libXxf86vm.so.1.0.0,\
hdfs://slave1:9000/lib/libglut.so.3,\
hdfs://slave1:9000/lib/libxcb-present.so.0,\
hdfs://slave1:9000/input/f1.dat \
/input \
/output/outputJNI
But I get the following error:
15/05/25 10:06:55 INFO mapreduce.Job: Task Id : attempt_1432519520706_0002_m_000004_2, Status : FAILED
Exception from container-launch:
org.apache.hadoop.util.Shell$ExitCodeException:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:464)
at org.apache.hadoop.util.Shell.run(Shell.java:379)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
at org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor.launchContainer(DefaultContainerExecutor.java:195)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:283)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:79)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:722)
I suspect this error is caused by JNI. I wrote a small test Java program that calls liboglgpu.so through JNI, and it works fine. I also ran ldd on liboglgpu.so, and every library it needs resolves.
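That local test was roughly the following (a sketch; the class name is made up for illustration, and it assumes the shell's LD_LIBRARY_PATH already covers the OpenGL dependencies):

public class LocalJniSmokeTest
{
    public static void main(String[] args)
    {
        // same load as in the mapper, but in a plain local JVM
        System.load("/home/grid/MR/facksegement/easyopengl/libFakeSegmentForJni.so");
        FakeSegmentForJni.Init("Lex.txt");
        System.out.println(FakeSegmentForJni.SegmentALine("local-test"));
    }
}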
My OpenGL program contains both standard C code and OpenGL code. The standard C code runs fine, but as soon as Hadoop reaches the OpenGL code, the program stops working.
If I run the Hadoop program from Eclipse (standalone mode), it works on the local machine. In distributed mode, however, it fails on the datanodes. Both the namenode and the datanodes have an OpenGL environment.
I have tried many different approaches:
1) liboglgpu.so depends on other shared libraries. I uploaded those libraries to HDFS and ran the Hadoop job with "-files" so that they would be distributed to the tasks (see the sketch after this list).
2) I tried loading the libraries at the code level (e.g., System.load("/usr/lib/libGL.so.0.1");).
3) I set JAVA_LIBRARY_PATH in hadoop-env.sh and yarn-env.sh, and LD_LIBRARY_PATH in /etc/profile; both point at the directories where these libraries live.
4) I put these libraries under the path /hadoop/lib/native.
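For reference, approach 1) can also be written programmatically instead of with "-files". A minimal sketch against the job object from runTestJni above; the "#name" fragment sets the symlink name created in each task's working directory, so "./" must be on the task's library path for the linker to see the files:

job.addCacheFile(new java.net.URI("hdfs://slave1:9000/lib/libGL.so.1#libGL.so.1"));
job.addCacheFile(new java.net.URI("hdfs://slave1:9000/lib/libglut.so.3#libglut.so.3"));
// ...one addCacheFile per shared library listed in the command above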
Can you tell me what else I can try? Why won't the Hadoop program run?