hadoop nullpinter异常

9rygscc1  于 2021-06-02  发布在  Hadoop
关注(0)|答案(1)|浏览(307)

我是hadoop编程的新手,我尝试在map-side-join上编写代码,但是得到了空指针异常。请帮忙,让我知道问题的原因。

package mapreduce.mapSideJoin;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.HashMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.io.LongWritable;

public class mapSideJoinUserMapper extends Mapper<LongWritable,Text,Text,Text > {

    private  HashMap<String, String> cacheMap = new HashMap<String , String>();
    private BufferedReader br ;

    protected void setup (Context context)throws IOException, InterruptedException{

        Path[] pathArray = DistributedCache.getLocalCacheFiles(context.getConfiguration());
        try {
        for ( Path eachPath : pathArray){
            //System.out.pritn(eachPath.toString());
            if(eachPath.getName().toString().trim().equals("custData.txt"));
            LoadHashMap(eachPath,context);
        }

        }

        catch(IOException e){

            System.err.println("An IOException was caught!");

        }

    }

    private void LoadHashMap(Path filePath, Context context) throws IOException{

        String strReadLine="";
        br = new BufferedReader(new FileReader(filePath.toString()));
        while((strReadLine=br.readLine())!= null){

            String[] wordArray = strReadLine.split(",");
            cacheMap.put(wordArray[0].trim(),wordArray[1].trim());
        }
        br.close();
    }

    public void map(LongWritable key, Text value , Context context ) throws IOException, InterruptedException{

        String [] word = value.toString().split(",");
        String name =cacheMap.get(word[0]);
         context.write(new Text(name), new Text(word[1]));
        //context.write(new Text("jdewdeded"), new Text("dededededede"));

    }
}

这是我的Map代码和数据文件

0001,Zunil Kumar , Mumbai,India
0002,vikas mandal, Haryana, India
0003,Karan, JFK,France
0004,manish,banglore,India
0005,devesh,meerut,India

0001,crax,2,300
0002,munch,1,10
0003,lays,1,20
0004,ship,1,200
0005,barOne,3,400
0002,crax,2,300
0001,kurkure,3,101
0003,milk,1,20
0004,butter,2,30
0005,meat,1,1220
0002,color,1,230
0003,dailrymilk,1,20


谢谢

qmb5sa22

qmb5sa221#

我尝试实现与你尝试的相同的逻辑并得到了输出。

public class StackOverflowMapsideJoin {
public static class StMapperr extends Mapper<LongWritable,Text,Text,Text>
{
    Text outkey=new Text();
    Text outvalue= new Text();
    HashMap<String,String> hmap=new HashMap<String,String>();
    public void setup(Context context) throws IOException
    {
        Path paths[] =DistributedCache.getLocalCacheFiles(context.getConfiguration());
        for(Path p :paths)
        {
            if(p.getName().equals("person.dat"));
            {
            BufferedReader br=new BufferedReader(new FileReader(p.toString()));
            //0001,Zunil Kumar , Mumbai,India
            String line=br.readLine();
            while(line !=null)
            {
                String cols[]=line.split(",");
                String id=cols[0];
                String name=cols[1];
                hmap.put(id, name);
                line=br.readLine();
            }
            br.close();
            }
        }
    }
    public void map(LongWritable key,Text values,Context context) throws IOException, InterruptedException
    {
        ////0001,crax,2,300
        String mapcols[]=values.toString().split(",");
        String cid=mapcols[0];
        String type=mapcols[1];
        outkey.set(hmap.get(cid));
        outvalue.set(type);
        context.write(outkey, outvalue);
    }
}
@SuppressWarnings("deprecation")
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException, URISyntaxException {

    Configuration conf = new Configuration();
    Job job=new Job(conf,"cdecedc");

    job.setJarByClass(StackOverflowMapsideJoin.class);
    job.setMapperClass(StMapperr.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setNumReduceTasks(0);
    DistributedCache.addCacheFile(new URI("/person.dat"), job.getConfiguration());
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true)?1:0);

}

}
如果有用的话请告诉我!!!

相关问题