Hadoop Inverted Index Example
package cn.yws;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

// Inverted index example. Place the test files file1 and file2 under the
// Hadoop index_in directory before running the job.
public class MyInvertedIndex {

    public static class Map extends Mapper<Object, Text, Text, Text> {
        private Text keyinfo = new Text();
        private Text valueinfo = new Text();
        private FileSplit split;

        // Map phase: for every word, emit ("word:fileN", "1").
        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            // Recover which input file this split belongs to.
            split = (FileSplit) context.getInputSplit();
            StringTokenizer tokenizer = new StringTokenizer(value.toString());
            while (tokenizer.hasMoreTokens()) {
                // Relies on the test files being named file1, file2, ...:
                // everything from "file" onward is used as the document name.
                int splitindex = split.getPath().toString().indexOf("file");
                keyinfo.set(tokenizer.nextToken() + ":" + split.getPath().toString().substring(splitindex));
                valueinfo.set("1");
                // e.g. key = "word:file3", value = "1"
                context.write(keyinfo, valueinfo);
            }
        }
    }

    public static class Combine extends Reducer<Text, Text, Text, Text> {
        private Text infoText = new Text();

        // Combine phase: sum the counts for each "word:fileN" key, then move the
        // file name into the value so the reducer groups by word only.
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (Text value : values) {
                sum += Integer.parseInt(value.toString());
            }
            int splitindex = key.toString().indexOf(":");
            // e.g. key becomes "word", value becomes "file3:2"; the reducer will
            // later join these into "file2:1;file3:2;file1:1;"
            infoText.set(key.toString().substring(splitindex + 1) + ":" + sum);
            key.set(key.toString().substring(0, splitindex));
            context.write(key, infoText);
        }
    }

    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        private Text result = new Text();

        // Reduce phase: concatenate the per-file counts into one posting list.
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String filelist = "";
            for (Text value : values) {
                filelist += value.toString() + ";";
            }
            result.set(filelist);
            context.write(key, result);
        }
    }

    public static void main(String[] args) {
        try {
            Configuration configuration = new Configuration();
            // This line is critical: point the job at the JobTracker.
            configuration.set("mapred.job.tracker", "192.168.1.15:9001");
            String[] ioargs = new String[] { "index_in", "index_out3" };
            if (args.length == 2) {
                ioargs = args;
            }
            String[] otherArgs = new GenericOptionsParser(configuration, ioargs).getRemainingArgs();
            if (otherArgs.length != 2) {
                System.err.println("Usage: " + MyInvertedIndex.class.getSimpleName() + " <in> <out>");
                System.exit(2);
            }
            // Set up and launch the job.
            Job job = new Job(configuration, MyInvertedIndex.class.getSimpleName());
            job.setJarByClass(MyInvertedIndex.class);
            // Mapper
            job.setMapperClass(Map.class);
            // Combiner
            job.setCombinerClass(Combine.class);
            // Reducer
            job.setReducerClass(Reduce.class);
            // Map output types
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(Text.class);
            // Reduce output types
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(Text.class);
            // Input and output directories
            FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
            FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
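To make the map → combine → reduce data flow easier to trace, here is a minimal, Hadoop-free sketch of the same inverted-index logic. The class name InvertedIndexSketch and the file names and contents (file1 = "MapReduce is simple", file2 = "MapReduce is powerful") are hypothetical, chosen only so the intermediate records are easy to follow; this is not part of the job above.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class InvertedIndexSketch {
    public static void main(String[] args) {
        // Hypothetical test files; names and contents are illustrative only.
        Map<String, String> files = new LinkedHashMap<>();
        files.put("file1", "MapReduce is simple");
        files.put("file2", "MapReduce is powerful");

        // Map + combine stages: count each word per file ("word" -> {"fileN" -> count}).
        Map<String, Map<String, Integer>> counts = new TreeMap<>();
        for (Map.Entry<String, String> f : files.entrySet()) {
            for (String word : f.getValue().split("\\s+")) {
                counts.computeIfAbsent(word, w -> new LinkedHashMap<>())
                      .merge(f.getKey(), 1, Integer::sum);
            }
        }

        // Reduce stage: join the per-file counts into one posting list per word.
        for (Map.Entry<String, Map<String, Integer>> e : counts.entrySet()) {
            StringBuilder postings = new StringBuilder();
            e.getValue().forEach((file, n) -> postings.append(file).append(":").append(n).append(";"));
            System.out.println(e.getKey() + "\t" + postings);
        }
        // Prints, in dictionary order:
        // MapReduce    file1:1;file2:1;
        // is           file1:1;file2:1;
        // powerful     file2:1;
        // simple       file1:1;
    }
}

The real job produces output in the same shape: each line is a word, a tab, then its posting list of file:count pairs separated by semicolons. Assuming the class is packaged into a jar (the jar name here is illustrative), it can be submitted in the usual way, e.g. hadoop jar invertedindex.jar cn.yws.MyInvertedIndex index_in index_out3.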