
Reading a SequenceFile with Hadoop MapReduce

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.mahout.fpm.pfpgrowth.convertors.string.TopKStringPatterns;

public class ReadSeqFile {

    // Path to the SequenceFile produced by Mahout FP-Growth.
    private static String uri = "/home/hadoop/fpdir/output/part-r-00000";
    private static SequenceFile.Reader reader = null;
    private static Configuration conf = new Configuration();

    public static class ReadFileMapper extends
            Mapper<LongWritable, Text, Text, Text> {

        @Override
        public void map(LongWritable key, Text value, Context context) {
            // Instantiate the key/value types recorded in the SequenceFile header
            // (Text keys and TopKStringPatterns values for FP-Growth output).
            Text text = (Text) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
            TopKStringPatterns k = (TopKStringPatterns) ReflectionUtils.newInstance(reader.getValueClass(), conf);
            try {
                // Note: records are read through the SequenceFile.Reader directly,
                // not from the key/value arguments passed into map().
                while (reader.next(text, k)) {
                    context.write(text, new Text(k.toString()));
                }
            } catch (IOException e1) {
                e1.printStackTrace();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * @param args
     * @throws IOException
     * @throws InterruptedException
     * @throws ClassNotFoundException
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Job job = new Job(conf, "read seq file");
        job.setJarByClass(ReadSeqFile.class);
        job.setMapperClass(ReadFileMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        // Open the SequenceFile so the mapper can read it through the static reader.
        Path path = new Path(uri);
        FileSystem fs = FileSystem.get(conf);
        reader = new SequenceFile.Reader(fs, path, conf);

        FileInputFormat.addInputPath(job, path);
        FileOutputFormat.setOutputPath(job, new Path("/home/hadoop/fpdir/testReadSeq"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
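
The mapper above ignores the key/value pair that the framework passes into map() and instead pulls every record through the static SequenceFile.Reader opened in main(), so it only behaves as intended when the map task runs in the same JVM as main() (for example, in local mode). A more conventional way to read a SequenceFile in MapReduce is to let SequenceFileInputFormat deserialize each record and hand it to the mapper directly. Below is a minimal sketch of that approach; the class names and the output directory /home/hadoop/fpdir/testReadSeq2 are illustrative, and it assumes, as the code above does, that the file stores Text keys and TopKStringPatterns values.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.mahout.fpm.pfpgrowth.convertors.string.TopKStringPatterns;

public class ReadSeqFileViaInputFormat {

    // The framework deserializes each SequenceFile record and passes it straight
    // to map(), so no shared SequenceFile.Reader is needed.
    public static class PatternMapper
            extends Mapper<Text, TopKStringPatterns, Text, Text> {

        @Override
        protected void map(Text key, TopKStringPatterns value, Context context)
                throws IOException, InterruptedException {
            context.write(key, new Text(value.toString()));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "read seq file via input format");
        job.setJarByClass(ReadSeqFileViaInputFormat.class);
        job.setMapperClass(PatternMapper.class);
        job.setNumReduceTasks(0); // map-only job
        job.setInputFormatClass(SequenceFileInputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path("/home/hadoop/fpdir/output/part-r-00000"));
        // Hypothetical output directory, used here only for illustration.
        FileOutputFormat.setOutputPath(job, new Path("/home/hadoop/fpdir/testReadSeq2"));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With setNumReduceTasks(0) the job is map-only, so each key and its pattern list are written straight to the output directory as text, and the job also works when map tasks run in separate JVMs on a cluster.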