
Writing Log Messages in Hadoop MapReduce

2017-01-25 19:05
On a Hadoop cluster, log messages written in your MapReduce code can be viewed through the built-in web UI. Below is a short walkthrough:

The program:

package canma.dmml.MRJobWithLog;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

import java.io.IOException;

/**
 * Created by macan on 2017/1/25.
 */
public class WordCountWithLogging extends Configured implements Tool {

    public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        public Text data = new Text();
        public IntWritable num = new IntWritable(1);

        // logger for the mapper
        public static Logger logger = Logger.getLogger(WordCountMapper.class);

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] words = value.toString().split(" ");

            for (String word : words) {
                // ordinary log messages use info()
                logger.info("Mapper key  :  " + key);
                if (logger.isDebugEnabled()) {
                    // messages guarded by isDebugEnabled() should be written
                    // with debug(), not info()
                    logger.debug("Mapper value  " + value);
                }
                data.set(word);
                context.write(data, num);
            }
        }
    }

    public static class WordcountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        public IntWritable num = new IntWritable();

        // logger for the reducer
        public static Logger logger = Logger.getLogger(WordcountReducer.class);

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            logger.info("Reducer key  :  " + key);
            if (logger.isDebugEnabled()) {
                logger.debug("Reducer value  :  " + sum);
            }
            num.set(sum);
            context.write(key, num);
        }
    }

    @Override
    public int run(String[] strings) throws Exception {
        Job job = Job.getInstance(getConf());

        job.setJarByClass(WordCountWithLogging.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordcountReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        String[] args = new GenericOptionsParser(getConf(), strings).getRemainingArgs();

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // pass the configuration to ToolRunner and propagate the job's exit code
        System.exit(ToolRunner.run(conf, new WordCountWithLogging(), args));
    }
}
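
The job can then be packaged into a jar and submitted to the cluster. A hypothetical invocation (the jar name and paths are placeholders, not from the original article):

hadoop jar wordcount-log.jar canma.dmml.MRJobWithLog.WordCountWithLogging <input_path> <output_path>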


Logging is handled by the static Logger objects defined in the WordCountMapper and WordcountReducer classes.

A log message is written like this:

logger.info(string)
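
Note that the messages guarded by isDebugEnabled() only appear when the task's log level is DEBUG (the default is INFO). A minimal sketch of raising the level, assuming Hadoop 2.x where the per-task level is controlled by the mapreduce.map.log.level and mapreduce.reduce.log.level properties; this could go in run() before the job is submitted:

// assumption: Hadoop 2.x property names for per-task log levels
Configuration conf = job.getConfiguration();
conf.set("mapreduce.map.log.level", "DEBUG");
conf.set("mapreduce.reduce.log.level", "DEBUG");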


When the program runs on the cluster, the log messages we wrote can be viewed in the JobHistory server's web UI, under each task attempt's logs. If log aggregation is enabled, they can also be fetched from the command line with yarn logs -applicationId <application_id>.
Tags: hadoop mapreduce