Hadoop WordCount Example with the New API
Jun 07, 2016 04:32 PM
Preparation
Prepare some input files. You can upload them to HDFS with hdfs dfs -put xxx/* /user/fatkun/input.
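If you prefer to stage the input from Java instead of the shell, the HDFS FileSystem API can do the equivalent of hdfs dfs -put. The sketch below is a minimal, hypothetical helper class (the class name UploadInput and the local path /tmp/sample.txt are illustrative); it assumes the cluster's core-site.xml/hdfs-site.xml are on the classpath so that fs.defaultFS points at HDFS.

package com.fatkun;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class UploadInput {
    public static void main(String[] args) throws Exception {
        // Picks up fs.defaultFS from the Hadoop configuration files on the classpath;
        // without them this would write to the local file system instead of HDFS.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Equivalent of: hdfs dfs -put /tmp/sample.txt /user/fatkun/input
        fs.copyFromLocalFile(new Path("/tmp/sample.txt"),
                new Path("/user/fatkun/input/sample.txt"));
        fs.close();
    }
}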
Code
package com.fatkun;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class WordCount extends Configured implements Tool {

    static enum Counters {
        INPUT_WORDS // counter for the total number of words seen by the mappers
    }

    static Log logger = LogFactory.getLog(WordCount.class);

    public static class CountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final IntWritable one = new IntWritable(1);
        private Text word = new Text();
        private boolean caseSensitive = true;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Read the job configuration (set via -Dwordcount.case.sensitive=...)
            Configuration conf = context.getConfiguration();
            caseSensitive = conf.getBoolean("wordcount.case.sensitive", true);
            super.setup(context);
        }

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                if (caseSensitive) { // whether counting is case sensitive
                    word.set(itr.nextToken());
                } else {
                    word.set(itr.nextToken().toLowerCase());
                }
                context.write(word, one);
                context.getCounter(Counters.INPUT_WORDS).increment(1);
            }
        }
    }

    public static class CountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

        @Override
        protected void reduce(Text text, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            context.write(text, new IntWritable(sum));
        }
    }

    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = new Configuration(getConf());
        Job job = Job.getInstance(conf, "Example Hadoop WordCount");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(CountMapper.class);
        job.setCombinerClass(CountReducer.class);
        job.setReducerClass(CountReducer.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // -D options are consumed by ToolRunner's GenericOptionsParser,
        // so the remaining arguments are the input and output paths.
        List<String> other_args = new ArrayList<String>();
        for (int i = 0; i < args.length; i++) {
            other_args.add(args[i]);
        }
        FileInputFormat.addInputPath(job, new Path(other_args.get(0)));
        FileOutputFormat.setOutputPath(job, new Path(other_args.get(1)));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new Configuration(), new WordCount(), args);
        System.exit(exitCode);
    }
}

Run

Export the project as a jar from Eclipse and run the following command:

hadoop jar wordcount.jar com.fatkun.WordCount -Dwordcount.case.sensitive=false /user/fatkun/input /user/fatkun/output
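The -Dwordcount.case.sensitive=false option is handled by ToolRunner's GenericOptionsParser, which places the key into the job Configuration that CountMapper.setup() reads. The same job can also be launched programmatically; the sketch below is a hypothetical driver class (WordCountDriver and its hard-coded paths are illustrative) that sets the flag in code instead of on the command line.

package com.fatkun;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Same effect as -Dwordcount.case.sensitive=false on the command line:
        // the mapper's setup() reads this key from the job configuration.
        conf.setBoolean("wordcount.case.sensitive", false);
        int exitCode = ToolRunner.run(conf, new WordCount(),
                new String[] { "/user/fatkun/input", "/user/fatkun/output" });
        System.exit(exitCode);
    }
}

Because run() calls job.waitForCompletion(true), the INPUT_WORDS counter value is printed along with the built-in job counters when the job finishes.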
References
http://cxwangyi.blogspot.com/2009/12/wordcount-tutorial-for-hadoop-0201.html
http://hadoop.apache.org/docs/r1.2.1/mapred_tutorial.html#Example%3A+WordCount+v2.0