
WordCount.java

Author: 闵开慧
Published 2018-03-30 13:40:32
package com.biencloud.test;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class WordCount {

    // Mapper: splits each input line into whitespace-separated tokens
    // and emits (word, 1) for every token.
    public static class TokenizerMapper extends
            Mapper<Object, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                context.write(word, one);
            }
        }
    }

    // Reducer (also reused as the combiner): sums the counts for each word.
    public static class IntSumReducer extends
            Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable result = new IntWritable();

        @Override
        public void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Submit to a remote JobTracker (classic MapReduce v1 property).
        conf.set("mapred.job.tracker", "192.168.0.118:9001");
//        // Standard command-line argument handling, replaced below by
//        // hard-coded HDFS paths:
//        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
//        if (otherArgs.length != 2) {
//            System.err.println("Usage: wordcount <in> <out>");
//            System.exit(2);
//        }
        String[] otherArgs = new String[2];
        otherArgs[0] = "hdfs://192.168.0.118:9000/user/hadoop/f";   // input path
        otherArgs[1] = "hdfs://192.168.0.118:9000/user/hadoop/f2";  // output path (must not already exist)
        // Run the job as user "hadoop" in group "hadoop".
        conf.set("hadoop.job.ugi", "hadoop,hadoop");
        Job job = new Job(conf, "word count");
        job.setNumReduceTasks(4);
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);

//        // Copying a local file into HDFS. If uncommented, this needs imports for
//        // FileSystem, FileUtil and URI, plus localFile/hadoopFile path strings,
//        // and the Configuration must not clash with the conf declared above.
//        Configuration conf = new Configuration();
//        conf.set("hadoop.job.ugi", "hadoop,hadoop");
//        FileSystem src = FileSystem.getLocal(conf);
//        FileSystem dst = FileSystem.get(URI.create(hadoopFile), conf);
//        Path srcpath = new Path(localFile);
//        Path dstpath = new Path(hadoopFile);
//        FileUtil.copy(src, srcpath, dst, dstpath, false, conf);
    }
}
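A quick sanity check of the data flow: if the input path f holds a file containing the line "hello world hello", the mapper emits (hello,1), (world,1), (hello,1), the combiner/reducer sums the values per key, and because setNumReduceTasks(4) is used the result is spread across four part-r-* files under f2, which together contain:

    hello   2
    world   1

To submit the job from the command line instead of an IDE, package the class into a jar and run it with the hadoop launcher. The jar name below is only an illustration; use whatever name your build produces:

    hadoop jar wordcount.jar com.biencloud.test.WordCount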
 