
Learning MapReduce Single-Table Joins


The problem breaks into three parts: first the self-join of the table with itself, then the layout of the emitted columns, and finally assembling the joined results.
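For example, take a single record `Tom Lucy` (child Tom, parent Lucy; an illustrative pair, not necessarily from the original file). The mapper emits it twice: once as key `Lucy` with value `1+Tom+Lucy` (right table, keyed by parent) and once as key `Tom` with value `2+Tom+Lucy` (left table, keyed by child). During the shuffle, every record where a given person appears as the parent meets every record where that same person appears as the child, so the reducer can pair that person's children (the grandchildren) with that person's parents (the grandparents).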

Input file format: a header line `child parent`, followed by one space-separated child-parent pair per line.
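A minimal sample of that shape (the names are illustrative stand-ins, patterned on the child-parent data set used in 《Hadoop实战》):

child parent
Tom Lucy
Tom Jack
Jone Lucy
Jone Jack
Lucy Mary
Lucy Ben
Jack Alice
Jack Jesse
Terry Alice
Terry Jesse
Philip Terry
Philip Alma
Mark Terry
Mark Alma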

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;


public class STjoin extends Configured implements Tool {
    // Used to emit the header row exactly once per reducer JVM.
    public static int time = 0;

    // The mapper splits each input line into child and parent, then emits the
    // record twice: keyed by parent as the right table, and keyed by child as
    // the left table. A flag at the start of the value marks which table the
    // record belongs to.
    public static class Map extends Mapper<Object, Text, Text, Text> {

        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            // Fields are separated by a single space.
            int i = line.indexOf(' ');
            if (i < 0) {
                return; // skip blank or malformed lines
            }
            // Split into child and parent.
            String childname = line.substring(0, i);
            String parentname = line.substring(i + 1);
            // Skip the header row ("child parent").
            if (!childname.equals("child")) {
                // Right table: keyed by parent, flag "1".
                context.write(new Text(parentname), new Text("1+" + childname + "+" + parentname));
                // Left table: keyed by child, flag "2".
                context.write(new Text(childname), new Text("2+" + childname + "+" + parentname));
            }
        }
    }

    public static class Reduce extends Reducer<Text, Text, Text, Text> {

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // Write the header row before the first output record.
            if (time == 0) {
                context.write(new Text("grandchild"), new Text("grandparent"));
                time++;
            }
            // The person in `key` may appear both as a parent (right table) and
            // as a child (left table); collect both sides before joining.
            int grandchildnum = 0;
            String[] grandchild = new String[10];
            int grandparentnum = 0;
            String[] grandparent = new String[10];

            for (Text val : values) {
                String record = val.toString();
                if (record.isEmpty()) {
                    continue;
                }
                // Value layout: flag + "+" + childname + "+" + parentname
                char relationtype = record.charAt(0);
                String[] fields = record.substring(2).split("\\+");
                String childname = fields[0];
                String parentname = fields[1];

                if (relationtype == '1') {
                    // Right table: this child is a grandchild of the key's parents.
                    grandchild[grandchildnum] = childname;
                    grandchildnum++;
                } else {
                    // Left table: this parent is a grandparent of the key's children.
                    grandparent[grandparentnum] = parentname;
                    grandparentnum++;
                }
            }

            // The cross product of the two lists yields the grandchild-grandparent pairs.
            if (grandparentnum != 0 && grandchildnum != 0) {
                for (int m = 0; m < grandchildnum; m++) {
                    for (int n = 0; n < grandparentnum; n++) {
                        context.write(new Text(grandchild[m]), new Text(grandparent[n]));
                    }
                }
            }
        }
    }

    public int run(String[] args) throws Exception {
        // ToolRunner has already populated this Tool's configuration.
        Configuration conf = getConf();
        Job job = Job.getInstance(conf);
        String inputPath = "/usr/local/idea-IC-139.1117.1/Hadoop/out/datainput/child-parent.txt";
        String outputPath = "/usr/local/idea-IC-139.1117.1/Hadoop/out/dataout/";

        job.setJarByClass(STjoin.class);
        job.setJobName("STjoin");

        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }


    public static void main(String[] args) throws Exception {
        int ret = ToolRunner.run(new STjoin(), args);
        System.exit(ret);
    }
}
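To try the job, package the class into a jar (the jar name below is hypothetical) and launch it through ToolRunner in the usual way:

hadoop jar stjoin.jar STjoin

Note that the input and output paths are hardcoded in run(), and FileOutputFormat requires that the output directory not already exist when the job starts.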

Output:
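With the sample input shown above, the reducer would emit pairs like these (the exact ordering depends on how keys and values arrive during the shuffle):

grandchild	grandparent
Tom	Mary
Tom	Ben
Jone	Mary
Jone	Ben
Tom	Alice
Tom	Jesse
Jone	Alice
Jone	Jesse
Philip	Alice
Philip	Jesse
Mark	Alice
Mark	Jesse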

Reference: 《Hadoop实战》 (Hadoop in Action)
