Hadoop: String Concatenation in MapReduce Programming

2021/4/27 20:55:19

This article walks through string concatenation in Hadoop MapReduce programming: how a word-count job can emit more than two output columns by packing extra fields into the key, illustrated with a complete example.

String Concatenation in MapReduce Programming

Requirement: count how many times each word occurs, and display the result as three columns: the word, the word's length, and the word's occurrence count.

Analysis: MapReduce passes data between stages only as key-value pairs, i.e., two columns at a time. To produce three output columns, we concatenate the word and its length into a single column (the key) and carry the occurrence count in the value, as sketched below.
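A minimal, self-contained sketch of this packing idea (the class name KeyConcatDemo and the sample word are hypothetical, used only for illustration): the word and its length are joined with a tab into a single Text key, so the count can still travel as the value.

import org.apache.hadoop.io.Text;

public class KeyConcatDemo {
    public static void main(String[] args) {
        String word = "hadoop";  // hypothetical sample word
        // pack the word and its length into one tab-separated key;
        // together with the count carried in the value, this yields three output columns
        Text compositeKey = new Text(word + "\t" + word.length());
        System.out.println(compositeKey);  // prints: hadoop	6
    }
}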

Code implementation:

package com.miao.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;

/**
 * @ClassName WordConcat
 * @Description Count the occurrences of each word and concatenate each word with its length
 * @Date 2021-04-27 20:08:50
 * @Create By     Miao
 */
public class WordConcat extends Configured implements Tool {
    public int run(String[] args) throws Exception {

        //build the Job
        Job job = Job.getInstance(this.getConf(),"wordConcat");
        job.setJarByClass(WordConcat.class);

        //configure the Job
        job.setInputFormatClass(TextInputFormat.class);
        //specify the input source
        TextInputFormat.setInputPaths(job,new Path("D:\\Study\\idea\\MavenProject\\count.txt"));

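        //set the Mapper class and its output key/value types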
        job.setMapperClass(WCMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

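        //set the Reducer class and the final output key/value types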
        job.setReducerClass(WCReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

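        //write the results as plain text with TextOutputFormat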
        job.setOutputFormatClass(TextOutputFormat.class);
        //specify the output path (delete it first if it already exists)
        Path outputPath = new Path("D:\\Study\\idea\\MavenProject\\output\\four");
        FileSystem fs = FileSystem.get(this.getConf());
        if(fs.exists(outputPath)){
            fs.delete(outputPath,true);
        }
        TextOutputFormat.setOutputPath(job,outputPath);
        //submit the Job and wait for completion
        return job.waitForCompletion(true) ? 0 : -1;
    }

    public static void main(String[] args) throws Exception {
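        //run this Tool through ToolRunner so generic Hadoop options are parsed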
        Configuration conf = new Configuration();
        int status = ToolRunner.run(conf, new WordConcat(), args);
        System.exit(status);
    }


    public static class WCMapper extends Mapper<LongWritable, Text,Text, IntWritable> {
        //output key (K2): the current word
        Text outputKey = new Text();
        //output value (V2): the constant 1
        IntWritable outputValue = new IntWritable(1);

        /**
         * map is called once for each input KV pair
         * @param key: byte offset of the line within the file
         * @param value: content of the line
         * @param context
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            //split the line into words on whitespace
            String[] words = value.toString().split("\\s+");
            //iterate over the words, emitting each one as K2
            for (String word : words) {
                //use the current word as K2
                this.outputKey.set(word);
                //pass K2 and V2 to the next stage
                context.write(outputKey,outputValue);
            }
        }
    }


    public static class WCReducer extends Reducer<Text, IntWritable,Text, IntWritable> {

        //output key (K3): the word concatenated with its length
        Text outputKey = new Text();
        //output value (V3): the total count
        IntWritable outputValue = new IntWritable();

        /**
         * reduce is called once for each group of identical keys
         * @param key: the word
         * @param values: the 1s emitted for this word
         * @param context
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                //accumulate the 1s emitted for the current word
                sum += value.get();
            }
            //set K3: the word and its length, separated by a tab
            this.outputKey.set(key.toString()+"\t"+key.toString().length());
            //set V3: the total count
            this.outputValue.set(sum);
            //write the final record
            context.write(outputKey,this.outputValue);
        }
    }
}
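As a hypothetical example of the result: if count.txt contained the single line "hadoop spark hadoop", the job would write output along these lines, with the fields separated by tabs:

hadoop	6	2
spark	5	1

That is, each record holds the word, the word's length, and the number of occurrences, matching the required three-column format.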

