Hadoop MapReduce Java Example

WordCount workflow

input -> split -> map -> shuffle -> reduce -> output
hadoop jar /usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.3.jar wordcount 10803060234.txt /output
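
To make the stages concrete, here is how one (hypothetical) input line would flow through the job:

input:   "hello world hello"
split:   the input file is carved into splits; each line becomes one map input record
map:     (hello, 1), (world, 1), (hello, 1)
shuffle: hello -> [1, 1]   world -> [1]   (pairs grouped and sorted by key)
reduce:  (hello, 2), (world, 1)   written to /output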


package wordcount;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Test {

public static void main(String[] args) throws Exception {

    Configuration conf = new Configuration();
    // Point the client at the HDFS NameNode and submit through YARN.
    conf.set("fs.defaultFS", "hdfs://172.26.19.40:9000");
    // The jar containing the mapper/reducer classes, shipped to the cluster.
    conf.set("mapreduce.job.jar", "target/wc.jar");
    conf.set("mapreduce.framework.name", "yarn");
    conf.set("yarn.resourcemanager.hostname", "hmaster");
    // Required when submitting from a Windows client to a Linux cluster.
    conf.set("mapreduce.app-submission.cross-platform", "true");

    Job job = Job.getInstance(conf);
    job.setMapperClass(WordMapper.class);
    job.setReducerClass(WordReducer.class);

    // The map output types (Text/IntWritable) differ from the final job
    // output types (Text/LongWritable), so both pairs must be declared.
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    // Fill in the HDFS input and output paths before running.
    FileInputFormat.setInputPaths(job, "");
    FileOutputFormat.setOutputPath(job, new Path(""));

    // Block until the job finishes and report success/failure via the exit code.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

}
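
One thing worth noting about this driver: the reducer cannot also be registered as a combiner with job.setCombinerClass(WordReducer.class), because a combiner must emit exactly the map output types (Text/IntWritable here), while WordReducer emits LongWritable values. A minimal combiner sketch that would work (the class name IntSumCombiner is my own, not from the original code):

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Pre-aggregates (word, 1) pairs on the map side to cut shuffle traffic.
public class IntSumCombiner extends Reducer<Text, IntWritable, Text, IntWritable> {

@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    int sum = 0;
    for (IntWritable value : values) {
        sum += value.get();
    }
    context.write(key, new IntWritable(sum));
}

}

It would be registered in the driver with job.setCombinerClass(IntSumCombiner.class).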

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

@Override
protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    // Each call receives one line of the input split; the key is the byte offset.
    String lineValue = value.toString();
    // Tokenize on single spaces, as in the original example.
    String[] words = lineValue.split(" ");
    IntWritable cIntWritable = new IntWritable(1);
    for (String word : words) {
        // Emit (word, 1) for every occurrence.
        context.write(new Text(word), cIntWritable);
    }
}

}
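
A small mapper optimization worth knowing (a sketch, not part of the original post): because context.write serializes its arguments immediately, the output Text can be allocated once and reused instead of constructing a new Text per word. The mapper could be rewritten as:

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

// Reused across all records; safe because write() serializes right away.
private final Text outKey = new Text();
private static final IntWritable ONE = new IntWritable(1);

@Override
protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    for (String word : value.toString().split(" ")) {
        outKey.set(word);
        context.write(outKey, ONE);
    }
}

}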

package wordcount;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class WordReducer extends Reducer<Text, IntWritable, Text, LongWritable> {

@Override
protected void reduce(Text key, Iterable<IntWritable> values,
        Context context) throws IOException, InterruptedException {

    // The framework groups map outputs by key, so values holds every 1
    // emitted for this word; sum them into a long to avoid overflow.
    long tmpCount = 0L;
    for (IntWritable value : values) {
        tmpCount = tmpCount + value.get();
    }

    context.write(key, new LongWritable(tmpCount));

}

}
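
Note how the types line up with the driver: the reducer's input pair (Text, IntWritable) must match setMapOutputKeyClass/setMapOutputValueClass, and its output pair (Text, LongWritable) must match setOutputKeyClass/setOutputValueClass. Accumulating in a long also means the count cannot overflow even if a word occurs more than Integer.MAX_VALUE times.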

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.skcc</groupId>
    <artifactId>wordcount</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>wordcount</name>
    <description>count the word</description>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <hadoop.version>2.7.3</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
    </dependencies>
</project>
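One detail to watch (an observation, not from the original post): a default Maven build of this pom produces target/wordcount-0.0.1-SNAPSHOT.jar, while the driver sets mapreduce.job.jar to target/wc.jar. Either add <finalName>wc</finalName> inside a <build> section of the pom, or point the driver at the actual jar name before submitting.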
