package com.sky.mr.wordcount;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.junit.Test;
import java.io.IOException;
public class WordcountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    // map() is invoked once per input line; reuse these Writable instances
    // across calls instead of allocating new objects on every invocation.
    private final Text k = new Text();
    private final IntWritable v = new IntWritable(1);

    /**
     * Tokenizes one line of input text and emits a (word, 1) pair for each word.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   the line of text to tokenize
     * @param context Hadoop context used to emit the (word, 1) pairs
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Get the text content of the current line.
        String line = value.toString();
        // Split on runs of whitespace. Splitting on a single space (" ") would
        // produce empty tokens for consecutive spaces or tabs, causing "" to be
        // counted as a word.
        String[] words = line.split("\\s+");
        for (String word : words) {
            // split("\\s+") can still yield one leading empty token when the
            // line starts with whitespace; skip it.
            if (word.isEmpty()) {
                continue;
            }
            k.set(word);
            context.write(k, v);
        }
    }
}
package com.sky.mr.wordcount;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class WordcountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    // Reused output value; avoids allocating a new IntWritable per reduce call.
    IntWritable v = new IntWritable();

    /**
     * Sums the partial counts for one word and emits (word, total).
     *
     * @param key     the word being aggregated
     * @param values  the partial counts emitted by the mappers for this word
     * @param context Hadoop context used to emit the (word, total) pair
     * @throws IOException          if the framework fails to write output
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        // Accumulate the total occurrence count for this key.
        int total = 0;
        for (IntWritable partial : values) {
            total += partial.get();
        }
        // Emit the final count.
        v.set(total);
        context.write(key, v);
    }
}
package com.sky.mr.wordcount;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
public class WordcountDriver {
    /**
     * Configures and submits the word-count MapReduce job, then exits with
     * status 0 on success or 1 on failure.
     *
     * @param args args[0] = input path, args[1] = output path
     * @throws IOException            if job submission fails
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException when paths are missing.
        if (args.length < 2) {
            System.err.println("Usage: WordcountDriver <input path> <output path>");
            System.exit(2);
        }
        // 1. Get the configuration and the job object.
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // 2. Locate the jar containing this driver so the cluster can ship it.
        job.setJarByClass(WordcountDriver.class);
        // 3. Wire up the custom mapper and reducer classes.
        job.setMapperClass(WordcountMapper.class);
        job.setReducerClass(WordcountReducer.class);
        // 4. Declare the map-output key/value types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // 5. Declare the final (reducer) output key/value types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // 6. Set the input and output paths from the command line.
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // 7. Submit the job to the cluster and wait for it to finish.
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
Sample input (the program's own import lines with punctuation removed, used as test data for the word count):
import org apache hadoop io
import org apache hadoop io
import org apache hadoop
import java io IOException
創新互聯致力于互聯網網站建設與網站營銷,提供成都做網站、成都網站設計、網站開發、seo優化、網站排名、互聯網營銷、重慶小程序開發、公眾號商城、等建站開發,創新互聯網站建設策劃專家,為不同類型的客戶提供良好的互聯網應用定制解決方案,幫助客戶在新的全球化互聯網環境中保持優勢。
Sample output (each distinct word from the input followed by its occurrence count):
IOException 1
apache 3
hadoop 3
import 4
io 3
java 1
org 3
網頁標題:MapReduce單詞統計
URL網址:http://vcdvsql.cn/article40/gdgeho.html
成都網站建設公司_創新互聯,為您提供網站導航、面包屑導航、網站內鏈、服務器托管、網站設計公司、企業網站制作
聲明:本網站發布的內容(圖片、視頻和文字)以用戶投稿、用戶轉載內容為主,如果涉及侵權請盡快告知,我們將會在第一時間刪除。文章觀點不代表本網站立場,如需處理請聯系客服。電話:028-86922220;郵箱:631063699@qq.com。內容未經允許不得轉載,或轉載時需注明來源: 創新互聯