Create a new project named Flow
Add the dependency: in pom.xml, add the following:
<!-- Add the hadoop-client 3.1.3 dependency -->
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.3</version>
    </dependency>
</dependencies>
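Once Maven has resolved the dependency, you can optionally confirm that the Hadoop classes are on the classpath. The small throwaway class below is my own addition (the class name VersionCheck is not part of the project); it simply prints the client version and should report 3.1.3 if the dependency resolved:

package org.example.flow;

import org.apache.hadoop.util.VersionInfo;

// Throwaway check: prints the Hadoop version pulled in through pom.xml.
public class VersionCheck {
    public static void main(String[] args) {
        System.out.println("Hadoop client version: " + VersionInfo.getVersion());
    }
}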
Create a file named log.txt and fill it with sample data (each line contains a phone number, upstream traffic, and downstream traffic), for example:
12611113333 556 8976
12612113333 1123 9087
13787653490 2345 7864
15027889876 556 76
13889764536 887 9
Create four classes: FlowBean, FlowDriver, FlowMapper, and FlowReducer.
The code is as follows:
FlowBean:
package org.example.flow;

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

// Hadoop serialization
// Three fields: phone number, upstream traffic, downstream traffic
public class FlowBean implements Writable {
    private String phone;
    private Long upFlow;
    private Long downFlow;

    public FlowBean(String phone, Long upFlow, Long downFlow) {
        this.phone = phone;
        this.upFlow = upFlow;
        this.downFlow = downFlow;
    }

    // Getters and setters
    public String getPhone() {
        return phone;
    }

    public void setPhone(String phone) {
        this.phone = phone;
    }

    public Long getUpFlow() {
        return upFlow;
    }

    public void setUpFlow(Long upFlow) {
        this.upFlow = upFlow;
    }

    public Long getDownFlow() {
        return downFlow;
    }

    public void setDownFlow(Long downFlow) {
        this.downFlow = downFlow;
    }

    // No-arg constructor
    public FlowBean() {
    }

    // Returns the total traffic
    public Long getTotalFlow() {
        return upFlow + downFlow;
    }

    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeUTF(phone);
        dataOutput.writeLong(upFlow);
        dataOutput.writeLong(downFlow);
    }

    @Override
    public void readFields(DataInput dataInput) throws IOException {
        phone = dataInput.readUTF();
        upFlow = dataInput.readLong();
        downFlow = dataInput.readLong();
    }
}
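Since Hadoop uses write() and readFields() to move the bean between the Mapper and Reducer, it can be worth checking that a FlowBean survives a serialization round trip. A minimal sketch (the class name and stream setup below are mine, not part of the original project):

package org.example.flow;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Quick round-trip check for the Writable implementation in FlowBean.
public class FlowBeanRoundTrip {
    public static void main(String[] args) throws IOException {
        FlowBean in = new FlowBean("12611113333", 556L, 8976L);

        // Serialize with write()
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bytes));

        // Deserialize into a fresh bean with readFields()
        FlowBean out = new FlowBean();
        out.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        // Expect: 12611113333 total=9532
        System.out.println(out.getPhone() + " total=" + out.getTotalFlow());
    }
}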
FlowDriver:
package org.example.flow;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

// Driver class that submits the job; seven steps in total
public class FlowDriver {
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // 1. Get the configuration and create the job object
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        // 2. Set the jar path
        job.setJarByClass(FlowDriver.class);
        // 3. Attach the Mapper and Reducer
        job.setMapperClass(FlowMapper.class);
        job.setReducerClass(FlowReducer.class);
        // 4. Set the Mapper output types
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);
        // 5. Set the Reducer output types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // 6. Set the input and output paths
        FileInputFormat.setInputPaths(job, new Path("D:\\vm\\wcinput"));
        FileOutputFormat.setOutputPath(job, new Path("output4"));
        // 7. Submit the job and use the return value as the exit code
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }
}
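The input and output paths above are hardcoded for a local run. If you would rather pass them in when launching the job, a common variant of step 6 is to read them from the command line; a minimal sketch, assuming args[0] is the input directory and args[1] is the output directory:

// Variant of step 6: take the paths from the command line instead of hardcoding them.
// Assumes args[0] = input directory, args[1] = output directory.
FileInputFormat.setInputPaths(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));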
FlowMapper:
package org.example.flow;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

// 1. Extend Mapper
// 2. Override the map method
public class FlowMapper extends Mapper<LongWritable, Text, Text, FlowBean> {
    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // 1. Read one line and split it on spaces:
        //    the phone number is the first element,
        //    the upstream traffic is the second element,
        //    the downstream traffic is the third element
        String[] split = value.toString().split(" ");
        String phone = split[0];
        Long upFlow = Long.parseLong(split[1]);
        Long downFlow = Long.parseLong(split[2]);
        // 2. Wrap the values in a FlowBean
        FlowBean flowBean = new FlowBean(phone, upFlow, downFlow);
        // Write the phone number as the key and the bean as the value
        context.write(new Text(phone), flowBean);
    }
}
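Note that split(" ") assumes the fields are separated by exactly one space, which matches the sample log.txt above. If your data might use tabs or repeated spaces, one option (not in the original code) is to split on any run of whitespace instead:

// Variant: split on any whitespace run (tabs, repeated spaces) rather than a single space.
String[] split = value.toString().trim().split("\\s+");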
FlowReducer:
package org.example.flow;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

// Extend Reducer
// Override the reduce method
public class FlowReducer extends Reducer<Text, FlowBean, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<FlowBean> values, Context context) throws IOException, InterruptedException {
        // 1. Iterate over the values and sum the upstream and downstream traffic
        Long upFlowSum = 0L;
        Long downFlowSum = 0L;
        for (FlowBean flowBean : values) {
            upFlowSum += flowBean.getUpFlow();
            downFlowSum += flowBean.getDownFlow();
        }
        // 2. Compute the overall total
        long sumFlow = upFlowSum + downFlowSum;
        String flowDesc = String.format("Total upstream: %d, total downstream: %d, total flow: %d", upFlowSum, downFlowSum, sumFlow);
        context.write(key, new Text(flowDesc));
    }
}
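With the sample log.txt above, each phone number appears only once, so the sums are simply the values from that line. The output file (output4/part-r-00000) should then contain lines roughly like:

12611113333	Total upstream: 556, total downstream: 8976, total flow: 9532
12612113333	Total upstream: 1123, total downstream: 9087, total flow: 10210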
Create a file named log4j.properties in the resources directory and add the following content:
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
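As written, only the stdout appender is attached to the root logger; the logfile appender is defined but not used. If you also want the log written to target/spring.log, one option (an optional tweak, not part of the original setup) is to list it on the root logger as well:

log4j.rootLogger=INFO, stdout, logfile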