package mobvista.prd.datasource.source.mapreduce;
import com.google.common.collect.Sets;
import mobvista.dmp.util.MRUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import java.io.IOException;
import java.util.Set;
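/*
 * Inputs (args[0], args[1]): tab-separated logs whose first two columns are
 * device_id and device_type; the mapper tells the two sources apart by
 * matching the etl_* table names in each input file's path.
 * Output (args[2]): one gzip-compressed summary line of distinct-device
 * counts, joined by MRUtils.JOINER: <m count> <dsp count> <count in both>.
 */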
/**
 * Created by Administrator on 2017/5/18.
 * desc : count the daily log volume of the DSP system and the M system
 */
public class CountDspAndMMR {

    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        Job job = Job.getInstance(conf, "count dsp and m");
        job.setJarByClass(CountDspAndMMR.class);
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
        job.setMapperClass(CountDspAndMMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setReducerClass(CountDspAndMReducer.class);
        // Counters are accumulated per reducer instance and flushed in cleanup(),
        // so a single reducer is required for the output line to be a global total.
        job.setNumReduceTasks(1);
        // Must match the reducer's declared output types: Text key, NullWritable value.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileInputFormat.addInputPath(job, new Path(otherArgs[1]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[2]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    public static class CountDspAndMMapper extends Mapper<LongWritable, Text, Text, Text> {
        Text outKey = new Text();
        Text outValue = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] fields = value.toString().split("\t", -1);
            // "map.input.file" is an old-API (mapred) property and is frequently unset
            // under org.apache.hadoop.mapreduce, so read the path from the input split.
            String inputFile = ((FileSplit) context.getInputSplit()).getPath().toString();
            if (inputFile.contains("etl_dsp_request_daily")) {
                outKey.set(MRUtils.JOINER.join(fields[0], fields[1])); // device_id, device_type
                outValue.set("dsp系统"); // marker: device seen in the DSP system logs
                context.write(outKey, outValue);
            }
            if (inputFile.contains("etl_adn_sdk_req_3_day") || inputFile.contains("etl_adn_sdk_request_daily")) {
                outKey.set(MRUtils.JOINER.join(fields[0], fields[1])); // device_id, device_type
                outValue.set("m系统"); // marker: device seen in the M system logs
                context.write(outKey, outValue);
            }
        }
    }
    public static class CountDspAndMReducer extends Reducer<Text, Text, Text, NullWritable> {
        Text outKey = new Text();
        long dsp = 0L;
        long m = 0L;
        long dspAndM = 0L;

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context) {
            // Deduplicate the markers so each device counts at most once per system.
            Set<String> set = Sets.newHashSet();
            for (Text val : values) {
                set.add(val.toString());
            }
            if (set.contains("m系统")) {
                m++;
            }
            if (set.contains("dsp系统")) {
                dsp++;
            }
            if (set.contains("dsp系统") && set.contains("m系统")) {
                dspAndM++;
            }
        }

        @Override
        public void cleanup(Context context) throws IOException, InterruptedException {
            // Emit a single summary line once all keys have been processed.
            outKey.set(MRUtils.JOINER.join(m, dsp, dspAndM));
            context.write(outKey, NullWritable.get());
        }
    }
}
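
// A minimal standalone sanity check of the distinct-device counting logic above.
// This class is an illustrative sketch, not part of the original job: plain Java,
// no Hadoop runtime needed, and the sample device rows are invented. Each map
// entry mirrors the deduplicated marker set that one reduce() call would see.
class CountDspAndMSanityCheck {
    public static void main(String[] args) {
        java.util.Map<String, Set<String>> byDevice = new java.util.HashMap<>();
        byDevice.put("device-1", Sets.newHashSet("dsp系统"));
        byDevice.put("device-2", Sets.newHashSet("m系统"));
        byDevice.put("device-3", Sets.newHashSet("dsp系统", "m系统"));

        long dsp = 0L, m = 0L, dspAndM = 0L;
        for (Set<String> markers : byDevice.values()) {
            if (markers.contains("m系统")) m++;
            if (markers.contains("dsp系统")) dsp++;
            if (markers.contains("dsp系统") && markers.contains("m系统")) dspAndM++;
        }
        // Expected output: 2	2	1  (m count, dsp count, count in both)
        System.out.println(m + "\t" + dsp + "\t" + dspAndM);
    }
}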