package mobvista.prd.datasource.source.mapreduce;

import mobvista.dmp.util.MRUtils;
import mobvista.prd.datasource.tag.mapreduce.reduce.CountReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;

/**
 * Created by Administrator on 2017/5/16.
 * desc : Counts 3-day record volumes for the dsp and M systems, broken down by country.
 */
public class CountDspMCountryMR {
    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

        Job job = Job.getInstance(conf, "count dsp M by country");

        job.setJarByClass(CountDspMCountryMR.class);
        FileOutputFormat.setCompressOutput(job, true);
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);

        job.setMapperClass(CountDspMCountryMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
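        // CountReducer (reused from the tag pipeline) is assumed to sum the emitted 1s per key,
        // producing a record count for each (source, country) pair.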
        job.setReducerClass(CountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

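        // otherArgs[0]/[1]: the dsp ("merge_dsp_3_day") and M-system ("etl_adn_sdk_req_3_day")
        // 3-day inputs; the mapper tells them apart by file path. otherArgs[2]: output directory.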
        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileInputFormat.addInputPath(job, new Path(otherArgs[1]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[2]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
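
    /**
     * Emits ((source, country), 1) for every input record: dsp rows take the source and country
     * from the row itself, while M-system rows get the fixed source label "M系统".
     */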
    public static class CountDspMCountryMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        Text outKey = new Text();
        LongWritable outValue = new LongWritable(1);
        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            // Split on the shared field delimiter; -1 keeps trailing empty fields.
            String[] fields = MRUtils.SPLITTER.split(line, -1);
            // "map.input.file" is not reliably set under the new mapreduce API, so read the
            // file name from the input split instead.
            String inputFile = ((FileSplit) context.getInputSplit()).getPath().toString();
            if (inputFile.contains("merge_dsp_3_day")) {
                outKey.set(MRUtils.JOINER.join(fields[2], fields[3]));//来源,国家
                context.write(outKey, outValue);
            } else if (inputFile.contains("etl_adn_sdk_req_3_day")) {
                outKey.set(MRUtils.JOINER.join("M系统", fields[2]));//来源,国家
                context.write(outKey, outValue);
            }

        }
    }
}