package mobvista.dmp.datasource.gender;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
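// MRUtils (which provides the SPLITTER and JOINER helpers used below) is a project-internal class;
// its import is not shown in this file and is assumed to resolve within the project.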

import java.io.IOException;

public class CalcPackageDictMR {
    /**
     * Author: liushuai
     * Date: 2017-01-13
     * <p>
     * Cleans the output of CalcPackageGenderMR, keeping only records that meet the configured
     * thresholds and appending a tag ("confirm" or "calc") to each remaining record.
     *
     * @throws InterruptedException
     * @throws IOException
     * @throws ClassNotFoundException
     */

    public static void main(String[] args) throws ClassNotFoundException, IOException, InterruptedException {

        Configuration conf = new Configuration();
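        // Enable speculative execution and tune the sort buffer, reducer heap/memory and shuffle copy parallelism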
        conf.set("mapreduce.map.speculative", "true");
        conf.set("mapreduce.reduce.speculative", "true");
        conf.set("mapreduce.task.io.sort.mb", "500");
        conf.set("mapreduce.reduce.java.opts", "-Xmx1536m");
        conf.set("mapreduce.reduce.memory.mb", "2048");
        conf.set("mapreduce.reduce.shuffle.parallelcopies", "50");
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
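        // Remaining argument layout (as used below): otherArgs[0] = input path, otherArgs[1] = output path,
        // otherArgs[2] = l_threshold, otherArgs[3] = h_threshold, otherArgs[4] = m_ratio, otherArgs[5] = f_ratio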

        conf.set("l_threshold", otherArgs[2]);
        conf.set("h_threshold", otherArgs[3]);
        conf.set("m_ratio", otherArgs[4]);
        conf.set("f_ratio", otherArgs[5]);

        Job job = Job.getInstance(conf, "CalcPackageDictMR");
        job.setJarByClass(CalcPackageDictMR.class);
        job.setMapperClass(CalcPackageDictMapper.class);
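        // No reducer class is set, so the default identity reducer passes the mapper output through unchanged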

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }
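
    // Example invocation (jar name and values are illustrative only; argument order follows main() above):
    //   hadoop jar dmp.jar mobvista.dmp.datasource.gender.CalcPackageDictMR \
    //       <input_path> <output_path> <l_threshold> <h_threshold> <m_ratio> <f_ratio>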

    public static class CalcPackageDictMapper extends Mapper<LongWritable, Text, Text, NullWritable> {
        Text outKey = new Text();
        int lowThreshold;   // l_threshold: minimum count in fields[2] for a record to be kept
        int highThreshold;  // h_threshold: count at or above which a decisive ratio is tagged "confirm"
        double mRatio;      // m_ratio: upper cutoff for the ratio in fields[5]
        double fRatio;      // f_ratio: lower cutoff for the ratio in fields[5]

        @Override
        public void setup(Context context) throws IOException, InterruptedException {
            // Thresholds and ratios are passed in through the job configuration set in main()
            lowThreshold = Integer.parseInt(context.getConfiguration().get("l_threshold"));
            highThreshold = Integer.parseInt(context.getConfiguration().get("h_threshold"));
            mRatio = Double.parseDouble(context.getConfiguration().get("m_ratio"));
            fRatio = Double.parseDouble(context.getConfiguration().get("f_ratio"));
        }

        @Override
        public void map(LongWritable key, Text value, Context context) {
            try {
                String line = value.toString();
                String[] fields = MRUtils.SPLITTER.split(line, -1);
                double ratio = Double.parseDouble(fields[5]);
                int count = Integer.parseInt(fields[2]);
                // Drop records with a zero ratio or with too few observations to be meaningful
                if (ratio == 0.0 || count < lowThreshold) {
                    return;
                }
                // Enough observations and a decisive ratio: tag "confirm"; otherwise leave it for further calculation
                String tag;
                if (count >= highThreshold && (ratio >= mRatio || ratio <= fRatio)) {
                    tag = "confirm";
                } else {
                    tag = "calc";
                }
                // Emit the key field (presumably the package name), the ratio and the tag
                outKey.set(MRUtils.JOINER.join(fields[0], fields[5], tag));
                context.write(outKey, NullWritable.get());
            } catch (Exception e) {
                // Malformed or incomplete lines are silently skipped
            }
        }
    }
}