import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class DataCount {
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
job.setJarByClass(DataCount.class);
job.setMapperClass(DCMapper.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(DataInfo.class);
job.setReducerClass(DCReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(DataInfo.class);
FileInputFormat.setInputPaths(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setPartitionerClass(DCPartitioner.class);
        // one reduce task per partition: DCPartitioner returns 0-3, so pass 4 as args[2]
        job.setNumReduceTasks(Integer.parseInt(args[2]));
job.waitForCompletion(true);
}
    // Mapper: parse each line and emit (phone number, DataInfo)
public static class DCMapper extends Mapper<LongWritable, Text, Text, DataInfo>{
private Text k = new Text();
@Override
protected void map(LongWritable key, Text value,
Mapper<LongWritable, Text, Text, DataInfo>.Context context)
throws IOException, InterruptedException {
String line = value.toString();
String[] fields = line.split("\t");
String tel = fields[1];
long up = Long.parseLong(fields[8]);
long down = Long.parseLong(fields[9]);
DataInfo dataInfo = new DataInfo(tel,up,down);
k.set(tel);
context.write(k, dataInfo);
}
}
public static class DCReducer extends Reducer<Text, DataInfo, Text, DataInfo>{
@Override
protected void reduce(Text key, Iterable<DataInfo> values,
Reducer<Text, DataInfo, Text, DataInfo>.Context context)
throws IOException, InterruptedException {
long up_sum = 0;
long down_sum = 0;
            for (DataInfo d : values) {
                // Hadoop reuses the same DataInfo instance across iterations,
                // so read the fields immediately rather than caching references
                up_sum += d.getUpPayLoad();
                down_sum += d.getDownPayLoad();
            }
DataInfo dataInfo = new DataInfo("",up_sum,down_sum);
context.write(key, dataInfo);
}
}
public static class DCPartitioner extends Partitioner<Text, DataInfo>{
private static Map<String,Integer> provider = new HashMap<String,Integer>();
static{
provider.put("138", 1);
provider.put("139", 1);
provider.put("152", 2);
provider.put("153", 2);
provider.put("182", 3);
provider.put("183", 3);
}
@Override
public int getPartition(Text key, DataInfo value, int numPartitions) {
            // in a real system this mapping could be read from a database or configuration
            String tel_sub = key.toString().substring(0, 3);
            Integer count = provider.get(tel_sub);
            if (count == null) {
                count = 0; // unknown prefixes fall into the default partition 0
            }
            return count;
}
}
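    // NOTE: the original post does not include the DataInfo class that the job
    // above references. The following is a minimal sketch, reconstructed from
    // the constructor and getter calls in DCMapper/DCReducer; the field names
    // are assumptions. Nesting it here keeps the example self-contained.
    public static class DataInfo implements Writable {
        private String tel;
        private long upPayLoad;
        private long downPayLoad;
        public DataInfo() {} // Hadoop needs a no-arg constructor to deserialize
        public DataInfo(String tel, long upPayLoad, long downPayLoad) {
            this.tel = tel;
            this.upPayLoad = upPayLoad;
            this.downPayLoad = downPayLoad;
        }
        @Override
        public void write(DataOutput out) throws IOException {
            out.writeUTF(tel);
            out.writeLong(upPayLoad);
            out.writeLong(downPayLoad);
        }
        @Override
        public void readFields(DataInput in) throws IOException {
            this.tel = in.readUTF();
            this.upPayLoad = in.readLong();
            this.downPayLoad = in.readLong();
        }
        public long getUpPayLoad() { return upPayLoad; }
        public long getDownPayLoad() { return downPayLoad; }
        @Override
        public String toString() { return upPayLoad + "\t" + downPayLoad; }
    }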
}
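With the DataInfo sketch above, the job compiles as a single source file. A hypothetical invocation (the jar name and HDFS paths are ours): the input is tab-separated with the phone number in field 1 and the upload/download payloads in fields 8 and 9, and since DCPartitioner returns partitions 0-3, four reducers are needed:

hadoop jar datacount.jar DataCount /flow/input /flow/output 4

Each reducer then writes its own part-r-* output file, one per carrier group.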

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.WritableComparable;

public class InfoBean implements WritableComparable<InfoBean> {
    private String account;
private double income;
private double expenses;
private double surplus;
public void set(String account,double income,double expenses){
this.account = account;
this.income = income;
this.expenses = expenses;
this.surplus = income - expenses;
}
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(account);
out.writeDouble(income);
out.writeDouble(expenses);
out.writeDouble(surplus);
}
@Override
public void readFields(DataInput in) throws IOException {
this.account = in.readUTF();
this.income = in.readDouble();
this.expenses = in.readDouble();
this.surplus = in.readDouble();
}
@Override
    public int compareTo(InfoBean o) {
        if (this.income == o.getIncome()) {
            // Double.compare returns 0 when the expenses are also equal, as the
            // compareTo contract requires (the original never returned 0)
            return Double.compare(this.expenses, o.getExpenses());
        }
        return this.income > o.getIncome() ? 1 : -1;
    }
@Override
public String toString() {
return income + "\t" + expenses + "\t" + surplus;
}
    // getters and setters: only the two used by compareTo are shown here
    public double getIncome() { return income; }
    public double getExpenses() { return expenses; }
}
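The post shows InfoBean but not a job that uses it. To illustrate why compareTo matters: when a WritableComparable is the map output key, the shuffle sorts records by it. Below is a minimal sketch, not from the original post; the class name, input layout, and driver wiring are our assumptions.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class SortStep {
    public static class SortMapper extends Mapper<LongWritable, Text, InfoBean, NullWritable> {
        private InfoBean bean = new InfoBean();
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // assumed layout: account \t income \t expenses
            String[] fields = value.toString().split("\t");
            bean.set(fields[0], Double.parseDouble(fields[1]), Double.parseDouble(fields[2]));
            // the key is serialized at write time, so reusing the bean is safe
            context.write(bean, NullWritable.get());
        }
    }
    // Driver omitted; wire it up like the jobs above, with
    // job.setMapOutputKeyClass(InfoBean.class). With a single reduce task the
    // output comes out ordered by income, then expenses, as compareTo defines.
}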

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class InverseIndex {
    public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
        // set the job jar
        job.setJarByClass(InverseIndex.class);
        // Mapper settings
        job.setMapperClass(IndexMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0])); // e.g. words.txt
        // Reducer settings
        job.setReducerClass(IndexReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setCombinerClass(IndexCombiner.class);
        // submit the job and wait for it to finish
job.waitForCompletion(true);
}
public static class IndexMapper extends Mapper<LongWritable, Text, Text, Text>{
private Text k = new Text();
private Text v = new Text();
@Override
protected void map(LongWritable key, Text value,
Mapper<LongWritable, Text, Text, Text>.Context context)
throws IOException, InterruptedException {
String line = value.toString();
String[] fields = line.split(" ");
            // the input split tells us which source file this record came from
            FileSplit inputSplit = (FileSplit) context.getInputSplit();
            Path path = inputSplit.getPath();
            String name = path.getName();
for(String f : fields){
k.set(f + "->" + name);
v.set("1");
context.write(k, v);
}
}
}
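    // Note on the combiner below: unlike a typical combiner it rewrites the key,
    // turning the map output ("word->file", 1) into ("word", "file->count").
    // Hadoop may run a combiner zero, one, or several times, so this pattern
    // silently depends on it running exactly once per key group; a more
    // defensive design would leave the re-keying to the reducer.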
public static class IndexCombiner extends Reducer<Text, Text, Text, Text>{
private Text k = new Text();
private Text v = new Text();
@Override
protected void reduce(Text key, Iterable<Text> values,
Reducer<Text, Text, Text, Text>.Context context)
throws IOException, InterruptedException {
String[] fields = key.toString().split("->");
long sum = 0;
for(Text t : values){
sum += Long.parseLong(t.toString());
}
k.set(fields[0]);
v.set(fields[1] + "->" + sum);
context.write(k, v);
}
}
public static class IndexReducer extends Reducer<Text, Text, Text, Text>{
private Text v = new Text();
@Override
protected void reduce(Text key, Iterable<Text> values,
Reducer<Text, Text, Text, Text>.Context context)
throws IOException, InterruptedException {
String value = "";
for(Text t : values){
value += t.toString() + " ";
}
v.set(value);
context.write(key, v);
}
}
}
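To make the data flow concrete, consider a hypothetical two-file input (assuming each file is a single split, so each gets its own map task and combiner pass):

a.txt: hello tom
b.txt: hello jerry hello

map output:     (hello->a.txt, 1) (tom->a.txt, 1) | (hello->b.txt, 1) (jerry->b.txt, 1) (hello->b.txt, 1)
after combiner: (hello, a.txt->1) (tom, a.txt->1) | (hello, b.txt->2) (jerry, b.txt->1)
reduce output:  hello   a.txt->1 b.txt->2
                jerry   b.txt->1
                tom     a.txt->1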
Hadoop (5) - MapReduce, a Powerful Tool for Distributed Computing (Advanced)
Original article: http://blog.csdn.net/zdp072/article/details/41949985