CountCards

A small MapReduce job that reads a pipe-delimited deck of cards and counts how many cards belong to each suit.
import java.io.IOException;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class CountCards extends Configured implements Tool {

    // Mapper: each input line looks like COLOR|SUIT|RANK (e.g. BLACK|SPADE|2).
    // Emit the suit (the second pipe-delimited field) with a count of 1.
    public static class CardsDemo extends Mapper<LongWritable, Text, Text, LongWritable> {
        @Override
        public void map(LongWritable offset, Text record, Context context)
                throws IOException, InterruptedException {
            String suit = record.toString().split("\\|")[1];
            context.write(new Text(suit), new LongWritable(1));
        }
    }

    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(getConf(), "card count");
        job.setJarByClass(CountCards.class);

        job.setMapperClass(CardsDemo.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);

        // No custom partitioner is set, so the default HashPartitioner
        // decides which of the two reducers receives each suit.
        job.setNumReduceTasks(2);

        // LongSumReducer sums the LongWritable values for each key,
        // so there is no need to write a reducer by hand.
        job.setReducerClass(LongSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.setInputPaths(job, new Path("/home/cloudera/workspace/Cardcount/a.txt"));
        FileOutputFormat.setOutputPath(job, new Path("/home/cloudera/workspace/Cardcount/aa1"));

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new CountCards(), args));
    }
}
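The commented-out setPartitionerClass line in the original code hints at plugging in a custom partitioner instead of relying on the default. As a minimal sketch (the SuitPartitioner class and its routing rule are my own illustration, not part of the original job), one way to send the black suits to reducer 0 and the red suits to reducer 1 would be:

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

// Hypothetical partitioner: SPADE and CLUB go to reducer 0, DIAMOND and HEART to reducer 1.
public class SuitPartitioner extends Partitioner<Text, LongWritable> {
    @Override
    public int getPartition(Text key, LongWritable value, int numPartitions) {
        if (numPartitions < 2) {
            return 0; // a single reducer gets everything
        }
        String suit = key.toString();
        return (suit.equals("SPADE") || suit.equals("CLUB")) ? 0 : 1;
    }
}

Wiring it in would just be job.setPartitionerClass(SuitPartitioner.class); inside run().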
------------------------------------------------------------------
a.txt
BLACK|SPADE|2
BLACK|SPADE|3
BLACK|SPADE|4
BLACK|SPADE|5
BLACK|SPADE|6
BLACK|SPADE|7
BLACK|SPADE|8
BLACK|SPADE|9
BLACK|SPADE|10
BLACK|SPADE|J
BLACK|SPADE|Q
BLACK|SPADE|K
BLACK|SPADE|A
BLACK|CLUB|2
BLACK|CLUB|3
BLACK|CLUB|4
BLACK|CLUB|5
BLACK|CLUB|6
BLACK|CLUB|7
BLACK|CLUB|8
BLACK|CLUB|9
BLACK|CLUB|10
BLACK|CLUB|J
BLACK|CLUB|Q
BLACK|CLUB|K
BLACK|CLUB|A
RED|DIAMOND|2
RED|DIAMOND|3
RED|DIAMOND|4
RED|DIAMOND|5
RED|DIAMOND|6
RED|DIAMOND|7
RED|DIAMOND|8
RED|DIAMOND|9
RED|DIAMOND|10
RED|DIAMOND|J
RED|DIAMOND|Q
RED|DIAMOND|K
RED|DIAMOND|A
RED|HEART|2
RED|HEART|3
RED|HEART|4
RED|HEART|5
RED|HEART|6
RED|HEART|7
RED|HEART|8
RED|HEART|9
RED|HEART|10
RED|HEART|J
RED|HEART|Q
RED|HEART|K
RED|HEART|A
--------------------------------
output
part-r-00000
SPADE 13
part-r-00001
CLUB 13
DIAMOND 13
HEART 13
---------------------------------------
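Why does SPADE end up alone in the first part file? With two reduce tasks and no custom partitioner, the default HashPartitioner picks the reducer for each key as partition = (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks, where the hash is computed over the Text key's bytes. For this input that formula sends SPADE to partition 0 and CLUB, DIAMOND and HEART to partition 1, which matches the split shown above. A quick standalone check of that formula (assuming the same 2 reduce tasks as the job):

import org.apache.hadoop.io.Text;

// Reproduces the default HashPartitioner formula to see which reducer each suit lands on.
public class WhichPartition {
    public static void main(String[] args) {
        int numReduceTasks = 2; // same as job.setNumReduceTasks(2) above
        for (String suit : new String[] {"SPADE", "CLUB", "DIAMOND", "HEART"}) {
            int partition = (new Text(suit).hashCode() & Integer.MAX_VALUE) % numReduceTasks;
            System.out.println(suit + " -> part-r-0000" + partition);
        }
    }
}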