/*
* Cloud9: A MapReduce Library for Hadoop
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package edu.umd.cloud9.example.hbase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
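import org.apache.hadoop.hbase.client.Get;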
import org.apache.hadoop.hbase.client.HBaseAdmin;
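import org.apache.hadoop.hbase.client.HTable;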
import org.apache.hadoop.hbase.client.Put;
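import org.apache.hadoop.hbase.client.Result;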
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;
/**
* Simple word count demo using HBase for storage.
*
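 * Example invocation (jar name, input path, and table name are illustrative):
 *
 * <pre>
 * hadoop jar cloud9.jar edu.umd.cloud9.example.hbase.HBaseWordCount \
 *   -input /path/to/input -output wordcount -numReducers 4
 * </pre>
 *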
* @author Jimmy Lin
*/
public class HBaseWordCount extends Configured implements Tool {
private static final Logger LOG = Logger.getLogger(HBaseWordCount.class);
public static final String[] FAMILIES = { "c" };
  public static final byte[] CF = Bytes.toBytes(FAMILIES[0]);
  public static final byte[] COUNT = Bytes.toBytes("count");
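  // Output schema: one row per word, keyed by the word itself, with the final
  // count stored under column family "c", qualifier "count", encoded as the
  // 4-byte big-endian int that Bytes.toBytes(int) produces.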
// Mapper: emits (token, 1) for every word occurrence.
private static class MyMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
// Reuse objects to save overhead of object creation.
private final static IntWritable ONE = new IntWritable(1);
private final static Text WORD = new Text();
@Override
public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
      String line = value.toString();
StringTokenizer itr = new StringTokenizer(line);
while (itr.hasMoreTokens()) {
WORD.set(itr.nextToken());
context.write(WORD, ONE);
}
}
}
// Reducer: sums up all the counts.
private static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
// Reuse objects.
private final static IntWritable SUM = new IntWritable();
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
// Sum up values.
Iterator<IntWritable> iter = values.iterator();
int sum = 0;
while (iter.hasNext()) {
sum += iter.next().get();
}
SUM.set(sum);
context.write(key, SUM);
}
}
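  // Table reducer: sums the counts and writes one Put per word into HBase.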
public static class MyTableReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
Put put = new Put(Bytes.toBytes(key.toString()));
put.add(CF, COUNT, Bytes.toBytes(sum));
context.write(null, put);
}
}
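  /**
   * A minimal sketch (not invoked by the job itself) of how a client could
   * read a count back out of the output table; it assumes the same pre-1.0
   * {@code HTable} client API used elsewhere in this class.
   */
  private static int readCount(Configuration hbaseConfig, String tableName, String word)
      throws IOException {
    HTable table = new HTable(hbaseConfig, tableName);
    try {
      Result result = table.get(new Get(Bytes.toBytes(word)));
      byte[] value = result.getValue(CF, COUNT);
      // A word that was never seen comes back as an empty Result with a null cell.
      return value == null ? 0 : Bytes.toInt(value);
    } finally {
      table.close();
    }
  }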
/**
* Creates an instance of this tool.
*/
public HBaseWordCount() {}
private static final String INPUT = "input";
private static final String OUTPUT = "output";
private static final String NUM_REDUCERS = "numReducers";
/**
* Runs this tool.
*/
@SuppressWarnings({ "static-access" })
public int run(String[] args) throws Exception {
Options options = new Options();
options.addOption(OptionBuilder.withArgName("path").hasArg()
.withDescription("input path").create(INPUT));
options.addOption(OptionBuilder.withArgName("table").hasArg()
.withDescription("HBase table name").create(OUTPUT));
options.addOption(OptionBuilder.withArgName("num").hasArg()
.withDescription("number of reducers").create(NUM_REDUCERS));
CommandLine cmdline;
CommandLineParser parser = new GnuParser();
try {
cmdline = parser.parse(options, args);
} catch (ParseException exp) {
System.err.println("Error parsing command line: " + exp.getMessage());
return -1;
}
if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
System.out.println("args: " + Arrays.toString(args));
HelpFormatter formatter = new HelpFormatter();
formatter.setWidth(120);
formatter.printHelp(this.getClass().getName(), options);
ToolRunner.printGenericCommandUsage(System.out);
return -1;
}
String inputPath = cmdline.getOptionValue(INPUT);
String outputTable = cmdline.getOptionValue(OUTPUT);
int reduceTasks = cmdline.hasOption(NUM_REDUCERS) ?
Integer.parseInt(cmdline.getOptionValue(NUM_REDUCERS)) : 1;
    // Create the output table, dropping and recreating it first if it already exists.
Configuration conf = getConf();
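    // Assumes the HBase client config lives at the standard /etc/hbase/conf
    // location; adjust this path for your deployment.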
conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"));
Configuration hbaseConfig = HBaseConfiguration.create(conf);
HBaseAdmin admin = new HBaseAdmin(hbaseConfig);
if (admin.tableExists(outputTable)) {
LOG.info(String.format("Table '%s' exists: dropping table and recreating.", outputTable));
LOG.info(String.format("Disabling table '%s'", outputTable));
admin.disableTable(outputTable);
LOG.info(String.format("Droppping table '%s'", outputTable));
admin.deleteTable(outputTable);
}
HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(outputTable));
    for (String family : FAMILIES) {
      tableDesc.addFamily(new HColumnDescriptor(family));
    }
admin.createTable(tableDesc);
LOG.info(String.format("Successfully created table '%s'", outputTable));
admin.close();
// Now we're ready to start running MapReduce.
LOG.info("Tool: " + HBaseWordCount.class.getSimpleName());
LOG.info(" - input path: " + inputPath);
LOG.info(" - output table: " + outputTable);
LOG.info(" - number of reducers: " + reduceTasks);
    Job job = Job.getInstance(hbaseConfig);
job.setJobName(HBaseWordCount.class.getSimpleName());
job.setJarByClass(HBaseWordCount.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setMapperClass(MyMapper.class);
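    // MyReducer serves only as a combiner; the final aggregation and the HBase
    // writes happen in MyTableReducer, wired up below.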
job.setCombinerClass(MyReducer.class);
job.setNumReduceTasks(reduceTasks);
FileInputFormat.setInputPaths(job, new Path(inputPath));
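    // initTableReducerJob points TableOutputFormat at the output table, sets
    // MyTableReducer as the reducer, and ships the HBase jars with the job.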
TableMapReduceUtil.initTableReducerJob(outputTable, MyTableReducer.class, job);
long startTime = System.currentTimeMillis();
    boolean success = job.waitForCompletion(true);
    LOG.info("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
    return success ? 0 : 1;
}
/**
* Dispatches command-line arguments to the tool via the {@code ToolRunner}.
*/
public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new HBaseWordCount(), args));
}
}