package io.lumify.dbpedia.mapreduce;

import io.lumify.core.mapreduce.LumifyMRBase;
import io.lumify.core.util.LumifyLogger;
import io.lumify.core.util.LumifyLoggerFactory;
import org.apache.accumulo.core.data.Mutation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.util.ToolRunner;
import org.securegraph.accumulo.mapreduce.AccumuloElementOutputFormat;

/**
 * Map-only MapReduce job that imports a DBpedia dump file into the
 * secure-graph Accumulo tables. Per-line parsing is handled by
 * {@link ImportMRMapper}; this class only configures and launches the job.
 */
public class ImportMR extends LumifyMRBase {
    private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(ImportMR.class);
    public static final String MULTI_VALUE_KEY = ImportMR.class.getName();

    @Override
    protected String getJobName() {
        return "dbpediaImport";
    }

    @Override
    protected void setupJob(Job job) throws Exception {
        job.setJarByClass(ImportMR.class);
        job.setMapperClass(ImportMRMapper.class);

        // Map-only job: no reducers, mappers emit Accumulo Mutations directly.
        job.setNumReduceTasks(0);
        job.setMapOutputValueClass(Mutation.class);

        // Read the dump as plain text lines; write elements to Accumulo.
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(AccumuloElementOutputFormat.class);

        // Input path was stashed in the configuration by parseArgs.
        FileInputFormat.addInputPath(job, new Path(getConf().get("in")));
    }

    @Override
    protected void parseArgs(JobConf conf, String[] args) {
        if (args.length != 1) {
            throw new RuntimeException("Required arguments <inputFileName>");
        }
        String inFileName = args[0];
        LOGGER.info("inFileName: %s", inFileName);
        conf.set("in", inFileName);
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new ImportMR(), args);
        System.exit(res);
    }
}