package com.thinkaurelius.faunus.formats;

import com.thinkaurelius.faunus.FaunusVertex;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

import java.io.DataOutputStream;
import java.io.IOException;
/**
 * Base class for Faunus OutputFormats that write {@link FaunusVertex} records to files,
 * transparently applying the output compression codec configured on the job.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public abstract class FaunusFileOutputFormat extends FileOutputFormat<NullWritable, FaunusVertex> {

    public DataOutputStream getDataOutputStream(final TaskAttemptContext job) throws IOException, InterruptedException {
        final Configuration conf = job.getConfiguration();
        // Compression is controlled by the standard FileOutputFormat job settings.
        final boolean isCompressed = getCompressOutput(job);
        CompressionCodec codec = null;
        String extension = "";
        if (isCompressed) {
            final Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(job, DefaultCodec.class);
            codec = ReflectionUtils.newInstance(codecClass, conf);
            // Append the codec's default extension (e.g. ".gz") to the work file name.
            extension = codec.getDefaultExtension();
        }
        final Path file = super.getDefaultWorkFile(job, extension);
        final FileSystem fs = file.getFileSystem(conf);
        if (!isCompressed) {
            return new DataOutputStream(fs.create(file, false));
        } else {
            // Wrap the raw stream so records are compressed as they are written.
            return new DataOutputStream(codec.createOutputStream(fs.create(file, false)));
        }
    }
}
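
/*
 * A minimal sketch of a concrete subclass, assuming only the Faunus API shown above;
 * it is not part of the original codebase. The class name
 * HypotheticalTextVertexOutputFormat and its one-vertex-per-line toString() encoding
 * are illustrative only. To enable compressed output, a driver would typically call
 * FileOutputFormat.setCompressOutput(job, true) and
 * FileOutputFormat.setOutputCompressorClass(job, org.apache.hadoop.io.compress.GzipCodec.class).
 */
class HypotheticalTextVertexOutputFormat extends FaunusFileOutputFormat {

    @Override
    public org.apache.hadoop.mapreduce.RecordWriter<NullWritable, FaunusVertex> getRecordWriter(final TaskAttemptContext job) throws IOException, InterruptedException {
        // Obtain the work-file stream; it is already codec-wrapped if compression is enabled.
        final DataOutputStream out = this.getDataOutputStream(job);
        return new org.apache.hadoop.mapreduce.RecordWriter<NullWritable, FaunusVertex>() {
            @Override
            public void write(final NullWritable key, final FaunusVertex vertex) throws IOException {
                // Illustrative encoding: one vertex per line via toString().
                out.writeBytes(vertex.toString());
                out.writeBytes("\n");
            }

            @Override
            public void close(final TaskAttemptContext context) throws IOException {
                out.close();
            }
        };
    }
}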