/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.apache.pig.piggybank.storage.avro;

import java.io.IOException;
import java.util.Objects;

import org.apache.avro.file.DataFileWriter;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * The RecordWriter used to output pig results as avro data.
 *
 * Keys are ignored ({@link NullWritable}); each value is appended as one
 * record to the wrapped Avro {@link DataFileWriter}.
 */
public class PigAvroRecordWriter extends RecordWriter<NullWritable, Object> {

    // The underlying Avro container-file writer; owned by the caller until
    // close(), after which this class has closed it.
    private final DataFileWriter<Object> writer;

    /**
     * construct with avro writer
     * @param writer avro data writer; must not be null
     * @throws NullPointerException if {@code writer} is null
     */
    public PigAvroRecordWriter(DataFileWriter<Object> writer) {
        // Fail fast here rather than with an obscure NPE on first write/close.
        this.writer = Objects.requireNonNull(writer, "writer");
    }

    /**
     * Flush and close the underlying Avro writer.
     * @param context task attempt context (unused)
     * @throws IOException if closing the Avro file fails
     */
    @Override
    public void close(TaskAttemptContext context)
            throws IOException, InterruptedException {
        writer.close();
    }

    /**
     * Append one record to the Avro file.
     * @param key ignored (always NullWritable)
     * @param value the datum to append
     * @throws IOException if the append fails
     */
    @Override
    public void write(NullWritable key, Object value)
            throws IOException, InterruptedException {
        writer.append(value);
    }
}