/*
 * Copyright 2009-2013 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.uci.ics.pregelix.dataflow;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameDeserializer;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.util.StringSerializationUtils;
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;

/**
 * A single-activity sink operator that writes vertices to a per-partition local file.
 * For each incoming tuple, the last field (the vertex) is printed on its own line.
 * Optional pre/post runtime hooks are invoked when the operator opens and closes.
 */
public class VertexWriteOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
    private static final long serialVersionUID = 1L;
    private final FileSplit[] splits;
    private final IRuntimeHookFactory preHookFactory;
    private final IRuntimeHookFactory postHookFactory;
    private final IRecordDescriptorFactory inputRdFactory;

    public VertexWriteOperatorDescriptor(JobSpecification spec, IRecordDescriptorFactory inputRdFactory,
            IFileSplitProvider fileSplitProvider, IRuntimeHookFactory preHookFactory,
            IRuntimeHookFactory postHookFactory) {
        super(spec, 1, 0);
        this.splits = fileSplitProvider.getFileSplits();
        this.preHookFactory = preHookFactory;
        this.postHookFactory = postHookFactory;
        this.inputRdFactory = inputRdFactory;
    }

    @Override
    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
            final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
            throws HyracksDataException {
        IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
            private RecordDescriptor rd0;
            private FrameDeserializer frameDeserializer;
            private PrintWriter outputWriter;

            @Override
            public void open() throws HyracksDataException {
                // Resolve the input record descriptor: from the provider by default,
                // or from the supplied factory when one is given.
                rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
                        : inputRdFactory.createRecordDescriptor(ctx);
                frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
                try {
                    // Open the local file assigned to this partition and run the pre-hook, if any.
                    outputWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(splits[partition]
                            .getLocalFile().getFile())));
                    if (preHookFactory != null)
                        preHookFactory.createRuntimeHook().configure(ctx);
                } catch (IOException e) {
                    throw new HyracksDataException(e);
                }
            }

            @Override
            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
                frameDeserializer.reset(frame);
                while (!frameDeserializer.done()) {
                    Object[] tuple = frameDeserializer.deserializeRecord();
                    // output the vertex
                    outputWriter.print(StringSerializationUtils.toString(tuple[tuple.length - 1]));
                    outputWriter.println();
                }
            }

            @Override
            public void fail() throws HyracksDataException {

            }

            @Override
            public void close() throws HyracksDataException {
                // Run the post-hook, if any, then flush and close the output file.
                if (postHookFactory != null)
                    postHookFactory.createRuntimeHook().configure(ctx);
                outputWriter.close();
            }
        };
        return op;
    }
}
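// Usage sketch (not part of the original source; the upstream operator and factory
// names below are assumptions for illustration only). The descriptor is a one-input
// sink, so it is typically connected to the tail of a Hyracks JobSpecification and
// registered as a root, roughly like this:
//
//   JobSpecification spec = new JobSpecification();
//   IFileSplitProvider splitProvider = ...;   // one FileSplit per output partition
//   VertexWriteOperatorDescriptor writer = new VertexWriteOperatorDescriptor(spec,
//           inputRdFactory, splitProvider, preHookFactory, postHookFactory);
//   spec.connect(new OneToOneConnectorDescriptor(spec), upstreamScanOp, 0, writer, 0);
//   spec.addRoot(writer);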