Java Examples for scala.runtime.BoxedUnit
The following Java examples illustrate how scala.runtime.BoxedUnit is used in practice. BoxedUnit is the Java-facing representation of Scala's Unit type: a Scala function that returns Unit appears in Java as a function returning BoxedUnit, and its only value is the BoxedUnit.UNIT singleton. The source code samples below are taken from a variety of open source projects.
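Before the project samples, here is a minimal sketch of the basic pattern (written for this page rather than taken from any project; the class name PrintLine is invented for illustration): extend scala.runtime.AbstractFunction1 and return BoxedUnit.UNIT from apply.

import scala.runtime.AbstractFunction1;
import scala.runtime.BoxedUnit;

// A Java implementation of Scala's Function1[String, Unit].
public class PrintLine extends AbstractFunction1<String, BoxedUnit> {
    @Override
    public BoxedUnit apply(String line) {
        System.out.println(line);
        // Unit has exactly one value; from Java it is the BoxedUnit.UNIT singleton.
        return BoxedUnit.UNIT;
    }
}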
Example 1
| Project: vaadin-in-akka-master File: StockHubActor.java |
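In this Akka actor, receive() is typed as PartialFunction<Object, BoxedUnit> because an actor's message handler returns Scala's Unit; the ReceiveBuilder DSL and lambdas keep the BoxedUnit plumbing out of sight.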
@Override
public PartialFunction<Object, BoxedUnit> receive() {
    return ReceiveBuilder.match(WatchStock.class, msg -> {
        getOrCreateStockActor(msg).forward(msg, getContext());
    }).match(UnwatchStock.class, msg -> {
        if (msg.hasSymbol()) {
            getContext().getChild(msg.getSymbol()).forward(msg, getContext());
        } else {
            getContext().getChildren().forEach(ref -> ref.forward(msg, getContext()));
        }
    }).build();
}

Example 2
| Project: deep-spark-master File: DeepRDD.java |
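Here an anonymous AbstractFunction1<TaskContext, BoxedUnit> is registered as a Spark task-completion listener so the extractor client is closed when the task finishes.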
@Override
public Iterator<T> compute(Partition split, TaskContext context) {
    initExtractorClient();
    extractorClient.initIterator(split, config.getValue());
    context.addTaskCompletionListener(new AbstractFunction1<TaskContext, BoxedUnit>() {
        @Override
        public BoxedUnit apply(TaskContext v1) {
            extractorClient.close();
            // Return the Unit singleton rather than null.
            return BoxedUnit.UNIT;
        }
    });
    java.util.Iterator<T> iterator = new java.util.Iterator<T>() {
        @Override
        public boolean hasNext() {
            return extractorClient.hasNext();
        }

        @Override
        public T next() {
            return extractorClient.next();
        }

        @Override
        public void remove() {
            throw new DeepIOException("Method not implemented (and won't be implemented anytime soon!!!)");
        }
    };
    return new InterruptibleIterator<>(context, asScalaIterator(iterator));
}

Example 3
| Project: distributedlog-master File: BKDistributedLogManager.java |
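An AbstractFunction0<BoxedUnit> supplies the ensure callback that closes the read handler once the future completes, whatever its outcome.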
protected Future<List<LogSegmentMetadata>> getLogSegmentsAsync() {
    final BKLogReadHandler readHandler = createReadHandler();
    return readHandler.asyncGetFullLedgerList(true, false).ensure(new AbstractFunction0<BoxedUnit>() {
        @Override
        public BoxedUnit apply() {
            readHandler.asyncClose();
            return BoxedUnit.UNIT;
        }
    });
}

Example 4
| Project: flink-master File: JobManagerHARecoveryTest.java |
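As in Example 1, the handler is a PartialFunction<Object, BoxedUnit>, but here it is built from FI.UnitApply callbacks instead of lambdas, so no BoxedUnit value is written by hand.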
@Override
public PartialFunction<Object, BoxedUnit> handleMessage() {
    return ReceiveBuilder.match(JobManagerMessages.RecoverSubmittedJob.class,
            new FI.UnitApply<JobManagerMessages.RecoverSubmittedJob>() {
                @Override
                public void apply(JobManagerMessages.RecoverSubmittedJob submitJob) throws Exception {
                    recoveredJobs.add(submitJob.submittedJobGraph().getJobId());
                }
            }).matchAny(new FI.UnitApply<Object>() {
        @Override
        public void apply(Object o) throws Exception {
            TestingFailingHAJobManager.super.handleMessage().apply(o);
        }
    }).build();
}

Example 5
| Project: siebog-master File: Annotator.java |
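The callback passed to forEachStatement is an AbstractFunction1<Statement, BoxedUnit>: apply performs a side effect (printing statements that match the query with sufficient confidence) and returns Unit.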
@Override
protected void onMessage(ACLMessage msg) {
    if (msg.performative == Performative.REQUEST) {
        query = (QueryDesc) msg.contentObj;
        makeRequest();
    } else if (msg.performative == Performative.INFORM) {
        // Annotated URIs received.
        @SuppressWarnings("unchecked")
        HashSet<String> uris = (HashSet<String>) msg.contentObj;
        Iterator<String> i = uris.iterator();
        while (i.hasNext()) {
            if (i.next().equals(query.getQuestion())) {
                i.remove();
            }
        }
        pendingLearners = uris.size();
        for (String u : uris) {
            createNewLearner(u);
        }
    } else if (msg.performative == Performative.CONFIRM) {
        --pendingLearners;
        LOG.info("Pending learners: {}.", pendingLearners);
        if (pendingLearners == 0) {
            DNarsGraph graph = DNarsGraphFactory.create(query.getKnownProperties(), null);
            try {
                graph.forEachStatement(new AbstractFunction1<Statement, BoxedUnit>() {
                    @Override
                    public BoxedUnit apply(Statement st) {
                        if (st.toString().contains(query.getQuestion()) && st.truth().conf() > 0.7) {
                            System.out.println(st);
                        }
                        // Return the Unit singleton rather than null.
                        return BoxedUnit.UNIT;
                    }
                });
            } finally {
                graph.shutdown();
            }
        }
    }
}

Example 6
| Project: spork-streaming-master File: StoreConverter.java |
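This Spark Streaming callback itself returns BoxedUnit: call is invoked purely for its side effect of saving the RDD through Hadoop's output format.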
@Override
public BoxedUnit call(RDD<Tuple2<Text, Tuple>> rdd) throws Exception {
    try {
        PairRDDFunctions<Text, Tuple> pairRDDFunctions = new PairRDDFunctions<Text, Tuple>(rdd,
                SparkUtil.getManifest(Text.class), SparkUtil.getManifest(Tuple.class));
        JobConf storeJobConf = SparkUtil.newJobConf(pigContext);
        POStore poStore = configureStorer(storeJobConf, poperator);
        pairRDDFunctions.saveAsNewAPIHadoopFile(poStore.getSFile().getFileName(), Text.class,
                Tuple.class, PigOutputFormat.class, storeJobConf);
    } catch (Exception e) {
        System.out.println("CRASSSSSHHHHHHHHH");
        e.printStackTrace();
    }
    // Return the Unit singleton rather than null.
    return BoxedUnit.UNIT;
}

Example 7
| Project: aperture-tiles-master File: TileRegistrator.java |
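BoxedUnit shows up here only as one of the many classes registered with Kryo, so that Spark jobs can serialize it alongside other Scala runtime types.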
public void registerClasses(Kryo kryo) {
    // Throw an exception if a class is being serialized but has not been registered.
    kryo.setRegistrationRequired(true);
    kryo.register(scala.runtime.BoxedUnit.class);
    kryo.register(scala.None.class);
    kryo.register(scala.None$.class);
    kryo.register(scala.Some.class);
    kryo.register(scala.Some$.class);
    kryo.register(scala.collection.immutable.Range.class);
    kryo.register(scala.util.matching.Regex.class);
    kryo.register(scala.collection.immutable.Nil$.class);
    kryo.register(scala.Tuple2[].class);
    kryo.register(scala.collection.immutable.$colon$colon.class);
    try {
        kryo.register(Class.forName("scala.reflect.ClassTag$$anon$1"));
        kryo.register(Class.forName("scala.collection.immutable.Map$EmptyMap$"));
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
    kryo.register(char[].class);
    kryo.register(char[][].class);
    kryo.register(char[][][].class);
    kryo.register(char[][][][].class);
    kryo.register(byte[].class);
    kryo.register(byte[][].class);
    kryo.register(byte[][][].class);
    kryo.register(byte[][][][].class);
    kryo.register(short[].class);
    kryo.register(short[][].class);
    kryo.register(short[][][].class);
    kryo.register(short[][][][].class);
    kryo.register(int[].class);
    kryo.register(int[][].class);
    kryo.register(int[][][].class);
    kryo.register(int[][][][].class);
    kryo.register(long[].class);
    kryo.register(long[][].class);
    kryo.register(long[][][].class);
    kryo.register(long[][][][].class);
    kryo.register(float[].class);
    kryo.register(float[][].class);
    kryo.register(float[][][].class);
    kryo.register(float[][][][].class);
    kryo.register(double[].class);
    kryo.register(double[][].class);
    kryo.register(double[][][].class);
    kryo.register(double[][][][].class);
    kryo.register(boolean[].class);
    kryo.register(boolean[][].class);
    kryo.register(boolean[][][].class);
    kryo.register(boolean[][][][].class);
    kryo.register(Object[].class);
    kryo.register(java.lang.Class.class);
    kryo.register(java.util.HashMap.class);
    kryo.register(java.util.ArrayList.class);
    kryo.register(java.util.Date.class);
    kryo.register(org.apache.spark.scheduler.CompressedMapStatus.class);
    kryo.register(org.apache.spark.util.collection.CompactBuffer[].class);
    kryo.register(org.apache.spark.sql.Row.class);
    kryo.register(org.apache.spark.sql.Row[].class);
    kryo.register(org.apache.spark.sql.catalyst.expressions.GenericRow.class);
    kryo.register(org.apache.spark.sql.catalyst.expressions.GenericMutableRow.class);
    // com.oculusinfo.tilegen.spark
    kryo.register(MinMaxAccumulableParam.class);
    kryo.register(DoubleMinAccumulatorParam.class);
    kryo.register(DoubleMaxAccumulatorParam.class);
    kryo.register(IntMinAccumulatorParam.class);
    kryo.register(IntMaxAccumulatorParam.class);
    // com.oculusinfo.tilegen.tiling
    kryo.register(TileIO.class);
    kryo.register(HBaseTileIO.class);
    kryo.register(SequenceFileTileIO.class);
    kryo.register(Analytic.class);
    kryo.register(AnalysisDescription.class);
    kryo.register(ArrayAnalytic.class);
    kryo.register(BitSetAnalytic.class);
    kryo.register(IPv4Analytics.class);
    kryo.register(NumericMeanAnalytic.class);
    kryo.register(NumericMeanBinningAnalytic.class);
    kryo.register(NumericMeanTileAnalytic.class);
    kryo.register(NumericStatsAnalytic.class);
    kryo.register(NumericStatsBinningAnalytic.class);
    kryo.register(NumericStatsTileAnalytic.class);
    kryo.register(StringScoreAnalytic.class);
    kryo.register(StringScoreBinningAnalytic.class);
    kryo.register(StringScoreTileAnalytic.class);
    kryo.register(OrderedStringTileAnalytic.class);
    kryo.register(CategoryValueAnalytic.class);
    kryo.register(CategoryValueTileAnalytic.class);
    kryo.register(StringAnalytic.class);
    // com.oculusinfo.tilegen.util
    kryo.register(PropertiesWrapper.class);
    kryo.register(Rectangle.class);
    kryo.register(TypeConversion.class);
    kryo.register(EndPointsToLine.class);
    // com.oculusinfo.binning
    kryo.register(BinIndex.class);
    kryo.register(TileAndBinIndices.class);
    kryo.register(PyramidComparator.class);
    kryo.register(TileData.class);
    kryo.register(TileData[].class);
    kryo.register(TilePyramid.class);
    kryo.register(TileIndex.class);
    // com.oculusinfo.binning.impl
    kryo.register(AOITilePyramid.class);
    kryo.register(DenseTileData.class);
    kryo.register(DenseTileData[].class);
    kryo.register(DenseTileSliceView.class);
    kryo.register(SparseTileData.class);
    kryo.register(SparseTileData[].class);
    kryo.register(SparseTileSliceView.class);
    kryo.register(SubTileDataView.class);
    kryo.register(WebMercatorTilePyramid.class);
    // com.oculusinfo.binning.util
    kryo.register(Pair.class);
}

Example 8
| Project: terrapin-master File: TerrapinServiceImpl.java |
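The ensure block recording lookup latency is a Function0<BoxedUnit>, run after the future's map and rescue stages regardless of success or failure.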
@Override
public Future<TerrapinSingleResponse> get(final TerrapinGetRequest request) {
    final long startTimeMillis = System.currentTimeMillis();
    if (request.getClusterList().isEmpty()) {
        return Future.exception(new TerrapinGetException("Cluster list is empty",
                TerrapinGetErrorCode.INVALID_REQUEST));
    }
    ReplicatedTerrapinClient terrapinClient = getReplicatedTerrapinClient(request.getClusterList());
    if (terrapinClient == null) {
        return Future.exception(new TerrapinGetException(
                "Clusters [" + Joiner.on(", ").join(request.getClusterList()) + "] not found.",
                TerrapinGetErrorCode.CLUSTER_NOT_FOUND));
    }
    RequestOptions options;
    if (request.isSetOptions()) {
        options = request.getOptions();
    } else {
        options = new RequestOptions();
    }
    try {
        return terrapinClient.getMany(request.getFileSet(),
                Sets.newHashSet(ByteBuffer.wrap(request.getKey())), options)
                .map(new ExceptionalFunction<TerrapinResponse, TerrapinSingleResponse>() {
                    @Override
                    public TerrapinSingleResponse applyE(TerrapinResponse response) throws TerrapinGetException {
                        ByteBuffer keyBuf = ByteBuffer.wrap(request.getKey());
                        if (response.getResponseMap().containsKey(keyBuf)) {
                            TerrapinSingleResponse returnResponse = response.getResponseMap().get(keyBuf);
                            if (returnResponse.isSetErrorCode()) {
                                throw new TerrapinGetException("Read failed.", returnResponse.getErrorCode());
                            } else {
                                Stats.addMetric(request.getFileSet() + "-value-size", returnResponse.getValue().length);
                                Stats.addMetric("value-size", returnResponse.getValue().length);
                                return returnResponse;
                            }
                        } else {
                            return new TerrapinSingleResponse();
                        }
                    }
                })
                .rescue(new Function<Throwable, Future<TerrapinSingleResponse>>() {
                    @Override
                    public Future<TerrapinSingleResponse> apply(Throwable t) {
                        return getExceptionFuture(t);
                    }
                })
                .ensure(new Function0<BoxedUnit>() {
                    @Override
                    public BoxedUnit apply() {
                        int timeMillis = (int) (System.currentTimeMillis() - startTimeMillis);
                        Stats.addMetric(request.getFileSet() + "-lookup-latency-ms", timeMillis);
                        Stats.addMetric("lookup-latency-ms", timeMillis);
                        return BoxedUnit.UNIT;
                    }
                });
    } catch (Exception e) {
        return getExceptionFuture(e);
    }
}

Example 9
| Project: beam-master File: SparkUnboundedSource.java |
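In this Beam-on-Spark source, BoxedUnit is the element type of the DStream's RDD: the batch is computed purely for its side effects (metrics and watermarks), so compute returns an empty Option.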
@Override
public scala.Option<RDD<BoxedUnit>> compute(Time validTime) {
    // Compute the parent DStream.
    scala.Option<RDD<Metadata>> parentRDDOpt = parent.getOrCompute(validTime);
    final Accumulator<MetricsContainerStepMap> metricsAccum = MetricsAccumulator.getInstance();
    long count = 0;
    SparkWatermarks sparkWatermark = null;
    Instant globalLowWatermarkForBatch = BoundedWindow.TIMESTAMP_MIN_VALUE;
    Instant globalHighWatermarkForBatch = BoundedWindow.TIMESTAMP_MIN_VALUE;
    long maxReadDuration = 0;
    if (parentRDDOpt.isDefined()) {
        JavaRDD<Metadata> parentRDD = parentRDDOpt.get().toJavaRDD();
        for (Metadata metadata : parentRDD.collect()) {
            count += metadata.getNumRecords();
            // Compute the global input watermark - advance to latest of all partitions.
            Instant partitionLowWatermark = metadata.getLowWatermark();
            globalLowWatermarkForBatch = globalLowWatermarkForBatch.isBefore(partitionLowWatermark)
                    ? partitionLowWatermark : globalLowWatermarkForBatch;
            Instant partitionHighWatermark = metadata.getHighWatermark();
            globalHighWatermarkForBatch = globalHighWatermarkForBatch.isBefore(partitionHighWatermark)
                    ? partitionHighWatermark : globalHighWatermarkForBatch;
            // Update metrics reported in the read.
            final Gauge gauge = Metrics.gauge(NAMESPACE, READ_DURATION_MILLIS);
            final MetricsContainer container = metadata.getMetricsContainers().getContainer(stepName);
            try (Closeable ignored = MetricsEnvironment.scopedMetricsContainer(container)) {
                final long readDurationMillis = metadata.getReadDurationMillis();
                if (readDurationMillis > maxReadDuration) {
                    // Track the largest read duration seen so far and report it.
                    maxReadDuration = readDurationMillis;
                    gauge.set(readDurationMillis);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            metricsAccum.value().updateAll(metadata.getMetricsContainers());
        }
        sparkWatermark = new SparkWatermarks(globalLowWatermarkForBatch, globalHighWatermarkForBatch,
                new Instant(validTime.milliseconds()));
        // Add to the watermark queue.
        GlobalWatermarkHolder.add(inputDStreamId, sparkWatermark);
    }
    // Report - for RateEstimator and visibility.
    report(validTime, count, sparkWatermark);
    return scala.Option.empty();
}

Example 10
| Project: hpg-bigdata-master File: ParentDataset.java |
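A thin delegation method: the Unit-returning scala.Function1 callback is passed straight through to the wrapped dataset's foreach.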
public void foreach(scala.Function1<Row, scala.runtime.BoxedUnit> f) {
    ds.foreach(f);
}