/*
 * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package java.util.stream;

import java.util.DoubleSummaryStatistics;
import java.util.Objects;
import java.util.OptionalDouble;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleConsumer;
import java.util.function.DoubleFunction;
import java.util.function.DoublePredicate;
import java.util.function.DoubleToIntFunction;
import java.util.function.DoubleToLongFunction;
import java.util.function.DoubleUnaryOperator;
import java.util.function.IntFunction;
import java.util.function.ObjDoubleConsumer;
import java.util.function.Supplier;

/**
 * Abstract base class for an intermediate pipeline stage or pipeline source
 * stage whose elements are of type {@code double}.
 *
 * @param <E_IN> type of elements in the upstream source
 *
 * @since 1.8
 */
abstract class DoublePipeline<E_IN>
        extends AbstractPipeline<E_IN, Double, DoubleStream>
        implements DoubleStream {

    /**
     * Constructor for the head of a stream pipeline.
     *
     * @param source {@code Supplier<Spliterator>} describing the stream source
     * @param sourceFlags the source flags for the stream source, described in
     *        {@link StreamOpFlag}
     * @param parallel {@code true} if the pipeline is parallel
     */
    DoublePipeline(Supplier<? extends Spliterator<Double>> source,
                   int sourceFlags, boolean parallel) {
        super(source, sourceFlags, parallel);
    }

    /**
     * Constructor for the head of a stream pipeline.
     *
     * @param source {@code Spliterator} describing the stream source
     * @param sourceFlags the source flags for the stream source, described in
     *        {@link StreamOpFlag}
     * @param parallel {@code true} if the pipeline is parallel
     */
    DoublePipeline(Spliterator<Double> source,
                   int sourceFlags, boolean parallel) {
        super(source, sourceFlags, parallel);
    }

    /**
     * Constructor for appending an intermediate operation onto an existing
     * pipeline.
     *
     * @param upstream the upstream element source
     * @param opFlags the operation flags
     */
    DoublePipeline(AbstractPipeline<?, E_IN, ?> upstream, int opFlags) {
        super(upstream, opFlags);
    }
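    // Illustrative sketch of how the stages in this file compose: a user-level
    // pipeline such as
    //
    //     double total = DoubleStream.of(1.0, 2.0, 3.0)
    //                                .map(x -> x * 2)
    //                                .sum();
    //
    // consists of a Head source stage and one StatelessOp intermediate stage
    // (both defined at the bottom of this file); the terminal sum() call then
    // triggers evaluation of the whole chain.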
    /**
     * Adapt a {@code Sink<Double>} to a {@code DoubleConsumer}, ideally simply
     * by casting.
     */
    private static DoubleConsumer adapt(Sink<Double> sink) {
        if (sink instanceof DoubleConsumer) {
            return (DoubleConsumer) sink;
        }
        else {
            if (Tripwire.ENABLED)
                Tripwire.trip(AbstractPipeline.class,
                              "using DoubleStream.adapt(Sink<Double> s)");
            return sink::accept;
        }
    }

    /**
     * Adapt a {@code Spliterator<Double>} to a {@code Spliterator.OfDouble}.
     *
     * @implNote
     * The implementation attempts to cast to a {@code Spliterator.OfDouble},
     * and throws an exception if this cast is not possible.
     */
    private static Spliterator.OfDouble adapt(Spliterator<Double> s) {
        if (s instanceof Spliterator.OfDouble) {
            return (Spliterator.OfDouble) s;
        }
        else {
            if (Tripwire.ENABLED)
                Tripwire.trip(AbstractPipeline.class,
                              "using DoubleStream.adapt(Spliterator<Double> s)");
            throw new UnsupportedOperationException("DoubleStream.adapt(Spliterator<Double> s)");
        }
    }

    // Shape-specific methods

    @Override
    final StreamShape getOutputShape() {
        return StreamShape.DOUBLE_VALUE;
    }

    @Override
    final <P_IN> Node<Double> evaluateToNode(PipelineHelper<Double> helper,
                                             Spliterator<P_IN> spliterator,
                                             boolean flattenTree,
                                             IntFunction<Double[]> generator) {
        return Nodes.collectDouble(helper, spliterator, flattenTree);
    }

    @Override
    final <P_IN> Spliterator<Double> wrap(PipelineHelper<Double> ph,
                                          Supplier<Spliterator<P_IN>> supplier,
                                          boolean isParallel) {
        return new StreamSpliterators.DoubleWrappingSpliterator<>(ph, supplier, isParallel);
    }

    @Override
    @SuppressWarnings("unchecked")
    final Spliterator.OfDouble lazySpliterator(Supplier<? extends Spliterator<Double>> supplier) {
        return new StreamSpliterators.DelegatingSpliterator.OfDouble((Supplier<Spliterator.OfDouble>) supplier);
    }

    @Override
    final void forEachWithCancel(Spliterator<Double> spliterator, Sink<Double> sink) {
        Spliterator.OfDouble spl = adapt(spliterator);
        DoubleConsumer adaptedSink = adapt(sink);
        do { } while (!sink.cancellationRequested() && spl.tryAdvance(adaptedSink));
    }

    @Override
    final Node.Builder<Double> makeNodeBuilder(long exactSizeIfKnown,
                                               IntFunction<Double[]> generator) {
        return Nodes.doubleBuilder(exactSizeIfKnown);
    }

    // DoubleStream

    @Override
    public final PrimitiveIterator.OfDouble iterator() {
        return Spliterators.iterator(spliterator());
    }

    @Override
    public final Spliterator.OfDouble spliterator() {
        return adapt(super.spliterator());
    }

    // Stateless intermediate ops from DoubleStream

    @Override
    public final Stream<Double> boxed() {
        return mapToObj(Double::valueOf);
    }

    @Override
    public final DoubleStream map(DoubleUnaryOperator mapper) {
        Objects.requireNonNull(mapper);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsDouble(t));
                    }
                };
            }
        };
    }
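    // Rough sketch of the sink chaining used by map() above: the returned
    // Sink.ChainedDouble transforms each incoming value and forwards it
    // downstream, conceptually equivalent to
    //
    //     DoubleConsumer chained = t -> downstream.accept(mapper.applyAsDouble(t));
    //
    // so the operation needs no buffering and remains stateless.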
    @Override
    public final <U> Stream<U> mapToObj(DoubleFunction<? extends U> mapper) {
        Objects.requireNonNull(mapper);
        return new ReferencePipeline.StatelessOp<Double, U>(this, StreamShape.DOUBLE_VALUE,
                                                            StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<U> sink) {
                return new Sink.ChainedDouble<U>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.apply(t));
                    }
                };
            }
        };
    }

    @Override
    public final IntStream mapToInt(DoubleToIntFunction mapper) {
        Objects.requireNonNull(mapper);
        return new IntPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                                   StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Integer> sink) {
                return new Sink.ChainedDouble<Integer>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsInt(t));
                    }
                };
            }
        };
    }

    @Override
    public final LongStream mapToLong(DoubleToLongFunction mapper) {
        Objects.requireNonNull(mapper);
        return new LongPipeline.StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                                    StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Long> sink) {
                return new Sink.ChainedDouble<Long>(sink) {
                    @Override
                    public void accept(double t) {
                        downstream.accept(mapper.applyAsLong(t));
                    }
                };
            }
        };
    }

    @Override
    public final DoubleStream flatMap(DoubleFunction<? extends DoubleStream> mapper) {
        Objects.requireNonNull(mapper);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE,
                                       StreamOpFlag.NOT_SORTED | StreamOpFlag.NOT_DISTINCT | StreamOpFlag.NOT_SIZED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void begin(long size) {
                        downstream.begin(-1);
                    }

                    @Override
                    public void accept(double t) {
                        try (DoubleStream result = mapper.apply(t)) {
                            // We can do better than this too; optimize for the depth=0 case and just grab the spliterator and forEach it
                            if (result != null)
                                result.sequential().forEach(i -> downstream.accept(i));
                        }
                    }
                };
            }
        };
    }

    @Override
    public DoubleStream unordered() {
        if (!isOrdered())
            return this;
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE, StreamOpFlag.NOT_ORDERED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return sink;
            }
        };
    }

    @Override
    public final DoubleStream filter(DoublePredicate predicate) {
        Objects.requireNonNull(predicate);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE, StreamOpFlag.NOT_SIZED) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void begin(long size) {
                        downstream.begin(-1);
                    }

                    @Override
                    public void accept(double t) {
                        if (predicate.test(t))
                            downstream.accept(t);
                    }
                };
            }
        };
    }

    @Override
    public final DoubleStream peek(DoubleConsumer action) {
        Objects.requireNonNull(action);
        return new StatelessOp<Double>(this, StreamShape.DOUBLE_VALUE, 0) {
            @Override
            Sink<Double> opWrapSink(int flags, Sink<Double> sink) {
                return new Sink.ChainedDouble<Double>(sink) {
                    @Override
                    public void accept(double t) {
                        action.accept(t);
                        downstream.accept(t);
                    }
                };
            }
        };
    }

    // Stateful intermediate ops from DoubleStream

    @Override
    public final DoubleStream limit(long maxSize) {
        if (maxSize < 0)
            throw new IllegalArgumentException(Long.toString(maxSize));
        return SliceOps.makeDouble(this, (long) 0, maxSize);
    }
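    // Sketch of how limit() above and substream() below compose (assuming the
    // usual SliceOps semantics of a skip count plus an optional limit):
    //
    //     stream.substream(10).limit(5)   // elements 10..14 in encounter order
    //
    // Both operations delegate to SliceOps.makeDouble(this, skip, limit),
    // where a limit of -1 means "no upper bound".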
    @Override
    public final DoubleStream substream(long startingOffset) {
        if (startingOffset < 0)
            throw new IllegalArgumentException(Long.toString(startingOffset));
        if (startingOffset == 0)
            return this;
        else {
            long limit = -1;
            return SliceOps.makeDouble(this, startingOffset, limit);
        }
    }

    @Override
    public final DoubleStream substream(long startingOffset, long endingOffset) {
        if (startingOffset < 0 || endingOffset < startingOffset)
            throw new IllegalArgumentException(String.format("substream(%d, %d)", startingOffset, endingOffset));
        return SliceOps.makeDouble(this, startingOffset, endingOffset - startingOffset);
    }

    @Override
    public final DoubleStream sorted() {
        return SortedOps.makeDouble(this);
    }

    @Override
    public final DoubleStream distinct() {
        // While functional and quick to implement, this approach is not very efficient.
        // An efficient version requires a double-specific map/set implementation.
        return boxed().distinct().mapToDouble(i -> (double) i);
    }

    // Terminal ops from DoubleStream

    @Override
    public void forEach(DoubleConsumer consumer) {
        evaluate(ForEachOps.makeDouble(consumer, false));
    }

    @Override
    public void forEachOrdered(DoubleConsumer consumer) {
        evaluate(ForEachOps.makeDouble(consumer, true));
    }

    @Override
    public final double sum() {
        // TODO: better algorithm to compensate for errors
        return reduce(0.0, Double::sum);
    }

    @Override
    public final OptionalDouble min() {
        return reduce(Math::min);
    }

    @Override
    public final OptionalDouble max() {
        return reduce(Math::max);
    }

    @Override
    public final OptionalDouble average() {
        double[] avg = collect(() -> new double[2],
                               (ll, i) -> {
                                   ll[0]++;
                                   ll[1] += i;
                               },
                               (ll, rr) -> {
                                   ll[0] += rr[0];
                                   ll[1] += rr[1];
                               });
        return avg[0] > 0
               ? OptionalDouble.of(avg[1] / avg[0])
               : OptionalDouble.empty();
    }

    @Override
    public final long count() {
        return mapToObj(e -> null).mapToInt(e -> 1).sum();
    }

    @Override
    public final DoubleSummaryStatistics summaryStatistics() {
        return collect(DoubleSummaryStatistics::new, DoubleSummaryStatistics::accept,
                       DoubleSummaryStatistics::combine);
    }

    @Override
    public final double reduce(double identity, DoubleBinaryOperator op) {
        return evaluate(ReduceOps.makeDouble(identity, op));
    }

    @Override
    public final OptionalDouble reduce(DoubleBinaryOperator op) {
        return evaluate(ReduceOps.makeDouble(op));
    }

    @Override
    public final <R> R collect(Supplier<R> supplier,
                               ObjDoubleConsumer<R> accumulator,
                               BiConsumer<R, R> combiner) {
        BinaryOperator<R> operator = (left, right) -> {
            combiner.accept(left, right);
            return left;
        };
        return evaluate(ReduceOps.makeDouble(supplier, accumulator, operator));
    }

    @Override
    public final boolean anyMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ANY));
    }

    @Override
    public final boolean allMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.ALL));
    }

    @Override
    public final boolean noneMatch(DoublePredicate predicate) {
        return evaluate(MatchOps.makeDouble(predicate, MatchOps.MatchKind.NONE));
    }

    @Override
    public final OptionalDouble findFirst() {
        return evaluate(FindOps.makeDouble(true));
    }

    @Override
    public final OptionalDouble findAny() {
        return evaluate(FindOps.makeDouble(false));
    }

    @Override
    public final double[] toArray() {
        return Nodes.flattenDouble((Node.OfDouble) evaluateToArrayNode(Double[]::new))
                    .asPrimitiveArray();
    }

    //
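    // Sketch of a typical use of the mutable-reduction collect() above, which
    // adapts the BiConsumer combiner into a BinaryOperator before handing it
    // to ReduceOps:
    //
    //     double[] countAndSum = DoubleStream.of(1.0, 2.0, 3.0)
    //             .collect(() -> new double[2],
    //                      (a, v) -> { a[0]++; a[1] += v; },
    //                      (a, b) -> { a[0] += b[0]; a[1] += b[1]; });
    //
    // average() uses exactly this shape of accumulator: slot 0 holds the count
    // and slot 1 the running sum.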
    /**
     * Source stage of a DoubleStream.
     *
     * @param <E_IN> type of elements in the upstream source
     */
    static class Head<E_IN> extends DoublePipeline<E_IN> {
        /**
         * Constructor for the source stage of a DoubleStream.
         *
         * @param source {@code Supplier<Spliterator>} describing the stream
         *               source
         * @param sourceFlags the source flags for the stream source, described
         *                    in {@link StreamOpFlag}
         * @param parallel {@code true} if the pipeline is parallel
         */
        Head(Supplier<? extends Spliterator<Double>> source,
             int sourceFlags, boolean parallel) {
            super(source, sourceFlags, parallel);
        }

        /**
         * Constructor for the source stage of a DoubleStream.
         *
         * @param source {@code Spliterator} describing the stream source
         * @param sourceFlags the source flags for the stream source, described
         *                    in {@link StreamOpFlag}
         * @param parallel {@code true} if the pipeline is parallel
         */
        Head(Spliterator<Double> source,
             int sourceFlags, boolean parallel) {
            super(source, sourceFlags, parallel);
        }

        @Override
        final boolean opIsStateful() {
            throw new UnsupportedOperationException();
        }

        @Override
        final Sink<E_IN> opWrapSink(int flags, Sink<Double> sink) {
            throw new UnsupportedOperationException();
        }

        // Optimized sequential terminal operations for the head of the pipeline

        @Override
        public void forEach(DoubleConsumer consumer) {
            if (!isParallel()) {
                adapt(sourceStageSpliterator()).forEachRemaining(consumer);
            }
            else {
                super.forEach(consumer);
            }
        }

        @Override
        public void forEachOrdered(DoubleConsumer consumer) {
            if (!isParallel()) {
                adapt(sourceStageSpliterator()).forEachRemaining(consumer);
            }
            else {
                super.forEachOrdered(consumer);
            }
        }
    }

    /**
     * Base class for a stateless intermediate stage of a DoubleStream.
     *
     * @param <E_IN> type of elements in the upstream source
     * @since 1.8
     */
    abstract static class StatelessOp<E_IN> extends DoublePipeline<E_IN> {
        /**
         * Construct a new DoubleStream by appending a stateless intermediate
         * operation to an existing stream.
         *
         * @param upstream the upstream pipeline stage
         * @param inputShape the stream shape for the upstream pipeline stage
         * @param opFlags operation flags for the new stage
         */
        StatelessOp(AbstractPipeline<?, E_IN, ?> upstream,
                    StreamShape inputShape,
                    int opFlags) {
            super(upstream, opFlags);
            assert upstream.getOutputShape() == inputShape;
        }

        @Override
        final boolean opIsStateful() {
            return false;
        }
    }

    /**
     * Base class for a stateful intermediate stage of a DoubleStream.
     *
     * @param <E_IN> type of elements in the upstream source
     * @since 1.8
     */
    abstract static class StatefulOp<E_IN> extends DoublePipeline<E_IN> {
        /**
         * Construct a new DoubleStream by appending a stateful intermediate
         * operation to an existing stream.
         *
         * @param upstream the upstream pipeline stage
         * @param inputShape the stream shape for the upstream pipeline stage
         * @param opFlags operation flags for the new stage
         */
        StatefulOp(AbstractPipeline<?, E_IN, ?> upstream,
                   StreamShape inputShape,
                   int opFlags) {
            super(upstream, opFlags);
            assert upstream.getOutputShape() == inputShape;
        }

        @Override
        final boolean opIsStateful() {
            return true;
        }

        @Override
        abstract <P_IN> Node<Double> opEvaluateParallel(PipelineHelper<Double> helper,
                                                        Spliterator<P_IN> spliterator,
                                                        IntFunction<Double[]> generator);
    }
}
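// Illustrative note on Head's optimized terminal operations: a sequential
// pipeline with no intermediate stages, e.g.
//
//     DoubleStream.of(1.0, 2.0, 3.0).forEach(System.out::println);
//
// drains the source spliterator directly via forEachRemaining, while a pipeline
// with intermediate stages (or a parallel one) falls back to the general
// evaluate(ForEachOps...) path defined in DoublePipeline.forEach above.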