Java Examples for com.google.common.io.CountingInputStream

The following Java examples will help you understand the usage of com.google.common.io.CountingInputStream, Guava's InputStream wrapper that counts the number of bytes read from the underlying stream. The source code samples are taken from a variety of open source projects.
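As a quick orientation before the project samples, here is a minimal, self-contained sketch of the typical call pattern: wrap a stream, read through it, then ask getCount() for the number of bytes consumed. The file name example.dat is only a placeholder.

import com.google.common.io.ByteStreams;
import com.google.common.io.CountingInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CountingInputStreamDemo {
    public static void main(String[] args) throws IOException {
        // wrap any InputStream; every read is delegated to it and counted
        try (CountingInputStream in = new CountingInputStream(new FileInputStream("example.dat"))) {
            // read to the end, discarding the data
            ByteStreams.copy(in, ByteStreams.nullOutputStream());
            System.out.println("Read " + in.getCount() + " bytes");
        }
    }
}

The examples below apply the same pattern in context: metering progress, recording file sizes, validating stream lengths, and computing byte offsets.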

Example 1
Project: common-java-cookbook-master  File: MeteredExample.java
public static void main(String[] args) throws Exception {
    InputStream fis = new FileInputStream(new File("data", "large.txt"));
    CountingInputStream cis = new CountingInputStream(fis);
    while (cis.read() != -1) {
        long bytesRead = cis.getCount();
        if (bytesRead % 50 == 0) {
            System.out.printf("Read %d bytes...\n", bytesRead);
        }
    }
    // release the file handle once the stream has been fully consumed
    cis.close();
    OutputStream fos = new FileOutputStream(new File("output.dat"));
    CountingOutputStream cos = new CountingOutputStream(fos);
    String testString = "TEST STRING";
    cos.write(testString.getBytes(Charset.defaultCharset()));
    System.out.printf("Just wrote %d bytes to output.dat", cos.getCount());
    // closing the counting stream also closes the wrapped FileOutputStream
    cos.close();
}
Example 2
Project: scrutineer-master  File: ElasticSearchSorter.java
public void sort(InputStream inputStream, OutputStream outputStream) {
    long begin = System.currentTimeMillis();
    CountingInputStream countingInputStream = new CountingInputStream(inputStream);
    doSort(countingInputStream, outputStream);
    LogUtils.infoTimeTaken(LOG, begin, countingInputStream.getCount(), "Sorted stream of %d bytes", countingInputStream.getCount());
}
Example 3
Project: airship-master  File: TestValidatingResponseHandler.java
private static Response fakeJsonResponse(String json) {
    InputStream input = new ByteArrayInputStream(json.getBytes(Charsets.UTF_8));
    final CountingInputStream countingInputStream = new CountingInputStream(input);
    return new Response() {

        @Override
        public int getStatusCode() {
            return HttpStatus.OK.code();
        }

        @Override
        public String getStatusMessage() {
            return HttpStatus.OK.reason();
        }

        @Override
        public String getHeader(String name) {
            List<String> list = getHeaders().get(name);
            return list.isEmpty() ? null : list.get(0);
        }

        @Override
        public ListMultimap<String, String> getHeaders() {
            return ImmutableListMultimap.<String, String>builder().put(HttpHeaders.CONTENT_TYPE, MediaType.JSON_UTF_8.toString()).build();
        }

        @Override
        public long getBytesRead() {
            return countingInputStream.getCount();
        }

        @Override
        public InputStream getInputStream() throws IOException {
            return countingInputStream;
        }
    };
}
Example 4
Project: jabylon-master  File: LogTail.java
public void nextChunk(int maxLines, Deque<String> buffer) {
    BufferedReader reader = null;
    try {
        CountingInputStream in = new CountingInputStream(new FileInputStream(logFile));
        //buffer of 1 is slow, but at least predictable, so we can reset
        reader = new BufferedReader(new InputStreamReader(in), 1);
        reader.skip(currentChunk);
        String s = null;
        int lines = 0;
        while ((s = reader.readLine()) != null) {
            buffer.add(s);
            lines++;
            //unless it's the first chunk we stop once we reached max lines
            if (currentChunk > 0 && lines == maxLines)
                break;
        }
        currentChunk = in.getCount();
    } catch (FileNotFoundException e) {
        LOG.warn("Logfile does not seem to exist (yet)", e);
    } catch (IOException e) {
        LOG.warn("Failed to read logfile", e);
    } finally {
        try {
            // reader is null if the log file could not be opened
            if (reader != null)
                reader.close();
        } catch (IOException e) {
            LOG.error("Failed to close the logfile", e);
        }
    }
}
Example 5
Project: servo-master  File: BaseHandler.java
public void handle(HttpExchange exchange) throws IOException {
    CountingInputStream input = new CountingInputStream(exchange.getRequestBody());
    CountingOutputStream output = new CountingOutputStream(exchange.getResponseBody());
    exchange.setStreams(input, output);
    Stopwatch stopwatch = latency.start();
    try {
        handleImpl(exchange);
    } finally {
        stopwatch.stop();
        bytesReceived.increment(input.getCount());
        bytesSent.increment(output.getCount());
    }
}
Example 6
Project: b1-pack-master  File: VolumeCursor.java
private void openVolume(long number) throws IOException {
    if (inputStream != null) {
        inputStream.close();
    }
    volumeNumber = number;
    volume = Preconditions.checkNotNull(provider.getVolume(number), "Volume %s not found", number);
    inputStream = new CountingInputStream(volume.getInputStream());
    headerSet = readHead(number);
    checkVolume(headerSet.getSchemaVersion() != null);
    Preconditions.checkState(headerSet.getSchemaVersion() <= Volumes.SCHEMA_VERSION, "B1 archive version not supported (%s): %s", headerSet.getSchemaVersion(), volume.getName());
    checkVolume(headerSet.getArchiveId() != null && headerSet.getArchiveId().equals(archiveId));
    checkVolume(headerSet.getVolumeNumber() != null && headerSet.getVolumeNumber() == volumeNumber);
}
Example 7
Project: GeoGig-master  File: SendObjectResource.java (the same code appears in gig-master)
@Override
public void post(Representation entity) {
    InputStream input = null;
    Request request = getRequest();
    try {
        LOGGER.info("Receiving objects from {}", request.getClientInfo().getAddress());
        Representation representation = request.getEntity();
        input = representation.getStream();
        final GeoGIG ggit = getGeogig(request).get();
        final BinaryPackedObjects unpacker = new BinaryPackedObjects(ggit.getRepository().objectDatabase());
        CountingInputStream countingStream = new CountingInputStream(input);
        Stopwatch sw = Stopwatch.createStarted();
        IngestResults ingestResults = unpacker.ingest(countingStream);
        sw.stop();
        LOGGER.info(String.format("SendObjectResource: Processed %,d objects.\nInserted: %,d.\nExisting: %,d.\nTime to process: %s.\nStream size: %,d bytes.\n", ingestResults.total(), ingestResults.getInserted(), ingestResults.getExisting(), sw, countingStream.getCount()));
    } catch (IOException e) {
        LOGGER.warn("Error processing incoming objects from {}", request.getClientInfo().getAddress(), e);
        throw new RestletException(e.getMessage(), Status.SERVER_ERROR_INTERNAL, e);
    } finally {
        if (input != null)
            Closeables.closeQuietly(input);
    }
}
Example 8
Project: webdav-cassandra-master  File: FileStorageService.java
public void createFile(final String fullFilePath, final InputContext inputContext) throws DavException {
    if (cassandraDao.getFile(fullFilePath) != null) {
        throw new DavException(DavServletResponse.SC_CONFLICT);
    }
    final String parentDirectory = getParentDirectory(fullFilePath);
    final String fileName = PathUtils.getFileName(fullFilePath);
    final UUID parentId = cassandraDao.getFile(parentDirectory);
    try {
        final UUID fileUUID = cassandraFileDao.createFile(parentId, fileName);
        if (inputContext.hasStream() && inputContext.getContentLength() >= 0) {
            final CountingInputStream countingInputStream = new CountingInputStream(inputContext.getInputStream());
            cassandraFileDao.writeFile(fileUUID, countingInputStream);
            cassandraFileDao.updateFileInfo(fileUUID, countingInputStream.getCount());
        }
    } catch (ConnectionException e) {
        throw new RuntimeException(e);
    }
}
Example 9
Project: gradle-master  File: AbstractExternalResource.java
public ExternalResourceReadResult<Void> writeTo(OutputStream output) {
    try {
        CountingInputStream input = openUnbuffered();
        try {
            IOUtils.copyLarge(input, output);
        } finally {
            input.close();
        }
        return ExternalResourceReadResult.of(input.getCount());
    } catch (Exception e) {
        throw ResourceExceptions.getFailed(getURI(), e);
    }
}
Example 10
Project: jpmml-sklearn-master  File: CompressedInputStreamStorage.java
@Override
public void close() throws IOException {
    if (this.closed) {
        return;
    }
    this.closed = true;
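    // super.in is the CountingInputStream installed when this storage was opened;
    // its count is the number of uncompressed bytes actually read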
    long size = ((CountingInputStream) super.in).getCount();
    super.close();
    if (size != expectedSize) {
        throw new IOException("Expected " + expectedSize + " byte(s) of uncompressed data, got " + size + " byte(s)");
    }
}
Example 11
Project: POL-POM-5-master  File: Tar.java
List<File> uncompressTarBz2File(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    try (CountingInputStream countingInputStream = new CountingInputStream(new FileInputStream(inputFile));
        InputStream inputStream = new BZip2CompressorInputStream(countingInputStream)) {
        final long finalSize = FileUtils.sizeOf(inputFile);
        return uncompress(inputStream, countingInputStream, outputDir, finalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
Example 12
Project: timestamper-plugin-master  File: TimestampsWriterTest.java
private List<Integer> writtenTimestampData() throws Exception {
    byte[] fileContents = Files.toByteArray(timestampsFile);
    CountingInputStream inputStream = new CountingInputStream(new ByteArrayInputStream(fileContents));
    List<Integer> timestampData = new ArrayList<Integer>();
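    // getCount() reports how many bytes the varint reads below have consumed,
    // so the loop stops exactly at the end of the file contents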
    while (inputStream.getCount() < fileContents.length) {
        timestampData.add((int) Varint.read(inputStream));
    }
    return timestampData;
}
Example 13
Project: Europeana-Cloud-master  File: SwiftContentDAO.java
/**
 * Puts the given content into storage under the given file name, counting the
 * content length and computing the MD5 checksum of the data as it is stored.
 *
 * @param fileName name of the file
 * @param data content of the file to be saved
 * @return MD5 checksum and content length
 * @throws IOException if an I/O error occurs
 */
@Override
public PutResult putContent(String fileName, InputStream data) throws IOException, ContainerNotFoundException {
    BlobStore blobStore = connectionProvider.getBlobStore();
    String container = connectionProvider.getContainer();
    CountingInputStream countingInputStream = new CountingInputStream(data);
    DigestInputStream md5DigestInputStream = md5InputStream(countingInputStream);
    BlobBuilder builder = blobStore.blobBuilder(fileName);
    builder = builder.name(fileName);
    builder = builder.payload(md5DigestInputStream);
    Blob blob = builder.build();
    blobStore.putBlob(container, blob);
    String md5 = BaseEncoding.base16().lowerCase().encode(md5DigestInputStream.getMessageDigest().digest());
    Long contentLength = countingInputStream.getCount();
    return new PutResult(md5, contentLength);
}
Example 14
Project: hale-master  File: LookupStreamResource.java
@Override
public InputStream getInput() throws IOException {
    if (input.getCount() > lookupLimit) {
        throw new IllegalStateException("Input stream can only be consumed once.");
    }
    input.reset();
    return new PreventMark(new FilterInputStream(input) {

        /**
         * @see java.io.FilterInputStream#close()
         */
        @Override
        public void close() throws IOException {
            if (((CountingInputStream) in).getCount() > lookupLimit) {
                // close only if lookupLimit has been exceeded
                super.close();
            } else {
                // otherwise reset
                reset();
            }
        }

        @Override
        protected void finalize() throws Throwable {
            super.finalize();
            // close the underlying stream if not yet done
            super.close();
        }
    });
}
Example 15
Project: hbase-master  File: TestCellMessageCodec.java (the same code appears in pbase-master)
@Test
public void testEmptyWorks() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    CountingOutputStream cos = new CountingOutputStream(baos);
    DataOutputStream dos = new DataOutputStream(cos);
    MessageCodec cmc = new MessageCodec();
    Codec.Encoder encoder = cmc.getEncoder(dos);
    encoder.flush();
    dos.close();
    long offset = cos.getCount();
    assertEquals(0, offset);
    CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(baos.toByteArray()));
    DataInputStream dis = new DataInputStream(cis);
    Codec.Decoder decoder = cmc.getDecoder(dis);
    assertFalse(decoder.advance());
    dis.close();
    assertEquals(0, cis.getCount());
}
Example 16
Project: openscoring-master  File: ModelRegistry.java
@SuppressWarnings(value = { "resource" })
public Model load(InputStream is) throws Exception {
    CountingInputStream countingIs = new CountingInputStream(is);
    HashingInputStream hashingIs = new HashingInputStream(Hashing.md5(), countingIs);
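    // chaining the wrappers collects the byte count and the MD5 hash in a single pass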
    ModelEvaluator<?> evaluator = unmarshal(hashingIs, this.validate);
    PMML pmml = evaluator.getPMML();
    for (Class<? extends Visitor> visitorClazz : this.visitorClazzes) {
        Visitor visitor = visitorClazz.newInstance();
        visitor.applyTo(pmml);
    }
    evaluator.verify();
    Model model = new Model(evaluator);
    model.putProperty(Model.PROPERTY_FILE_SIZE, countingIs.getCount());
    model.putProperty(Model.PROPERTY_FILE_MD5SUM, (hashingIs.hash()).toString());
    return model;
}
Example 17
Project: SmallCloudEmoji-master  File: DownloadAsyncTask.java
@Override
protected Integer doInBackground(Repository... params) {
    InputStream inputStream = null;
    HttpURLConnection connection = null;
    Repository repository = params[0];
    try {
        URL url = new URL(repository.getUrl());
        for (int i = 0; i < 10; ++i) {
            // Limit redirection (between HTTP and HTTPS) < 10 times.
            connection = (HttpURLConnection) url.openConnection();
            int statusCode = connection.getResponseCode();
            if (statusCode == HttpURLConnection.HTTP_OK) {
                break;
            } else if (statusCode == HttpURLConnection.HTTP_MOVED_TEMP || statusCode == HttpURLConnection.HTTP_MOVED_PERM) {
                url = connection.getURL();
            } else if (statusCode == HttpURLConnection.HTTP_NOT_FOUND) {
                return RESULT_ERROR_NOT_FOUND;
            } else {
                return RESULT_ERROR_OTHER_HTTP;
            }
        }
        if (connection == null)
            return RESULT_ERROR_OTHER_HTTP;
        final int fileLength = connection.getContentLength();
        final CountingInputStream counting = new CountingInputStream(connection.getInputStream());
        inputStream = counting;
        RepositoryLoader repositoryLoader;
        String contentType = connection.getContentType();
        String filename = connection.getURL().getFile().toLowerCase();
        if (contentType.startsWith("text/xml") || filename.endsWith(".xml"))
            repositoryLoader = new RepositoryXmlLoader(daoSession);
        else if (contentType.startsWith("application/json") || filename.endsWith(".json"))
            repositoryLoader = new RepositoryJsonLoader(daoSession);
        else
            return RESULT_ERROR_UNSUPPORTED_FORMAT;
        repositoryLoader.setLoaderEventListener(new RepositoryLoaderEventListener() {

            private long lastUpdateProcess;

            public boolean onLoadingCategory(Category category) {
                return isCancelled();
            }

            @Override
            public boolean onEntryLoaded(Entry entry) {
                if (System.currentTimeMillis() - lastUpdateProcess >= 100) {
                    // do the arithmetic in long, then narrow the percentage to int
                    int process = (int) (counting.getCount() * 100 / fileLength);
                    publishProgress(process);
                    lastUpdateProcess = System.currentTimeMillis();
                }
                return isCancelled();
            }
        });
        repositoryLoader.loadToDatabase(repository, new BufferedReader(new InputStreamReader(inputStream)));
        // Update source install state.
        SourceDao sourceDao = daoSession.getSourceDao();
        List<Source> sources = sourceDao.queryBuilder().where(SourceDao.Properties.CodeUrl.eq(repository.getUrl()), SourceDao.Properties.Installed.eq(false)).list();
        if (!sources.isEmpty()) {
            for (Source source : sources) source.setInstalled(true);
            sourceDao.updateInTx(sources);
        }
    } catch (LoadingCancelException e) {
        return RESULT_CANCELLED;
    } catch (MalformedURLException e) {
        return RESULT_ERROR_MALFORMED_URL;
    } catch (IOException e) {
        return RESULT_ERROR_IO;
    } catch (XmlPullParserException e) {
        return RESULT_ERROR_XML_PARSER;
    } catch (Exception e) {
        e.printStackTrace();
        return RESULT_ERROR_UNKNOWN;
    } finally {
        DatabaseHelper.getInstance(context).close();
        try {
            if (inputStream != null)
                inputStream.close();
        } catch (IOException e) {
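            // nothing useful can be done if closing fails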
        }
        if (connection != null)
            connection.disconnect();
    }
    return RESULT_SUCCESS;
}
Example 18
Project: archive-commons-master  File: GZIPMembersInputStream.java (the same code appears in webarchive-commons-master)
@Override
protected boolean readTrailer() throws IOException {
    int c = inf.getRemaining();
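    // member end = raw bytes read so far, minus the inflater's unconsumed
    // remainder, plus the 8-byte GZIP trailer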
    currentMemberEnd = ((CountingInputStream) in).getCount() - (c - 8);
    //        return super.readTrailer();
    // REIMPLEMENTED TO FIX MISUSE OF available()
    InputStream in = this.in;
    int n = inf.getRemaining();
    if (n > 0) {
        in = new SequenceInputStream(new ByteArrayInputStream(buf, len - n, n), in);
    }
    // Uses left-to-right evaluation order
    if ((readUInt(in) != crc.getValue()) || // rfc1952; ISIZE is the input size modulo 2^32
    (readUInt(in) != (inf.getBytesWritten() & 0xffffffffL)))
        throw new ZipException("Corrupt GZIP trailer");
    // always try concatenated case; EOF or other IOException
    // will let us know if we're wrong
    // this.trailer
    int m = 8;
    try {
        // next.header
        m += readHeader(in);
    } catch (IOException ze) {
        return true;
    }
    inf.reset();
    if (n > m)
        inf.setInput(buf, len - n + m, n - m);
    return false;
}
Example 19
Project: dcm4chee-storage2-master  File: CloudStorageSystemProvider.java
private void upload(StorageContext ctx, InputStream in, String name, long len) throws IOException {
    String container = system.getStorageSystemContainer();
    BlobStore blobStore = context.getBlobStore();
    if (blobStore.blobExists(container, name))
        throw new ObjectAlreadyExistsException(system.getStorageSystemPath(), container + '/' + name);
    CountingInputStream cin = new CountingInputStream(in);
    Payload payload = new InputStreamPayload(cin);
    if (len != -1) {
        payload.getContentMetadata().setContentLength(len);
    }
    Blob blob = blobStore.blobBuilder(name).payload(payload).build();
    String etag = (multipartUploader != null) ? multipartUploader.upload(container, blob) : blobStore.putBlob(container, blob);
    ctx.setFileSize(cin.getCount());
    log.info("Uploaded[uri={}, container={}, name={}, etag={}]", system.getStorageSystemPath(), container, name, etag);
}
Example 20
Project: OpenMapKit-master  File: OSMMapBuilder.java
@Override
protected JTSModel doInBackground(File... params) {
    File f = params[0];
    fileName = f.getName();
    String absPath = f.getAbsolutePath();
    Log.i("BEGIN_PARSING", fileName);
    setFileSize(f.length());
    try {
        InputStream is = new FileInputStream(f);
        countingInputStream = new CountingInputStream(is);
        OSMDataSet ds = OSMXmlParserInOSMMapBuilder.parseFromInputStream(countingInputStream, this);
        if (isOSMEdit) {
            jtsModel.mergeEditedOSMDataSet(absPath, ds);
        } else {
            jtsModel.addOSMDataSet(absPath, ds);
        }
        loadedOSMFiles.add(absPath);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return jtsModel;
}
Example 21
Project: beam-master  File: CoderProperties.java
@VisibleForTesting
static <T> T decode(Coder<T> coder, Coder.Context context, byte[] bytes) throws CoderException, IOException {
    @SuppressWarnings("unchecked") Coder<T> deserializedCoder = SerializableUtils.clone(coder);
    byte[] buffer;
    if (context == Coder.Context.NESTED) {
        buffer = new byte[bytes.length + 1];
        System.arraycopy(bytes, 0, buffer, 0, bytes.length);
        buffer[bytes.length] = 1;
    } else {
        buffer = bytes;
    }
    CountingInputStream cis = new CountingInputStream(new ByteArrayInputStream(buffer));
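    // UnownedInputStream keeps the coder from closing a stream it does not own;
    // the assertion below then checks that exactly the encoded bytes were consumed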
    T value = deserializedCoder.decode(new UnownedInputStream(cis), context);
    assertThat("consumed bytes equal to encoded bytes", cis.getCount(), equalTo((long) bytes.length));
    return value;
}
Example 22
Project: commoncrawl-crawler-master  File: BlekkoURLListTransfer.java
private static long readWriteNextLine(CountingInputStream is, ByteBuffer inputBuffer, DataOutputBuffer outputBuffer, SequenceFile.Writer writer) throws IOException {
    outputBuffer.reset();
    for (; ; ) {
        if (inputBuffer.remaining() == 0) {
            int bytesRead = is.read(inputBuffer.array());
            if (bytesRead == -1) {
                throw new EOFException();
            } else {
                inputBuffer.clear();
                inputBuffer.limit(bytesRead);
            }
        }
        int scanStartPos = inputBuffer.position();
        boolean eos = false;
        while (inputBuffer.remaining() != 0) {
            byte nextChar = inputBuffer.get();
            if ((nextChar == '\n') || (nextChar == '\r')) {
                eos = true;
                break;
            }
        }
        // put whatever we read into the output buffer ...
        outputBuffer.write(inputBuffer.array(), scanStartPos, inputBuffer.position() - scanStartPos);
        if (eos) {
            break;
        }
    }
    String line = new String(outputBuffer.getData(), 0, outputBuffer.getLength(), Charset.forName("UTF-8"));
    int spaceDelimiter = line.indexOf(' ');
    if (spaceDelimiter != -1 && spaceDelimiter < line.length() - 1) {
        String url = line.substring(0, spaceDelimiter);
        String metadata = line.substring(spaceDelimiter + 1);
        if (url.length() != 0 && metadata.length() != 0) {
            writer.append(new Text(url), new Text(metadata));
        // System.out.println("URL:" + url + " Metadata:" + metadata);
        }
    }
    return is.getCount() + inputBuffer.position();
}
Example 23
Project: imhotep-master  File: SimpleFlamdexWriter.java
public static void writeIntBTree(String directory, String intField, File btreeDir) throws IOException {
    final String termsFilename = Files.buildPath(directory, SimpleIntFieldWriter.getTermsFilename(intField));
    if (!new File(termsFilename).exists() || new File(termsFilename).length() == 0L)
        return;
    final CountingInputStream termsList = new CountingInputStream(new BufferedInputStream(new FileInputStream(termsFilename), 65536));
    try {
        ImmutableBTreeIndex.Writer.write(btreeDir, new AbstractIterator<Generation.Entry<Long, LongPair>>() {

            private long lastTerm = 0;

            private long lastTermDocOffset = 0L;

            private long lastTermFileOffset = 0L;

            private long key;

            private LongPair value;

            @Override
            protected Generation.Entry<Long, LongPair> computeNext() {
                try {
                    if (!nextTerm())
                        return endOfData();
                    key = lastTerm;
                    value = new LongPair(lastTermFileOffset, lastTermDocOffset);
                    for (int i = 0; i < BLOCK_SIZE - 1; ++i) {
                        if (!nextTerm()) {
                            break;
                        }
                    }
                    return Generation.Entry.create(key, value);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }

            private boolean nextTerm() throws IOException {
                final long termDelta;
                //sorry
                try {
                    termDelta = FlamdexUtils.readVLong(termsList);
                } catch (EOFException e) {
                    return false;
                }
                lastTerm += termDelta;
                final long offsetDelta = FlamdexUtils.readVLong(termsList);
                lastTermDocOffset += offsetDelta;
                lastTermFileOffset = termsList.getCount();
                // termDocFreq
                FlamdexUtils.readVLong(termsList);
                return true;
            }
        }, new LongSerializer(), new LongPairSerializer(), 65536, false);
    } finally {
        termsList.close();
    }
}
Example 24
Project: knowledgestore-master  File: Serializer.java
@SuppressWarnings("resource")
@Override
public Object readFrom(final Class<Object> type, final Type genericType, final Annotation[] annotations, final MediaType mediaType, final MultivaluedMap<String, String> headers, final InputStream input) throws IOException, WebApplicationException {
    final String mimeType = mediaType.getType() + "/" + mediaType.getSubtype();
    final CountingInputStream in = new CountingInputStream(input);
    final boolean chunked = "true".equalsIgnoreCase(headers.getFirst(Protocol.HEADER_CHUNKED));
    final long ts = System.currentTimeMillis();
    try {
        if (type.isAssignableFrom(Representation.class)) {
            final InputStream stream = interceptClose(in, ts);
            final Representation representation = Representation.create(stream);
            readMetadata(representation.getMetadata(), headers);
            return representation;
        } else if (isAssignable(genericType, Protocol.STREAM_OF_RECORDS.getType())) {
            final RDFFormat format = formatFor(mimeType);
            final AtomicLong numStatements = new AtomicLong();
            final AtomicLong numRecords = new AtomicLong();
            Stream<Statement> statements = RDFUtil.readRDF(in, format, null, null, false);
            statements = statements.track(numStatements, null);
            Stream<Record> records = Record.decode(statements, null, chunked);
            records = records.track(numRecords, null);
            interceptClose(records, in, ts, numRecords, "record(s)", numStatements, "statement(s)");
            return records;
        } else if (isAssignable(genericType, Protocol.STREAM_OF_OUTCOMES.getType())) {
            final RDFFormat format = formatFor(mimeType);
            final AtomicLong numStatements = new AtomicLong();
            final AtomicLong numOutcomes = new AtomicLong();
            Stream<Statement> statements = RDFUtil.readRDF(in, format, null, null, false);
            statements = statements.track(numStatements, null);
            Stream<Outcome> outcomes = Outcome.decode(statements, chunked);
            outcomes = outcomes.track(numOutcomes, null);
            interceptClose(outcomes, in, ts, numOutcomes, "outcome(s)", numStatements, "statement(s)");
            return outcomes;
        } else if (isAssignable(genericType, Protocol.STREAM_OF_STATEMENTS.getType())) {
            final RDFFormat format = formatFor(mimeType);
            final AtomicLong numStatements = new AtomicLong();
            Stream<Statement> statements = RDFUtil.readRDF(in, format, null, null, false);
            statements = statements.track(numStatements, null);
            interceptClose(statements, in, ts, numStatements, "statement(s)");
            return statements;
        } else if (isAssignable(genericType, Protocol.STREAM_OF_TUPLES.getType())) {
            final TupleQueryResultFormat format;
            format = TupleQueryResultFormat.forMIMEType(mimeType);
            final AtomicLong numTuples = new AtomicLong();
            Stream<BindingSet> tuples = RDFUtil.readSparqlTuples(format, in);
            tuples = tuples.track(numTuples, null);
            interceptClose(tuples, in, ts, numTuples, "tuple(s)");
            return tuples;
        } else if (isAssignable(genericType, Protocol.STREAM_OF_BOOLEANS.getType())) {
            final BooleanQueryResultFormat format;
            format = BooleanQueryResultFormat.forMIMEType(mimeType);
            final boolean result = RDFUtil.readSparqlBoolean(format, in);
            final Stream<Boolean> stream = Stream.create(result);
            interceptClose(stream, in, ts, 1, "boolean");
            return stream;
        }
    } catch (final Throwable ex) {
        Util.closeQuietly(in);
        Throwables.propagateIfPossible(ex, IOException.class);
        throw Throwables.propagate(ex);
    }
    throw new IllegalArgumentException("Cannot deserialize " + genericType + " from " + mimeType);
}
Example 25
Project: buck-master  File: ProjectBuildFileParser.java (the same code appears in platform_build-master)
/** Initialize the parser, starting buck.py. */
private void init() throws IOException {
    projectBuildFileParseEventStarted = new ProjectBuildFileParseEvents.Started();
    buckEventBus.post(projectBuildFileParseEventStarted);
    try (SimplePerfEvent.Scope scope = SimplePerfEvent.scope(buckEventBus, PerfEventId.of("ParserInit"))) {
        ImmutableMap.Builder<String, String> pythonEnvironmentBuilder = ImmutableMap.builder();
        // Strip out PYTHONPATH. buck.py manually sets this to include only nailgun. We don't want
        // to inject nailgun into the parser's PYTHONPATH, so strip that value out.
        // If we wanted to pass on some environmental PYTHONPATH, we would have to do some actual
        // merging of this and the BuckConfig's python module search path.
        pythonEnvironmentBuilder.putAll(Maps.filterKeys(environment,  k -> !PYTHONPATH_ENV_VAR_NAME.equals(k)));
        if (options.getPythonModuleSearchPath().isPresent()) {
            pythonEnvironmentBuilder.put(PYTHONPATH_ENV_VAR_NAME, options.getPythonModuleSearchPath().get());
        }
        ImmutableMap<String, String> pythonEnvironment = pythonEnvironmentBuilder.build();
        ProcessExecutorParams params = ProcessExecutorParams.builder().setCommand(buildArgs()).setEnvironment(pythonEnvironment).build();
        LOG.debug("Starting buck.py command: %s environment: %s", params.getCommand(), params.getEnvironment());
        buckPyProcess = processExecutor.launchProcess(params);
        LOG.debug("Started process %s successfully", buckPyProcess);
        buckPyProcessInput = new CountingInputStream(buckPyProcess.getInputStream());
        buckPyProcessJsonGenerator = ObjectMappers.createGenerator(buckPyProcess.getOutputStream());
        // We have to wait to create the JsonParser until after we write our
        // first request, because Jackson "helpfully" synchronously reads
        // from the InputStream trying to detect whether the encoding is
        // UTF-8 or UTF-16 as soon as you create a JsonParser:
        //
        // https://git.io/vSgnA
        //
        // Since buck.py doesn't write any data until after it receives
        // a query, creating the JsonParser here would hang indefinitely.
        InputStream stderr = buckPyProcess.getErrorStream();
        InputStreamConsumer stderrConsumer = new InputStreamConsumer(stderr, (InputStreamConsumer.Handler)  line -> buckEventBus.post(ConsoleEvent.warning("Warning raised by BUCK file parser: %s", line)));
        stderrConsumerTerminationFuture = new FutureTask<>(stderrConsumer);
        stderrConsumerThread = Threads.namedThread(ProjectBuildFileParser.class.getSimpleName(), stderrConsumerTerminationFuture);
        stderrConsumerThread.start();
    }
}
Example 26
Project: smooth-build-master  File: Blob.java
private long size() {
    try (CountingInputStream inputStream = new CountingInputStream(openInputStream())) {
        Streams.copy(inputStream, ByteStreams.nullOutputStream());
        return inputStream.getCount();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example 27
Project: appengine-java-mapreduce-master  File: BlobstoreInputReader.java
// -------------------------- INSTANCE METHODS --------------------------
private void createStreams() throws IOException {
    input = new CountingInputStream(new BufferedInputStream(new BlobstoreInputStream(new BlobKey(blobKey), startOffset + offset), DEFAULT_BUFFER_SIZE));
    recordIterator = new InputStreamIterator(input, endOffset - startOffset - offset, startOffset != 0L && offset == 0L, terminator);
}
Example 28
Project: alluxio-master  File: ObjectUnderFileInputStream.java (the same code appears in tachyon-master)
/**
 * Opens a new stream, closing any previously opened one.
 *
 * @param options options for opening the stream
 */
private void openStream(OpenOptions options) throws IOException {
    if (mStream != null) {
        mStream.close();
    }
    mInitPos = options.getOffset();
    mStream = new CountingInputStream(mUfs.openObject(mKey, options));
}
Example 29
Project: elasticinbox-master  File: IOUtils.java
/**
 * Calculates the size of an InputStream by reading it through to the end.
 *
 * @param in the stream to measure
 * @return size of the data in bytes
 * @throws IOException if an I/O error occurs
 */
public static long getInputStreamSize(InputStream in) throws IOException {
    CountingInputStream cin = new CountingInputStream(in);
    ByteStreams.copy(cin, new NullOutputStream());
    return cin.getCount();
}
Example 30
Project: c5-replicator-master  File: SequentialLogWithHeader.java
private static HeaderWithSize readHeaderFromPersistence(BytePersistence persistence) throws IOException {
    try (CountingInputStream input = getCountingInputStream(persistence.getReader())) {
        final OLogHeader header = decodeAndCheckCrc(input, HEADER_SCHEMA);
        final long headerSize = input.getCount();
        return new HeaderWithSize(header, headerSize);
    }
}
Example 31
Project: tephra-master  File: HDFSTransactionStateStorage.java
private TransactionSnapshot readSnapshotInputStream(InputStream in) throws IOException {
    CountingInputStream countingIn = new CountingInputStream(in);
    TransactionSnapshot snapshot = codecProvider.decode(countingIn);
    LOG.info("Read encoded transaction snapshot of {} bytes", countingIn.getCount());
    return snapshot;
}