Java Examples for org.elasticsearch.search.aggregations.AggregationBuilders

The following Java examples illustrate the usage of org.elasticsearch.search.aggregations.AggregationBuilders. These source code samples are taken from different open source projects.

Example 1
Project: elasticsearch-java-client-examples-master  File: NestedObjectMappingExamples.java View source code
//  GET /nested/blogpost/_search?search_type=count
//  {
//    "aggs": {
//      "comments": { 
//        "nested": {
//          "path": "comments"
//        },
//        "aggs": {
//          "by_month": {
//            "date_histogram": { 
//              "field":    "comments.date",
//              "interval": "month",
//              "format":   "yyyy-MM"
//            },
//            "aggs": {
//              "avg_stars": {
//                "avg": { 
//                  "field": "comments.stars"
//                }
//              }
//            }
//          }
//        }
//      }
//    }
//  }
@Test
public void testQueryWithAggOnNested() {
    // Aggregation names, reused when reading the response back.
    String aggOnComments = "comments";
    String aggOnMonths = "by_month";
    String aggOnAvgStars = "avg_stars";
    // Three-level aggregation mirroring the JSON above:
    // nested("comments") -> date_histogram per month -> avg of comments.stars.
    SearchResponse response = client.prepareSearch(indexName)
            .setTypes(typeName)
            .setSearchType(SearchType.COUNT)
            .addAggregation(
                    AggregationBuilders.nested(aggOnComments)
                            .path(aggOnComments)
                            .subAggregation(
                                    AggregationBuilders.dateHistogram(aggOnMonths)
                                            .field("comments.date")
                                            .interval(Interval.MONTH)
                                            .format("yyyy-MM")
                                            .subAggregation(
                                                    AggregationBuilders.avg(aggOnAvgStars)
                                                            .field("comments.stars"))))
            .execute()
            .actionGet();
    // Dump the raw response; SearchType.COUNT suppresses hits, only aggs are returned.
    System.out.println(response);
}
Example 2
Project: elassandra-master  File: SearchQueryIT.java View source code
/**
 * Exercises {@code multi_match} queries across several fields: default dis_max
 * scoring, disabling dis_max, the AND operator (which applies per field, not
 * across fields), per-field boosts, and the {@code lenient} flag for
 * number-format mismatches.
 */
@Test
public void testMultiMatchQuery() throws Exception {
    createIndex("test");
    indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value4", "field3", "value3"), client().prepareIndex("test", "type1", "2").setSource("field1", "value2", "field2", "value5", "field3", "value2"), client().prepareIndex("test", "type1", "3").setSource("field1", "value3", "field2", "value6", "field3", "value1"));
    MultiMatchQueryBuilder builder = multiMatchQuery("value1 value2 value4", "field1", "field2");
    SearchResponse searchResponse = client().prepareSearch().setQuery(builder).addAggregation(AggregationBuilders.terms("field1").field("field1")).get();
    assertHitCount(searchResponse, 2L);
    // this uses dismax so scores are equal and the order can be arbitrary
    assertSearchHits(searchResponse, "1", "2");
    builder.useDisMax(false);
    searchResponse = client().prepareSearch().setQuery(builder).get();
    assertHitCount(searchResponse, 2L);
    assertSearchHits(searchResponse, "1", "2");
    client().admin().indices().prepareRefresh("test").get();
    // Operator only applies on terms inside a field! Fields are always OR-ed together.
    builder = multiMatchQuery("value1", "field1", "field2").operator(MatchQueryBuilder.Operator.AND);
    searchResponse = client().prepareSearch().setQuery(builder).get();
    assertHitCount(searchResponse, 1L);
    assertFirstHit(searchResponse, hasId("1"));
    refresh();
    // Same query but with a boosted field3: both matching docs come back, "3" first.
    builder = multiMatchQuery("value1", "field1", "field3^1.5").operator(MatchQueryBuilder.Operator.AND);
    searchResponse = client().prepareSearch().setQuery(builder).get();
    assertHitCount(searchResponse, 2L);
    assertSearchHits(searchResponse, "3", "1");
    client().admin().indices().prepareRefresh("test").get();
    // Equivalent boost expressed through field(name, boost) instead of "^" syntax.
    builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f).operator(MatchQueryBuilder.Operator.AND);
    searchResponse = client().prepareSearch().setQuery(builder).get();
    assertHitCount(searchResponse, 2L);
    assertSearchHits(searchResponse, "3", "1");
    // Test lenient: overwrite doc "3" so field4 is numeric, then query it with a string.
    client().prepareIndex("test", "type1", "3").setSource("field1", "value7", "field2", "value8", "field4", 5).get();
    refresh();
    builder = multiMatchQuery("value1", "field1", "field2", "field4");
    // Without lenient the numeric field4 rejects the string term with a parse failure.
    assertFailures(client().prepareSearch().setQuery(builder), RestStatus.BAD_REQUEST, containsString("NumberFormatException[For input string: \"value1\"]"));
    builder.lenient(true);
    searchResponse = client().prepareSearch().setQuery(builder).get();
    assertHitCount(searchResponse, 1L);
    assertFirstHit(searchResponse, hasId("1"));
}
Example 3
Project: elasticsearch-master  File: ChildQuerySearchIT.java View source code
/**
 * Verifies that a terms aggregation scoped under a global + filter aggregation
 * counts child documents independently of the has_child query used for hits.
 * Expects two parent hits and term buckets red=2, yellow=1.
 */
public void testScopedFacet() throws Exception {
    // "single_type: false" allows both parent and child mappings in one index.
    assertAcked(prepareCreate("test").setSettings("index.mapping.single_type", false).addMapping("parent").addMapping("child", "_parent", "type=parent", "c_field", "type=keyword"));
    ensureGreen();
    // index simple data
    client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
    client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get();
    client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get();
    client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get();
    client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get();
    client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get();
    refresh();
    // Query: parents with a red or yellow child. Aggregation: global scope
    // (ignores the query) -> filter on red/yellow -> terms on c_field.
    SearchResponse searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None)).addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.filter("filter", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow"))).subAggregation(AggregationBuilders.terms("facet1").field("c_field")))).get();
    assertNoFailures(searchResponse);
    // Both parents have at least one red/yellow child.
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
    assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
    assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
    // Unwrap the nested aggregation levels: global -> filter -> terms.
    Global global = searchResponse.getAggregations().get("global");
    Filter filter = global.getAggregations().get("filter");
    Terms termsFacet = filter.getAggregations().get("facet1");
    assertThat(termsFacet.getBuckets().size(), equalTo(2));
    // Buckets are ordered by doc count: red (c1, c4) before yellow (c2).
    assertThat(termsFacet.getBuckets().get(0).getKeyAsString(), equalTo("red"));
    assertThat(termsFacet.getBuckets().get(0).getDocCount(), equalTo(2L));
    assertThat(termsFacet.getBuckets().get(1).getKeyAsString(), equalTo("yellow"));
    assertThat(termsFacet.getBuckets().get(1).getDocCount(), equalTo(1L));
}
Example 4
Project: liferay-portal-master  File: DefaultStatsTranslator.java View source code
/**
 * Translates the requested {@code stats} flags into Elasticsearch metric
 * aggregations on the stats field, registering one aggregation per enabled
 * flag on the given request builder. Does nothing when stats are disabled.
 */
@Override
public void translate(SearchRequestBuilder searchRequestBuilder, Stats stats) {
    if (!stats.isEnabled()) {
        return;
    }
    String statsField = stats.getField();
    if (stats.isCount()) {
        searchRequestBuilder.addAggregation(
            AggregationBuilders.count(statsField + "_count").field(statsField));
    }
    if (stats.isMax()) {
        searchRequestBuilder.addAggregation(
            AggregationBuilders.max(statsField + "_max").field(statsField));
    }
    if (stats.isMean()) {
        // The stats aggregation carries the mean along with min/max/sum/count.
        searchRequestBuilder.addAggregation(
            AggregationBuilders.stats(statsField + "_stats").field(statsField));
    }
    if (stats.isMin()) {
        searchRequestBuilder.addAggregation(
            AggregationBuilders.min(statsField + "_min").field(statsField));
    }
    if (stats.isMissing()) {
        searchRequestBuilder.addAggregation(
            AggregationBuilders.missing(statsField + "_missing").field(statsField));
    }
    if (stats.isStandardDeviation() || stats.isSumOfSquares()) {
        // extended_stats covers both std deviation and sum of squares.
        searchRequestBuilder.addAggregation(
            AggregationBuilders.extendedStats(statsField + "_extendedStats").field(statsField));
    }
    if (stats.isSum()) {
        searchRequestBuilder.addAggregation(
            AggregationBuilders.sum(statsField + "_sum").field(statsField));
    }
}
Example 5
Project: nuxeo-master  File: ESAuditBackend.java View source code
/**
 * Ensures the audit UID sequence is at least as large as the maximum log
 * entry id already stored in the audit index, so that newly generated ids
 * never collide with existing entries. No-op when the index does not exist.
 */
protected void ensureUIDSequencer(Client esClient) {
    boolean auditIndexExists = esClient.admin().indices().prepareExists(getESIndexName()).execute().actionGet().isExists();
    if (!auditIndexExists) {
        return;
    }
    // Get max log entry id
    SearchRequestBuilder builder = getSearchRequestBuilder(esClient);
    builder.setQuery(QueryBuilders.matchAllQuery()).addAggregation(AggregationBuilders.max("maxAgg").field("id"));
    SearchResponse searchResponse = builder.execute().actionGet();
    Max agg = searchResponse.getAggregations().get("maxAgg");
    // NOTE(review): narrowing double -> int; assumes log entry ids fit in an
    // int. With zero documents the max agg has no value and the cast yields a
    // very small number, which the comparison below handles harmlessly — TODO confirm.
    int maxLogEntryId = (int) agg.getValue();
    // Get next sequence id
    UIDGeneratorService uidGeneratorService = Framework.getService(UIDGeneratorService.class);
    UIDSequencer seq = uidGeneratorService.getSequencer();
    seq.init();
    int nextSequenceId = seq.getNext(SEQ_NAME);
    // Increment sequence to max log entry id if needed
    if (nextSequenceId < maxLogEntryId) {
        log.info(String.format("Next UID returned by %s sequence is %d, initializing sequence to %d", SEQ_NAME, nextSequenceId, maxLogEntryId));
        seq.initSequence(SEQ_NAME, maxLogEntryId);
    }
}
Example 6
Project: spring-data-elasticsearch-master  File: TermFacetRequest.java View source code
/**
 * Builds the terms aggregation backing this facet request: first selected
 * field, requested size and ordering, plus optional exclude list, all-terms
 * mode and include regex.
 *
 * @return the configured terms aggregation builder
 * @throws IllegalArgumentException if no facet field was selected
 */
@Override
public AbstractAggregationBuilder getFacet() {
    // Fixed broken message ("at last one field !!!").
    Assert.notEmpty(fields, "Please select at least one field!");
    TermsBuilder termsBuilder = AggregationBuilders.terms(getName()).field(fields[0]).size(this.size);
    // Translate the facet's ordering enum into a Terms.Order; count ascending
    // is the fallback for any unrecognized value.
    switch(order) {
        case descTerm:
            termsBuilder.order(Terms.Order.term(false));
            break;
        case ascTerm:
            termsBuilder.order(Terms.Order.term(true));
            break;
        case descCount:
            termsBuilder.order(Terms.Order.count(false));
            break;
        default:
            termsBuilder.order(Terms.Order.count(true));
    }
    if (ArrayUtils.isNotEmpty(excludeTerms)) {
        termsBuilder.exclude(excludeTerms);
    }
    if (allTerms) {
        // "All terms" overrides the requested size with the maximum bucket count.
        termsBuilder.size(Integer.MAX_VALUE);
    }
    if (StringUtils.isNotBlank(regex)) {
        termsBuilder.include(regex);
    }
    return termsBuilder;
}
Example 7
Project: alien4cloud-master  File: AbstractToscaIndexSearchService.java View source code
/**
 * Faceted search over TOSCA elements that returns only the highest version of
 * each element: a terms aggregation groups by element id and a top_hits
 * sub-aggregation (sorted by major/minor/incremental version and qualifier)
 * keeps the single best document per bucket.
 */
public FacetedSearchResult search(Class<? extends T> clazz, String query, Integer size, Map<String, String[]> filters) {
    // top_hits keeps only the highest version per bucket; qualifier sorts
    // missing-first so releases outrank qualified versions.
    TopHitsBuilder topHitAggregation = AggregationBuilders.topHits("highest_version").setSize(1).addSort(new FieldSortBuilder("nestedVersion.majorVersion").order(SortOrder.DESC)).addSort(new FieldSortBuilder("nestedVersion.minorVersion").order(SortOrder.DESC)).addSort(new FieldSortBuilder("nestedVersion.incrementalVersion").order(SortOrder.DESC)).addSort(new FieldSortBuilder("nestedVersion.qualifier").order(SortOrder.DESC).missing("_first"));
    AggregationBuilder aggregation = AggregationBuilders.terms("query_aggregation").field(getAggregationField()).size(size).subAggregation(topHitAggregation);
    FacetedSearchResult<? extends T> searchResult = alienDAO.buildSearchQuery(clazz, query).setFilters(FilterUtil.singleKeyFilter(filters, "workspace", AlienConstants.GLOBAL_WORKSPACE_ID)).prepareSearch().setFetchContext(FetchContext.SUMMARY, topHitAggregation).facetedSearch(new IAggregationQueryManager() {

        @Override
        public AggregationBuilder getQueryAggregation() {
            return aggregation;
        }

        @Override
        @SneakyThrows({ IOException.class })
        public void setData(ObjectMapper objectMapper, Function getClassFromType, FacetedSearchResult result, Aggregation aggregation) {
            List<Object> resultData = Lists.newArrayList();
            List<String> resultTypes = Lists.newArrayList();
            if (aggregation == null) {
                result.setData(getArray(0));
                result.setTypes(new String[0]);
                // BUG FIX: the original fell through to the cast/iteration
                // below and threw a NullPointerException; bail out once the
                // empty result has been populated.
                return;
            }
            // One bucket per element id; each holds the single highest-version hit.
            for (Terms.Bucket bucket : ((Terms) aggregation).getBuckets()) {
                TopHits topHits = bucket.getAggregations().get("highest_version");
                for (SearchHit hit : topHits.getHits()) {
                    resultTypes.add(hit.getType());
                    resultData.add(objectMapper.readValue(hit.getSourceAsString(), ((Function<String, Class>) getClassFromType).apply(hit.getType())));
                }
            }
            result.setData(resultData.toArray(getArray(resultData.size())));
            result.setTypes(resultTypes.toArray(new String[resultTypes.size()]));
            result.setFrom(0);
            result.setTo(resultData.size());
            if (size == Integer.MAX_VALUE || resultData.size() < size) {
                result.setTotalResults(resultData.size());
            } else {
                // just to show that there is more results to fetch but iteration is not possible through aggregations.
                result.setTotalResults(resultData.size() + ((Terms) aggregation).getSumOfOtherDocCounts());
            }
        }
    });
    return searchResult;
}
Example 8
Project: sky-walking-master  File: NodeRefResSumSearchWithTimeSlice.java View source code
/**
 * Handles a time-sliced node-reference response-summary query: sums the
 * latency/error/summary columns per aggregation id within the requested time
 * range and appends the per-node results to the response JSON under "result".
 */
@Override
public void onWork(Object request, Object response) throws Exception {
    if (request instanceof RequestEntity) {
        RequestEntity search = (RequestEntity) request;
        SearchRequestBuilder searchRequestBuilder = EsClient.INSTANCE.getClient().prepareSearch(NodeRefResSumIndex.INDEX);
        searchRequestBuilder.setTypes(search.getSliceType());
        searchRequestBuilder.setSearchType(SearchType.DFS_QUERY_THEN_FETCH);
        // Restrict to the requested time slice (inclusive bounds).
        searchRequestBuilder.setQuery(QueryBuilders.rangeQuery(NodeRefResSumIndex.TIME_SLICE).gte(search.getStartTime()).lte(search.getEndTime()));
        // Hits are not needed, only the aggregation results.
        searchRequestBuilder.setSize(0);
        // Group by the agg column, summing each latency bucket plus error and summary counts.
        TermsAggregationBuilder aggregationBuilder = AggregationBuilders.terms(NodeRefResSumIndex.AGG_COLUMN).field(NodeRefResSumIndex.AGG_COLUMN);
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.ONE_SECOND_LESS).field(NodeRefResSumIndex.ONE_SECOND_LESS));
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.THREE_SECOND_LESS).field(NodeRefResSumIndex.THREE_SECOND_LESS));
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.FIVE_SECOND_LESS).field(NodeRefResSumIndex.FIVE_SECOND_LESS));
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.FIVE_SECOND_GREATER).field(NodeRefResSumIndex.FIVE_SECOND_GREATER));
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.ERROR).field(NodeRefResSumIndex.ERROR));
        aggregationBuilder.subAggregation(AggregationBuilders.sum(NodeRefResSumIndex.SUMMARY).field(NodeRefResSumIndex.SUMMARY));
        searchRequestBuilder.addAggregation(aggregationBuilder);
        SearchResponse searchResponse = searchRequestBuilder.execute().actionGet();
        JsonArray nodeRefResSumArray = new JsonArray();
        Terms aggTerms = searchResponse.getAggregations().get(NodeRefResSumIndex.AGG_COLUMN);
        // One bucket per agg id; unpack each sum sub-aggregation.
        for (Terms.Bucket bucket : aggTerms.getBuckets()) {
            String aggId = String.valueOf(bucket.getKey());
            Sum oneSecondLess = bucket.getAggregations().get(NodeRefResSumIndex.ONE_SECOND_LESS);
            Sum threeSecondLess = bucket.getAggregations().get(NodeRefResSumIndex.THREE_SECOND_LESS);
            Sum fiveSecondLess = bucket.getAggregations().get(NodeRefResSumIndex.FIVE_SECOND_LESS);
            Sum fiveSecondGreater = bucket.getAggregations().get(NodeRefResSumIndex.FIVE_SECOND_GREATER);
            Sum error = bucket.getAggregations().get(NodeRefResSumIndex.ERROR);
            Sum summary = bucket.getAggregations().get(NodeRefResSumIndex.SUMMARY);
            // NOTE(review): %s placeholders suggest a String.format-style logger;
            // confirm the project logger is not SLF4J ({}-style) — TODO confirm.
            logger.debug("aggId: %s, oneSecondLess: %s, threeSecondLess: %s, fiveSecondLess: %s, fiveSecondGreater: %s, error: %s, summary: %s", aggId, oneSecondLess.getValue(), threeSecondLess.getValue(), fiveSecondLess.getValue(), fiveSecondGreater.getValue(), error.getValue(), summary.getValue());
            JsonObject nodeRefResSumObj = new JsonObject();
            // aggId encodes "front<sep>behind"; assumes it always contains the
            // separator, otherwise ids[1] would throw — TODO confirm.
            String[] ids = aggId.split(Const.IDS_SPLIT);
            String front = ids[0];
            String behind = ids[1];
            nodeRefResSumObj.addProperty("front", front);
            nodeRefResSumObj.addProperty("behind", behind);
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.ONE_SECOND_LESS, oneSecondLess.getValue());
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.THREE_SECOND_LESS, threeSecondLess.getValue());
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.FIVE_SECOND_LESS, fiveSecondLess.getValue());
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.FIVE_SECOND_GREATER, fiveSecondGreater.getValue());
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.ERROR, error.getValue());
            nodeRefResSumObj.addProperty(NodeRefResSumIndex.SUMMARY, summary.getValue());
            nodeRefResSumArray.add(nodeRefResSumObj);
        }
        JsonObject resJsonObj = (JsonObject) response;
        resJsonObj.add("result", nodeRefResSumArray);
    } else {
        logger.error("unhandled message, message instance must NodeRefResSumSearchWithTimeSlice.RequestEntity, but is %s", request.getClass().toString());
    }
}
Example 9
Project: camunda-bpm-elasticsearch-master  File: ProcessInstanceHistogramResource.java View source code
/**
 * Builds date-histogram aggregations over process-instance start and end
 * times, bucketed by the requested interval, and returns the running/ended
 * histogram buckets plus the total hit count.
 *
 * @param interval one of s/m/h/d/w/M/q/y (second..year); anything else,
 *                 including null, falls back to SECOND
 * @param timeframe currently unused
 */
@GET
public AggregationsResult getDateHistogramAggregrations(@QueryParam("interval") String interval, @QueryParam("timeframe") String timeframe) {
    Client client = ElasticSearchClientProvider.getClient(getProcessEngine());
    // BUG FIX: every case previously fell through to default, so the interval
    // parameter was silently ignored and the histogram was always per-second.
    DateHistogram.Interval dateInterval;
    if (interval == null) {
        interval = "";
    }
    switch(interval) {
        case "m":
            dateInterval = DateHistogram.Interval.MINUTE;
            break;
        case "h":
            dateInterval = DateHistogram.Interval.HOUR;
            break;
        case "d":
            dateInterval = DateHistogram.Interval.DAY;
            break;
        case "w":
            dateInterval = DateHistogram.Interval.WEEK;
            break;
        case "M":
            dateInterval = DateHistogram.Interval.MONTH;
            break;
        case "q":
            dateInterval = DateHistogram.Interval.QUARTER;
            break;
        case "y":
            dateInterval = DateHistogram.Interval.YEAR;
            break;
        case "s":
        default:
            dateInterval = DateHistogram.Interval.SECOND;
            break;
    }
    // create buckets based on startTime
    DateHistogramBuilder histogramStartTime = AggregationBuilders.dateHistogram("dateHistogram").minDocCount(0).interval(dateInterval).field("startTime");
    // only get the running process instances (no endTime yet)
    FilterAggregationBuilder runningPIsAgg = AggregationBuilders.filter("running").filter(FilterBuilders.missingFilter("endTime"));
    runningPIsAgg.subAggregation(histogramStartTime);
    // create buckets based on endTime
    DateHistogramBuilder histogramEndTime = AggregationBuilders.dateHistogram("dateHistogram").minDocCount(0).interval(dateInterval).field("endTime");
    // only get the ended process instances (endTime present)
    FilterAggregationBuilder endedPIsAgg = AggregationBuilders.filter("ended").filter(FilterBuilders.existsFilter("endTime"));
    endedPIsAgg.subAggregation(histogramEndTime);
    SearchRequestBuilder searchRequestBuilder = client.prepareSearch(ES_DEFAULT_INDEX_NAME_CAMUNDA_BPM).setQuery(QueryBuilders.matchAllQuery()).addAggregation(runningPIsAgg).addAggregation(endedPIsAgg).setSearchType(SearchType.COUNT);
    System.out.println(searchRequestBuilder);
    SearchResponse searchResponse = searchRequestBuilder.get();
    long totalHits = searchResponse.getHits().getTotalHits();
    Filter running = searchResponse.getAggregations().get("running");
    DateHistogram runningDateHistogram = running.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> runningDateHistogramBuckets = parseDateHistogramAggregation(runningDateHistogram);
    Filter ended = searchResponse.getAggregations().get("ended");
    DateHistogram endedDateHistogram = ended.getAggregations().get("dateHistogram");
    List<DateHistogramBucketPair> endedDateHistogramBuckets = parseDateHistogramAggregation(endedDateHistogram);
    HashMap<String, List<DateHistogramBucketPair>> dateHistogramBucketPairs = new HashMap<>();
    dateHistogramBucketPairs.put("running", runningDateHistogramBuckets);
    dateHistogramBucketPairs.put("ended", endedDateHistogramBuckets);
    AggregationsResult aggregationsResult = new AggregationsResult();
    aggregationsResult.setDateHistogramBuckets(dateHistogramBucketPairs);
    aggregationsResult.setTotalHits(totalHits);
    return aggregationsResult;
}
Example 10
Project: elasticsearch-http-master  File: PercolateActionHandlerTest.java View source code
@Test
public void should_percolate_with_agg() throws IOException, ExecutionException, InterruptedException {
    // some parts of this code taken from Elasticsearch's test suite
    createMapping();
    // Register one percolator query matching field1 == 1, tagged with metadata "b".
    client().prepareIndex(THE_INDEX, PercolatorService.TYPE_NAME, Integer.toString(1))
            .setSource(jsonBuilder().startObject().field("query", matchQuery("field1", 1)).field("some_metadata", "b").endObject())
            .execute()
            .actionGet();
    client().admin().indices().prepareRefresh(THE_INDEX).execute().actionGet();
    // Percolate a document that the registered query should match.
    PercolateRequestBuilder percolateRequest = client().preparePercolate()
            .setIndices(THE_INDEX)
            .setDocumentType("my-type")
            .setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", 1).endObject()));
    // Terms aggregation over the query metadata, with a randomized collect mode.
    Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values());
    percolateRequest.addAggregation(AggregationBuilders.terms("the_terms").field("some_metadata").collectMode(collectionMode));
    refresh();
    PercolateResponse percolateResponse = httpClient.percolate(percolateRequest.request()).get();
    Assertions.assertThat(percolateResponse.getTotal()).isEqualTo(1);
    Assertions.assertThat(percolateResponse.getMatches()).hasSize(1);
    // The single matching query contributes one bucket with key "b".
    Terms metadataTerms = percolateResponse.getAggregations().getTerms("the_terms");
    Assertions.assertThat(metadataTerms.getBuckets()).hasSize(1);
    Assertions.assertThat(metadataTerms.getBuckets().get(0).getKey()).isEqualTo("b");
}
Example 11
Project: gazetteer-master  File: StatisticAPI.java View source code
/**
 * Computes tag-value statistics for POIs of the requested classes: runs a
 * terms aggregation per common tag key (plus one over exact names) and returns
 * a JSON object with the per-tag value counts, name counts, total hits and the
 * (optionally pruned) tag options. Returns null with a 404 when none of the
 * requested classes is a known feature.
 */
public JSONObject read(Request request, Response response) {
    Set<String> classes = RequestUtils.getSet(request, SearchAPI.POI_CLASS_HEADER);
    Set<String> refs = getSet(request, REFERENCES_HEADER);
    // Locale from the "lang" header, only if it is a supported language.
    Locale locale = null;
    if (L10n.supported.contains(request.getHeader("lang"))) {
        locale = Locale.forLanguageTag(request.getHeader("lang"));
    }
    apiDefaultHierarchy = GazetteerWeb.osmdocProperties().getApiDefaultHierarchy();
    String hname = request.getHeader(SearchAPI.HIERARCHY_CODE_HEADER, apiDefaultHierarchy);
    // Expand POI group headers into concrete classes for the chosen hierarchy.
    SearchAPI.addPOIGroups(request, classes, hname);
    // doc4found: when true (default), prune tag options that produced no statistics.
    boolean doc4Found = RequestUtils.getBooleanHeader(request, "doc4found", true);
    OSMDocFacade osmdoc = OSMDocSinglton.get().getFacade();
    // Resolve the requested class names to known features; unknown names are skipped.
    List<Feature> features = new ArrayList<>();
    for (String clazz : classes) {
        Feature feature = osmdoc.getFeature(clazz);
        if (feature != null) {
            features.add(feature);
        }
    }
    if (features.isEmpty()) {
        response.setResponseCode(404);
        return null;
    }
    Client client = ESNodeHolder.getClient();
    // Filter: POI points of the requested classes, optionally restricted by refs.
    BoolQueryBuilder filters = QueryBuilders.boolQuery().must(QueryBuilders.termQuery("type", "poipnt")).must(QueryBuilders.termsQuery("poi_class", classes));
    if (refs != null && !refs.isEmpty()) {
        filters.must(QueryBuilders.termsQuery("refs", refs));
    }
    SearchRequestBuilder searchQ = client.prepareSearch("gazetteer").setTypes(IndexHolder.LOCATION).setQuery(filters);
    JSONObject tagOptions = osmdoc.collectCommonTagsWithTraitsJSON(osmdoc.getFeature(classes), locale);
    Set<String> allTagKeys = getTagKeys(tagOptions);
    allTagKeys.removeAll(GazetteerWeb.osmdocProperties().getIgnoreTagsGrouping());
    // One terms aggregation per tag key, ignoring rare values (< 10 docs).
    for (String tagKey : allTagKeys) {
        searchQ.addAggregation(AggregationBuilders.terms(tagKey).field("more_tags." + tagKey).minDocCount(10));
    }
    // Plus a top-25 aggregation of exact names ordered by descending count.
    searchQ.addAggregation(AggregationBuilders.terms("name").field("name.exact").minDocCount(10).size(25).order(Order.count(false)));
    // Aggregations only; no hits needed.
    searchQ.setSearchType(SearchType.COUNT);
    SearchResponse esResponse = searchQ.execute().actionGet();
    Aggregations aggregations = esResponse.getAggregations();
    JSONObject result = new JSONObject();
    result.put("poi_class", new JSONArray(classes));
    result.put("total_count", esResponse.getHits().getTotalHits());
    result.put("tag_options", tagOptions);
    // Order tags by key
    JSONObject statistic = new JSONObject();
    result.put("tagValuesStatistic", statistic);
    // Route each terms aggregation to its output slot: "name" and "type" are
    // special-cased, everything else becomes a per-tag statistic (if non-empty).
    for (Aggregation agg : aggregations.asList()) {
        if (agg instanceof Terms) {
            Terms termsAgg = (Terms) agg;
            JSONObject values = new JSONObject();
            for (Bucket bucket : termsAgg.getBuckets()) {
                values.put(bucket.getKey(), bucket.getDocCount());
            }
            if ("name".equals(agg.getName())) {
                result.put("names", values);
            } else if ("type".equals(agg.getName())) {
                result.put("types", values);
            } else if (values.length() > 0) {
                statistic.put(agg.getName(), values);
            }
        }
    }
    // Optionally drop tag options that yielded no statistics, including
    // emptied trait groups, so the client only sees usable filters.
    if (doc4Found) {
        Set<String> foundedKeys = statistic.keySet();
        Set<String> notFound = new HashSet<>(allTagKeys);
        notFound.removeAll(foundedKeys);
        JSONObject groupedTags = tagOptions.getJSONObject("groupedTags");
        JSONArray options = tagOptions.getJSONArray("commonTagOptions");
        for (String notFoundKey : notFound) {
            groupedTags.remove(notFoundKey);
        }
        // Collect indexes to remove, then delete in descending order so earlier
        // removals do not shift the positions of later ones.
        TreeSet<Integer> remove = new TreeSet<>();
        for (int i = 0; i < options.length(); i++) {
            JSONObject filter = options.getJSONObject(i);
            if (notFound.contains(filter.getString("key"))) {
                remove.add(i);
            } else if (filter.getString("key").startsWith("trait_")) {
                // Trait entries hold sub-options; prune those individually and
                // drop the whole trait when it ends up empty.
                JSONArray group = filter.optJSONArray("options");
                TreeSet<Integer> gropRemove = new TreeSet<>();
                for (int j = 0; j < group.length(); j++) {
                    if (notFound.contains(group.getJSONObject(j).getString("valueKey"))) {
                        gropRemove.add(j);
                    }
                }
                for (Iterator<Integer> gri = gropRemove.descendingIterator(); gri.hasNext(); ) {
                    group.remove(gri.next());
                }
                if (group.length() == 0) {
                    remove.add(i);
                }
            }
        }
        for (Iterator<Integer> ri = remove.descendingIterator(); ri.hasNext(); ) {
            options.remove(ri.next());
        }
    }
    return result;
}
Example 12
Project: oerworldmap-master  File: ResourceIndex.java View source code
/**
 * Reads a resource by id (optionally a specific version) and renders it in the
 * negotiated format (HTML, JSON, CSV or iCal). Enriches Concept/ConceptScheme
 * resources with related items or a nested service aggregation, attaches
 * comments, permissions, alternate links and commit history metadata.
 */
public Result read(String id, String version, String extension) throws IOException {
    Resource resource = mBaseRepository.getResource(id, version);
    if (null == resource) {
        return notFound("Not found");
    }
    String type = resource.get(JsonLdConstants.TYPE).toString();
    // Concepts get a "related" list of resources referencing them as subject or audience.
    if (type.equals("Concept")) {
        ResourceList relatedList = mBaseRepository.query("about.about.@id:\"".concat(id).concat("\" OR about.audience.@id:\"").concat(id).concat("\""), 0, 999, null, null);
        resource.put("related", relatedList.getItems());
    }
    // Known concept schemes (ESC, ISCED-1997) get a per-concept service aggregation.
    if (type.equals("ConceptScheme")) {
        Resource conceptScheme = null;
        String field = null;
        if ("https://w3id.org/class/esc/scheme".equals(id)) {
            conceptScheme = Resource.fromJson(mEnv.classLoader().getResourceAsStream("public/json/esc.json"));
            field = "about.about.@id";
        } else if ("https://w3id.org/isced/1997/scheme".equals(id)) {
            field = "about.audience.@id";
            conceptScheme = Resource.fromJson(mEnv.classLoader().getResourceAsStream("public/json/isced-1997.json"));
        }
        if (!(null == conceptScheme)) {
            // Count services per top-level concept via nested sub-aggregations.
            AggregationBuilder conceptAggregation = AggregationBuilders.filter("services").filter(QueryBuilders.termQuery("about.@type", "Service"));
            for (Resource topLevelConcept : conceptScheme.getAsList("hasTopConcept")) {
                conceptAggregation.subAggregation(AggregationProvider.getNestedConceptAggregation(topLevelConcept, field));
            }
            Resource nestedConceptAggregation = mBaseRepository.aggregate(conceptAggregation);
            resource.put("aggregation", nestedConceptAggregation);
            return ok(render("", "ResourceIndex/ConceptScheme/read.mustache", resource));
        }
    }
    // Resolve comment ids into full comment resources.
    List<Resource> comments = new ArrayList<>();
    for (String commentId : resource.getIdList("comment")) {
        comments.add(mBaseRepository.getResource(commentId));
    }
    // Page title: first name's @value, falling back to the id if absent.
    String title;
    try {
        title = ((Resource) ((ArrayList<?>) resource.get("name")).get(0)).get("@value").toString();
    } catch (NullPointerException e) {
        title = id;
    }
    // Permission flags derived from the current user, resource type and groups.
    boolean mayEdit = (getUser() != null) && ((resource.getType().equals("Person") && getUser().getId().equals(id)) || (!resource.getType().equals("Person")) || mAccountService.getGroups(getHttpBasicAuthUser()).contains("admin"));
    boolean mayLog = (getUser() != null) && (mAccountService.getGroups(getHttpBasicAuthUser()).contains("admin") || mAccountService.getGroups(getHttpBasicAuthUser()).contains("editor"));
    boolean mayAdminister = (getUser() != null) && mAccountService.getGroups(getHttpBasicAuthUser()).contains("admin");
    boolean mayComment = (getUser() != null) && (!resource.getType().equals("Person"));
    boolean mayDelete = (getUser() != null) && (resource.getType().equals("Person") && getUser().getId().equals(id) || mAccountService.getGroups(getHttpBasicAuthUser()).contains("admin"));
    Map<String, Object> permissions = new HashMap<>();
    permissions.put("edit", mayEdit);
    permissions.put("log", mayLog);
    permissions.put("administer", mayAdminister);
    permissions.put("comment", mayComment);
    permissions.put("delete", mayDelete);
    // Alternate representation links (JSON/CSV always; iCal only for events).
    Map<String, String> alternates = new HashMap<>();
    String baseUrl = mConf.getString("proxy.host");
    alternates.put("JSON", baseUrl.concat(routes.ResourceIndex.read(id, version, "json").url()));
    alternates.put("CSV", baseUrl.concat(routes.ResourceIndex.read(id, version, "csv").url()));
    if (resource.getType().equals("Event")) {
        alternates.put("iCal", baseUrl.concat(routes.ResourceIndex.read(id, version, "ics").url()));
    }
    // Commit history: first entry is the latest change, last is the creation.
    List<Commit> history = mBaseRepository.log(id);
    resource = new Record(resource);
    resource.put(Record.CONTRIBUTOR, history.get(0).getHeader().getAuthor());
    try {
        resource.put(Record.AUTHOR, history.get(history.size() - 1).getHeader().getAuthor());
    } catch (NullPointerException e) {
        Logger.trace("Could not read author from commit " + history.get(history.size() - 1), e);
    }
    resource.put(Record.DATE_MODIFIED, history.get(0).getHeader().getTimestamp().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
    try {
        resource.put(Record.DATE_CREATED, history.get(history.size() - 1).getHeader().getTimestamp().format(DateTimeFormatter.ISO_OFFSET_DATE_TIME));
    } catch (NullPointerException e) {
        Logger.trace("Could not read timestamp from commit " + history.get(history.size() - 1), e);
    }
    Map<String, Object> scope = new HashMap<>();
    scope.put("resource", resource);
    scope.put("comments", comments);
    scope.put("permissions", permissions);
    scope.put("alternates", alternates);
    // Content negotiation: explicit extension wins, then Accept header, then JSON.
    String format = null;
    if (!StringUtils.isEmpty(extension)) {
        switch(extension) {
            case "html":
                format = "text/html";
                break;
            case "json":
                format = "application/json";
                break;
            case "csv":
                format = "text/csv";
                break;
            case "ics":
                format = "text/calendar";
                break;
        }
    } else if (request().accepts("text/html")) {
        format = "text/html";
    } else if (request().accepts("text/csv")) {
        format = "text/csv";
    } else if (request().accepts("text/calendar")) {
        format = "text/calendar";
    } else {
        format = "application/json";
    }
    // format stays null only for an unrecognized extension -> 404.
    if (format == null) {
        return notFound("Not found");
    } else if (format.equals("text/html")) {
        return ok(render(title, "ResourceIndex/read.mustache", scope));
    } else if (format.equals("application/json")) {
        return ok(resource.toString()).as("application/json");
    } else if (format.equals("text/csv")) {
        return ok(new CsvWithNestedIdsExporter().export(resource)).as("text/csv");
    } else if (format.equals("text/calendar")) {
        String ical = new CalendarExporter(Locale.ENGLISH).export(resource);
        if (ical != null) {
            return ok(ical).as("text/calendar");
        }
    }
    return notFound("Not found");
}
Example 13
Project: sonarqube-master  File: IssueIndex.java View source code
/**
 * Builds the "createdAt" date-histogram facet for the given issue query.
 * The bucket size (day/week/month/year) is chosen from the queried time span,
 * and buckets are shifted from GMT into the server's default time zone.
 *
 * @return the aggregation, or {@link Optional#empty()} when no lower bound can
 *         be determined (no createdAfter filter and no matching issue)
 */
private Optional<AggregationBuilder> getCreatedAtFacet(IssueQuery query, Map<String, QueryBuilder> filters, QueryBuilder esQuery) {
    // Lower bound: explicit createdAfter, otherwise the oldest matching issue.
    long startTime;
    Date createdAfter = query.createdAfter();
    if (createdAfter != null) {
        startTime = createdAfter.getTime();
    } else {
        Optional<Long> minDate = getMinCreatedAt(filters, esQuery);
        if (!minDate.isPresent()) {
            // Nothing matches at all: there is no facet to compute.
            return Optional.empty();
        }
        startTime = minDate.get();
    }
    // Upper bound: explicit createdBefore, otherwise "now".
    Date createdBefore = query.createdBefore();
    long endTime = createdBefore != null ? createdBefore.getTime() : system.now();
    // Pick the smallest bucket size that keeps the bucket count reasonable.
    Duration timeSpan = new Duration(startTime, endTime);
    DateHistogramInterval bucketSize;
    if (timeSpan.isShorterThan(TWENTY_DAYS)) {
        bucketSize = DateHistogramInterval.DAY;
    } else if (timeSpan.isShorterThan(TWENTY_WEEKS)) {
        bucketSize = DateHistogramInterval.WEEK;
    } else if (timeSpan.isShorterThan(TWENTY_MONTHS)) {
        bucketSize = DateHistogramInterval.MONTH;
    } else {
        bucketSize = DateHistogramInterval.YEAR;
    }
    // from GMT to server TZ
    int offsetInSeconds = -system.getDefaultTimeZone().getRawOffset() / 1_000;
    AggregationBuilder histogram = AggregationBuilders.dateHistogram(PARAM_CREATED_AT)
            .field(IssueIndexDefinition.FIELD_ISSUE_FUNC_CREATED_AT)
            .interval(bucketSize)
            .minDocCount(0L)
            .format(DateUtils.DATETIME_FORMAT)
            .timeZone(TimeZone.getTimeZone("GMT").getID())
            .offset(offsetInSeconds + "s")
            // ES dateHistogram bounds are inclusive while createdBefore parameter is exclusive
            .extendedBounds(startTime, endTime - 1_000L);
    return Optional.of(addEffortAggregationIfNeeded(query, histogram));
}
Example 14
Project: blackmarket-master  File: ExileToolsSearchClientTest.java View source code
@Test
@Ignore
public void testDistinctCurrencyIconValues() throws Exception {
    // Aggregate every distinct value of shop.verified (size 0 = unbounded).
    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
    sourceBuilder.aggregation(AggregationBuilders.terms("rarities").field("shop.verified").size(0));
    SearchResult searchResult = client.execute(sourceBuilder.toString()).getSearchResult();
    logger.info(searchResult.getJsonString());
    System.out.println("-------");
    // Dump the sorted bucket keys that mention "Currency".
    searchResult.getAggregations().getTermsAggregation("rarities").getBuckets().stream()
            .map(bucket -> bucket.getKey())
            .sorted()
            .filter(key -> key.contains("Currency"))
            .forEach(System.out::println);
}
Example 15
Project: fess-master  File: FessEsClient.java View source code
/**
 * Builds the Elasticsearch search request (paging, parsed query, sorting,
 * highlighting, facets and result collapsing) from the state accumulated on
 * this builder.
 *
 * @return {@code true} when the request was populated, {@code false} when the
 *         query string is blank
 * @throws ResultOffsetExceededException if the requested offset exceeds the configured maximum
 * @throws SearchQueryException if a requested facet field is not facetable
 */
public boolean build() {
    if (StringUtil.isBlank(query)) {
        return false;
    }
    final QueryHelper queryHelper = ComponentUtil.getQueryHelper();
    final FessConfig fessConfig = ComponentUtil.getFessConfig();
    if (offset > fessConfig.getQueryMaxSearchResultOffsetAsInteger()) {
        throw new ResultOffsetExceededException("The number of result size is exceeded.");
    }
    // Parse the user query. Admin searches skip role-based filtering; a
    // similar-document search instead adds a minhash filter clause.
    final QueryContext queryContext = queryHelper.build(searchRequestType, query,  context -> {
        if (SearchRequestType.ADMIN_SEARCH.equals(searchRequestType)) {
            context.skipRoleQuery();
        } else if (similarDocHash != null) {
            final DocumentHelper documentHelper = ComponentUtil.getDocumentHelper();
            context.addQuery( boolQuery -> {
                boolQuery.filter(QueryBuilders.termQuery(fessConfig.getIndexFieldContentMinhashBits(), documentHelper.decodeSimilarDocHash(similarDocHash)));
            });
        }
        // A geo constraint, when present, is applied as an additional filter.
        if (geoInfo != null && geoInfo.toQueryBuilder() != null) {
            context.addQuery( boolQuery -> {
                boolQuery.filter(geoInfo.toQueryBuilder());
            });
        }
    });
    searchRequestBuilder.setFrom(offset).setSize(size);
    if (responseFields != null) {
        searchRequestBuilder.setFetchSource(responseFields, null);
    }
    // sort
    queryContext.sortBuilders().forEach( sortBuilder -> searchRequestBuilder.addSort(sortBuilder));
    // highlighting
    final HighlightBuilder highlightBuilder = new HighlightBuilder();
    queryHelper.highlightedFields( stream -> stream.forEach( hf -> highlightBuilder.field(new HighlightBuilder.Field(hf).highlighterType(fessConfig.getQueryHighlightType()).fragmentSize(fessConfig.getQueryHighlightFragmentSizeAsInteger()).numOfFragments(fessConfig.getQueryHighlightNumberOfFragmentsAsInteger()))));
    searchRequestBuilder.highlighter(highlightBuilder);
    // facets
    if (facetInfo != null) {
        // Field facets: each field becomes a terms aggregation whose name is the
        // base64-encoded field name (prefixed) so it can be decoded on read.
        stream(facetInfo.field).of( stream -> stream.forEach( f -> {
            if (queryHelper.isFacetField(f)) {
                final String encodedField = BaseEncoding.base64().encode(f.getBytes(StandardCharsets.UTF_8));
                final TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(Constants.FACET_FIELD_PREFIX + encodedField).field(f);
                // Optional ordering: by term value or by bucket count (both ascending).
                if ("term".equals(facetInfo.sort)) {
                    termsBuilder.order(Order.term(true));
                } else if ("count".equals(facetInfo.sort)) {
                    termsBuilder.order(Order.count(true));
                }
                if (facetInfo.size != null) {
                    termsBuilder.size(facetInfo.size);
                }
                if (facetInfo.minDocCount != null) {
                    termsBuilder.minDocCount(facetInfo.minDocCount);
                }
                if (facetInfo.missing != null) {
                    termsBuilder.missing(facetInfo.missing);
                }
                searchRequestBuilder.addAggregation(termsBuilder);
            } else {
                throw new SearchQueryException("Invalid facet field: " + f);
            }
        }));
        // Query facets: each facet query string becomes a filter aggregation,
        // again named by its base64-encoded source text.
        stream(facetInfo.query).of( stream -> stream.forEach( fq -> {
            final QueryContext facetContext = new QueryContext(fq, false);
            queryHelper.buildBaseQuery(facetContext,  c -> {
            });
            final String encodedFacetQuery = BaseEncoding.base64().encode(fq.getBytes(StandardCharsets.UTF_8));
            final FilterAggregationBuilder filterBuilder = AggregationBuilders.filter(Constants.FACET_QUERY_PREFIX + encodedFacetQuery, facetContext.getQueryBuilder());
            searchRequestBuilder.addAggregation(filterBuilder);
        }));
    }
    // Collapse duplicate results unless this is an admin search or a
    // similar-document search.
    if (!SearchRequestType.ADMIN_SEARCH.equals(searchRequestType) && fessConfig.isResultCollapsed() && similarDocHash == null) {
        searchRequestBuilder.setCollapse(getCollapseBuilder(fessConfig));
    }
    searchRequestBuilder.setQuery(queryContext.getQueryBuilder());
    return true;
}
Example 16
Project: foxtrot-master  File: TrendAction.java View source code
/**
 * Builds a terms aggregation over the request's field with a per-bucket date
 * histogram; when a unique-count field is configured, a cardinality
 * sub-aggregation is attached to the histogram.
 */
private AbstractAggregationBuilder buildAggregation(TrendRequest request) {
    String trendField = request.getField();
    DateHistogram.Interval histogramInterval = Utils.getHistogramInterval(request.getPeriod());
    DateHistogramBuilder histogram = Utils.buildDateHistogramAggregation(request.getTimestamp(), histogramInterval);
    if (!CollectionUtils.isNullOrEmpty(getParameter().getUniqueCountOn())) {
        histogram.subAggregation(Utils.buildCardinalityAggregation(getParameter().getUniqueCountOn()));
    }
    return AggregationBuilders.terms(Utils.sanitizeFieldForAggregation(trendField))
            .field(trendField)
            .size(0)
            .subAggregation(histogram);
}
Example 17
Project: molgenis-master  File: AggregateQueryGenerator.java View source code
/**
 * Recursively builds the aggregation tree for the given attribute path.
 * The head of {@code attrs} is consumed at this level: it yields a terms
 * aggregation (plus a "missing" aggregation when the attribute is nillable),
 * the remaining attributes become sub-aggregations, and an optional
 * cardinality aggregation on {@code distinctAttr} is attached at the deepest
 * level. Nested attributes are wrapped in nested / reverse-nested
 * aggregations as required.
 *
 * @param attrs        remaining attribute path; NOTE: mutated — the head is popped by this call
 * @param parentAttr   attribute handled by the calling level, or {@code null} at the root
 * @param distinctAttr attribute to count distinct values of, or {@code null}
 * @return the aggregations for this level, possibly wrapped in (reverse) nested aggregations
 */
private List<AggregationBuilder<?>> createAggregations(LinkedList<Attribute> attrs, Attribute parentAttr, Attribute distinctAttr) {
    Attribute attr = attrs.pop();
    List<AggregationBuilder<?>> aggs = new ArrayList<AggregationBuilder<?>>();
    // term aggregation
    String termsAggName = attr.getName() + AGGREGATION_TERMS_POSTFIX;
    String termsAggFieldName = getAggregateFieldName(attr);
    AggregationBuilder<?> termsAgg = AggregationBuilders.terms(termsAggName).size(MAX_VALUE).field(termsAggFieldName);
    aggs.add(termsAgg);
    // missing term aggregation: counts documents that have no value for this attribute
    if (attr.isNillable()) {
        String missingAggName = attr.getName() + AGGREGATION_MISSING_POSTFIX;
        String missingAggFieldName = attr.getName();
        AggregationBuilder<?> missingTermsAgg = AggregationBuilders.missing(missingAggName).field(missingAggFieldName);
        aggs.add(missingTermsAgg);
    }
    // add distinct term aggregations (only at the deepest level of the path)
    if (attrs.isEmpty() && distinctAttr != null) {
        // http://www.elasticsearch.org/guide/en/elasticsearch/reference/1.x/search-aggregations-metrics-cardinality-aggregation.html
        // The precision_threshold options allows to trade memory for accuracy, and defines a unique count below
        // which counts are expected to be close to accurate. Above this value, counts might become a bit more
        // fuzzy. The maximum supported value is 40000, thresholds above this number will have the same effect as a
        // threshold of 40000.
        String cardinalityAggName = distinctAttr.getName() + AGGREGATION_DISTINCT_POSTFIX;
        String cardinalityAggFieldName = getAggregateFieldName(distinctAttr);
        CardinalityBuilder distinctAgg = AggregationBuilders.cardinality(cardinalityAggName).field(cardinalityAggFieldName).precisionThreshold(40000L);
        // CardinalityBuilder does not implement AggregationBuilder interface, so we need some more code
        AbstractAggregationBuilder wrappedDistinctAgg;
        if (isNestedType(distinctAttr)) {
            // the distinct attribute lives in a nested document: wrap the
            // cardinality aggregation in a nested aggregation on its path
            String nestedAggName = distinctAttr.getName() + AGGREGATION_NESTED_POSTFIX;
            String nestedAggFieldName = distinctAttr.getName();
            NestedBuilder nestedBuilder = AggregationBuilders.nested(nestedAggName).path(nestedAggFieldName);
            nestedBuilder.subAggregation(distinctAgg);
            if (isNestedType(attr)) {
                // current attribute is itself nested: step back up first
                String reverseAggName = attr.getName() + AggregateQueryGenerator.AGGREGATION_REVERSE_POSTFIX;
                ReverseNestedBuilder reverseNestedBuilder = AggregationBuilders.reverseNested(reverseAggName);
                reverseNestedBuilder.subAggregation(nestedBuilder);
                wrappedDistinctAgg = reverseNestedBuilder;
            } else {
                wrappedDistinctAgg = nestedBuilder;
            }
        } else {
            if (isNestedType(attr)) {
                // current attribute is nested but the distinct one is not:
                // escape the nested context before counting
                String reverseAggName = attr.getName() + AggregateQueryGenerator.AGGREGATION_REVERSE_POSTFIX;
                ReverseNestedBuilder reverseNestedBuilder = AggregationBuilders.reverseNested(reverseAggName);
                reverseNestedBuilder.subAggregation(distinctAgg);
                wrappedDistinctAgg = reverseNestedBuilder;
            } else {
                wrappedDistinctAgg = distinctAgg;
            }
        }
        // add wrapped distinct term aggregation to aggregations
        for (AggregationBuilder<?> agg : aggs) {
            agg.subAggregation(wrappedDistinctAgg);
        }
    }
    // add sub aggregations (recurse for the remaining attribute path)
    if (!attrs.isEmpty()) {
        List<AggregationBuilder<?>> subAggs = createAggregations(attrs, attr, distinctAttr);
        for (AggregationBuilder<?> agg : aggs) {
            for (AggregationBuilder<?> subAgg : subAggs) {
                agg.subAggregation(subAgg);
            }
        }
    }
    // wrap in nested aggregation if this attribute is nested
    if (isNestedType(attr)) {
        String nestedAggName = attr.getName() + AGGREGATION_NESTED_POSTFIX;
        String nestedAggFieldName = attr.getName();
        NestedBuilder nestedAgg = AggregationBuilders.nested(nestedAggName).path(nestedAggFieldName);
        for (AggregationBuilder<?> agg : aggs) {
            nestedAgg.subAggregation(agg);
        }
        aggs = Collections.<AggregationBuilder<?>>singletonList(nestedAgg);
    }
    // wrap in reverse nested aggregation if parent aggregation is nested
    if (parentAttr != null && isNestedType(parentAttr)) {
        String reverseAggName = parentAttr.getName() + AggregateQueryGenerator.AGGREGATION_REVERSE_POSTFIX;
        ReverseNestedBuilder reverseNestedAgg = AggregationBuilders.reverseNested(reverseAggName);
        for (AggregationBuilder<?> agg : aggs) {
            reverseNestedAgg.subAggregation(agg);
        }
        aggs = Collections.<AggregationBuilder<?>>singletonList(reverseNestedAgg);
    }
    return aggs;
}
Example 18
Project: BioSolr-master  File: ElasticDocumentSearch.java View source code
/**
 * Searches documents matching {@code term}, grouped by study ID, returning the
 * single best hit per group ordered by top score. {@code total} is the number
 * of distinct groups (cardinality), not the raw hit count.
 *
 * NOTE(review): the {@code filters} parameter is not used in this body —
 * confirm whether that is intentional.
 */
@Override
public ResultsList<Document> searchDocuments(String term, int start, int rows, List<String> additionalFields, List<String> filters) throws SearchEngineException {
    // Build the query
    MultiMatchQueryBuilder qb = QueryBuilders.multiMatchQuery(term, DEFAULT_FIELDS).minimumShouldMatch("2<25%");
    if (additionalFields != null && additionalFields.size() > 0) {
        // Extra fields widen the multi-match query.
        List<String> parsedAdditional = parseAdditionalFields(additionalFields);
        parsedAdditional.forEach(qb::field);
    }
    TopHitsBuilder topHitsBuilder = AggregationBuilders.topHits(HITS_AGGREGATION).setFrom(0).setSize(1);
    /* Build the terms aggregation, since we need a result set grouped by study ID.
		 * The "top_score" sub-agg allows us to sort by the top score of the results;
		 * the topHits sub-agg actually pulls back the record data, returning just the first
		 * hit in the aggregation.
		 * Note that we have to get _all_ rows up to and including the last required, annoyingly. */
    AggregationBuilder termsAgg = AggregationBuilders.terms(HITS_AGGREGATION).field(GROUP_FIELD).order(Terms.Order.aggregation(SCORE_AGGREGATION, false)).size(start + rows).subAggregation(AggregationBuilders.max(SCORE_AGGREGATION).script(new Script("_score", ScriptService.ScriptType.INLINE, "expression", null))).subAggregation(topHitsBuilder);
    // Build the actual search request, including another aggregation to get
    // the number of unique study IDs returned.
    SearchRequestBuilder srb = getClient().prepareSearch(getIndexName()).setTypes(getDocumentType()).setQuery(qb).setSize(0).addAggregation(termsAgg).addAggregation(AggregationBuilders.cardinality(COUNT_AGGREGATION).field(GROUP_FIELD));
    LOGGER.debug("ES Query: {}", srb.toString());
    SearchResponse response = srb.execute().actionGet();
    // Handle the response
    long total = ((Cardinality) (response.getAggregations().get(COUNT_AGGREGATION))).getValue();
    List<Document> docs;
    if (total == 0) {
        docs = new ArrayList<>();
    } else {
        // Build a map - need to look up annotation data separately.
        // This is because it's not in _source, and the fields() method
        // is not visible for a TopHitsBuilder.
        Map<String, Document> documentMap = new LinkedHashMap<>(rows);
        ObjectMapper mapper = buildObjectMapper();
        int lastIdx = (int) (start + rows <= total ? start + rows : total);
        StringTerms terms = response.getAggregations().get(HITS_AGGREGATION);
        // NOTE(review): subList assumes start < number of buckets returned —
        // confirm callers cannot request a page beyond the last bucket.
        List<Terms.Bucket> termBuckets = terms.getBuckets().subList(start, lastIdx);
        for (Terms.Bucket bucket : termBuckets) {
            TopHits hits = bucket.getAggregations().get(HITS_AGGREGATION);
            SearchHit hit = hits.getHits().getAt(0);
            documentMap.put(hit.getId(), extractDocument(mapper, hit));
        }
        // Populate annotation data for the document
        lookupAnnotationFields(documentMap);
        docs = new ArrayList<>(documentMap.values());
    }
    // NOTE(review): rows == 0 would throw ArithmeticException here — confirm callers guard.
    return new ResultsList<>(docs, start, (start / rows), total);
}
Example 19
Project: blueflood-master  File: AbstractElasticIO.java View source code
/**
     * Performs terms aggregation by metric_name which returns doc_count by
     * metric_name index that matches the given regex.
     *
     *  Sample request body:
     *
     *  {
     *      "size": 0,
     *      "query": {
     *          "bool" : {
     *              "must" : [ {
     *                  "term" : {
     *                      "tenantId" : "ratanasv"
     *                  }
     *              }, {
     *                  "regexp" : {
     *                      "metric_name" : {
     *                         "value" : "<regex>"
     *                      }
     *                  }
     *              } ]
     *          }
     *      },
     *      "aggs": {
     *          "metric_name_tokens": {
     *              "terms": {
     *                  "field" : "metric_name",
     *                  "include": "<regex>",
     *                  "execution_hint": "map",
     *                  "size": 0
     *              }
     *          }
     *      }
     *  }
     *
     * The two regex expressions used in the query above would be same, one to filter
     * at query level and another to filter the aggregation buckets.
     *
     * Execution hint of "map" works by using field values directly instead of ordinals
     * in order to aggregate data per-bucket
     *
     * @param tenant
     * @param regexMetricName
     * @return
     */
private SearchResponse getMetricNamesFromES(final String tenant, final String regexMetricName) {
    // Terms aggregation over metric_name, bucket-filtered by the same regex used
    // in the query. The "map" execution hint aggregates on raw field values and
    // size 0 returns all buckets.
    AggregationBuilder tokensAggregation = AggregationBuilders.terms(METRICS_TOKENS_AGGREGATE)
            .field(ESFieldLabel.metric_name.name())
            .include(regexMetricName)
            .executionHint("map")
            .size(0);
    // Query-level filters: restrict to the tenant and to metric names matching the regex.
    TermQueryBuilder byTenant = QueryBuilders.termQuery(ESFieldLabel.tenantId.toString(), tenant);
    RegexpQueryBuilder byMetricName = QueryBuilders.regexpQuery(ESFieldLabel.metric_name.name(), regexMetricName);
    return client.prepareSearch(new String[] { ELASTICSEARCH_INDEX_NAME_READ })
            .setRouting(tenant)
            .setSize(0)
            .setVersion(true)
            .setQuery(QueryBuilders.boolQuery().must(byTenant).must(byMetricName))
            .addAggregation(tokensAggregation)
            .execute()
            .actionGet();
}
Example 20
Project: elasticsearch-sql-master  File: AggMaker.java View source code
/**
 * Builds the "group by" aggregation for the given field.
 * (Javadoc translated from Chinese; the original was partially garbled.)
 * <ul>
 *   <li>"script" method field: terms aggregation driven by the script expression</li>
 *   <li>"filter" method field: filter aggregation built from the parsed WHERE clause</li>
 *   <li>other method fields: delegated to {@code makeRangeGroup}</li>
 *   <li>plain field: terms aggregation on the field itself</li>
 * </ul>
 *
 * @param field the field (or method expression) to group on
 * @return the aggregation builder for this group key
 * @throws SqlParseException if the expression cannot be parsed
 */
public AggregationBuilder makeGroupAgg(Field field) throws SqlParseException {
    if (field instanceof MethodField && field.getName().equals("script")) {
        MethodField methodField = (MethodField) field;
        TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(methodField.getAlias()).script(new Script(methodField.getParams().get(1).value.toString()));
        // Register the builder so later parsing stages can attach to it by alias.
        groupMap.put(methodField.getAlias(), new KVValue("KEY", termsBuilder));
        return termsBuilder;
    }
    if (field instanceof MethodField) {
        MethodField methodField = (MethodField) field;
        if (methodField.getName().equals("filter")) {
            Map<String, Object> paramsAsMap = methodField.getParamsAsMap();
            Where where = (Where) paramsAsMap.get("where");
            return AggregationBuilders.filter(paramsAsMap.get("alias").toString(), QueryMaker.explan(where));
        }
        return makeRangeGroup(methodField);
    } else {
        TermsAggregationBuilder termsBuilder = AggregationBuilders.terms(field.getName()).field(field.getName());
        // Register the builder under the plain field name.
        groupMap.put(field.getName(), new KVValue("KEY", termsBuilder));
        return termsBuilder;
    }
}
Example 21
Project: jframe-master  File: TestTransportClient.java View source code
@Test
public void testAggregation() {
    // Match-all search carrying two aggregations: a terms agg on "field" and a
    // yearly date histogram on "birth".
    SearchResponse searchResponse = client.prepareSearch()
            .setQuery(QueryBuilders.matchAllQuery())
            .addAggregation(AggregationBuilders.terms("agg1").field("field"))
            .addAggregation(AggregationBuilders.dateHistogram("agg2").field("birth").interval(DateHistogramInterval.YEAR))
            .execute().actionGet();
    // Get your facet results
    Terms termsResult = searchResponse.getAggregations().get("agg1");
    // DateHistogram agg2 = searchResponse.getAggregations().get("agg2");
    searchResponse = client.prepareSearch("index1").setTerminateAfter(1000).get();
    if (searchResponse.isTerminatedEarly()) {
    // We finished early
    }
    // Nested aggregation example (kept for reference):
    // sr = client.prepareSearch()
    // .addAggregation(
    // AggregationBuilders.terms("by_country").field("country")
    // .subAggregation(AggregationBuilders.dateHistogram("by_year")
    // .field("dateOfBirth")
    // .interval((DateHistogramInterval.YEAR)
    // .subAggregation(AggregationBuilders.avg("avg_children").field("children"))
    // )
    // ).execute().actionGet();
    MetricsAggregationBuilder maxHeightAgg = AggregationBuilders.max("agg").field("height");
}
Example 22
Project: storm-crawler-master  File: AggregationSpout.java View source code
/**
 * Populates the buffer by querying Elasticsearch for URLs whose nextFetchDate
 * is due (<= the current watermark), bucketed by the partition field, with up
 * to maxURLsPerBucket top hits per bucket. The query is executed
 * asynchronously; this instance receives the response callback.
 */
@Override
protected void populateBuffer() {
    // First call: default the watermark to the current time.
    if (lastDate == null) {
        lastDate = new Date();
    }
    // NOTE(review): DATEFORMAT appears to be a java.util.Formatter pattern
    // applied directly to a Date — confirm it uses %t date/time conversions.
    String formattedLastDate = String.format(DATEFORMAT, lastDate);
    LOG.info("{} Populating buffer with nextFetchDate <= {}", logIdprefix, formattedLastDate);
    QueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery("nextFetchDate").lte(formattedLastDate);
    // size 0: only the aggregation buckets are needed, not query-level hits
    SearchRequestBuilder srb = client.prepareSearch(indexName).setTypes(docType).setSearchType(SearchType.QUERY_THEN_FETCH).setQuery(rangeQueryBuilder).setFrom(0).setSize(0).setExplain(false);
    TermsAggregationBuilder aggregations = AggregationBuilders.terms("partition").field(partitionField).size(maxBucketNum);
    TopHitsAggregationBuilder tophits = AggregationBuilders.topHits("docs").size(maxURLsPerBucket).explain(false);
    // sort within a bucket
    if (StringUtils.isNotBlank(bucketSortField)) {
        FieldSortBuilder sorter = SortBuilders.fieldSort(bucketSortField).order(SortOrder.ASC);
        tophits.sort(sorter);
    }
    aggregations.subAggregation(tophits);
    // sort between buckets
    if (StringUtils.isNotBlank(totalSortField)) {
        MinAggregationBuilder minBuilder = AggregationBuilders.min("top_hit").field(totalSortField);
        aggregations.subAggregation(minBuilder);
        aggregations.order(Terms.Order.aggregation("top_hit", true));
    }
    // Optionally wrap the partition aggregation in a diversified sampler to cap
    // the number of docs considered per partition value.
    if (sample) {
        DiversifiedAggregationBuilder sab = new DiversifiedAggregationBuilder("sample");
        sab.field(partitionField).maxDocsPerValue(maxURLsPerBucket);
        sab.shardSize(maxURLsPerBucket * maxBucketNum);
        sab.subAggregation(aggregations);
        srb.addAggregation(sab);
    } else {
        srb.addAggregation(aggregations);
    }
    // _shards:2,3
    if (shardID != -1) {
        srb.setPreference("_shards:" + shardID);
    }
    // dump query to log
    LOG.debug("{} ES query {}", logIdprefix, srb.toString());
    timeStartESQuery = System.currentTimeMillis();
    isInESQuery.set(true);
    // asynchronous execution: 'this' handles the response when it arrives
    srb.execute(this);
}
Example 23
Project: stratio-connector-elasticsearch-master  File: ConnectorQueryBuilder.java View source code
/**
 * Creates the term aggregations matching the "group by" clause. The first
 * GROUP BY field becomes the top-level terms aggregation; every subsequent
 * field is attached as a sub-aggregation of that root.
 *
 * @param groupBy GROUP BY clause that defines the nested term aggregations to be retrieved
 * @throws ExecutionException
 */
private void createNestedTermAggregation(GroupBy groupBy) throws ExecutionException {
    // Nothing to do when no "group by" fields are defined.
    if (groupBy == null || groupBy.getIds() == null) {
        return;
    }
    AggregationBuilder rootAggregation = null;
    for (Selector groupField : groupBy.getIds()) {
        String fieldName = SelectorUtils.getSelectorFieldName(groupField);
        if (rootAggregation == null) {
            rootAggregation = AggregationBuilders.terms(fieldName).field(fieldName);
        } else {
            rootAggregation.subAggregation(AggregationBuilders.terms(fieldName).field(fieldName));
        }
    }
    // Aggregation-only request: no document hits are needed, so size is 0.
    requestBuilder.addAggregation(rootAggregation);
    requestBuilder.setSize(0);
}
Example 24
Project: web-crawler-master  File: AggregationSpout.java View source code
/**
 * Populates the buffer by querying Elasticsearch for URLs whose nextFetchDate
 * is due (<= the current watermark), bucketed by the partition field, with up
 * to maxURLsPerBucket top hits per bucket. The query is executed
 * asynchronously; this instance receives the response callback.
 */
@Override
protected void populateBuffer() {
    // First call: default the watermark to the current time.
    if (lastDate == null) {
        lastDate = new Date();
    }
    // NOTE(review): DATEFORMAT appears to be a java.util.Formatter pattern
    // applied directly to a Date — confirm it uses %t date/time conversions.
    String formattedLastDate = String.format(DATEFORMAT, lastDate);
    LOG.info("{} Populating buffer with nextFetchDate <= {}", logIdprefix, formattedLastDate);
    QueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery("nextFetchDate").lte(formattedLastDate);
    // size 0: only the aggregation buckets are needed, not query-level hits
    SearchRequestBuilder srb = client.prepareSearch(indexName).setTypes(docType).setSearchType(SearchType.QUERY_THEN_FETCH).setQuery(rangeQueryBuilder).setFrom(0).setSize(0).setExplain(false);
    TermsAggregationBuilder aggregations = AggregationBuilders.terms("partition").field(partitionField).size(maxBucketNum);
    TopHitsAggregationBuilder tophits = AggregationBuilders.topHits("docs").size(maxURLsPerBucket).explain(false);
    // sort within a bucket
    if (StringUtils.isNotBlank(bucketSortField)) {
        FieldSortBuilder sorter = SortBuilders.fieldSort(bucketSortField).order(SortOrder.ASC);
        tophits.sort(sorter);
    }
    aggregations.subAggregation(tophits);
    // sort between buckets
    if (StringUtils.isNotBlank(totalSortField)) {
        MinAggregationBuilder minBuilder = AggregationBuilders.min("top_hit").field(totalSortField);
        aggregations.subAggregation(minBuilder);
        aggregations.order(Terms.Order.aggregation("top_hit", true));
    }
    // Optionally wrap the partition aggregation in a diversified sampler to cap
    // the number of docs considered per partition value.
    if (sample) {
        DiversifiedAggregationBuilder sab = new DiversifiedAggregationBuilder("sample");
        sab.field(partitionField).maxDocsPerValue(maxURLsPerBucket);
        sab.shardSize(maxURLsPerBucket * maxBucketNum);
        sab.subAggregation(aggregations);
        srb.addAggregation(sab);
    } else {
        srb.addAggregation(aggregations);
    }
    // _shards:2,3
    if (shardID != -1) {
        srb.setPreference("_shards:" + shardID);
    }
    // dump query to log
    LOG.debug("{} ES query {}", logIdprefix, srb.toString());
    timeStartESQuery = System.currentTimeMillis();
    isInESQuery.set(true);
    // asynchronous execution: 'this' handles the response when it arrives
    srb.execute(this);
}
Example 25
Project: aperture-tiles-master  File: ElasticsearchPyramidIO.java View source code
/**
 * Runs a tile query restricted to the [startX, endX] x [endY, startY] boundary,
 * optionally narrowed by caller-supplied JSON filters ("terms", "range", "UDF"),
 * and aggregates the hits into a 2D x/y histogram grid.
 *
 * @param filterJSON additional filter definitions; may be null or unparsable
 *                   (in which case no extra filters are applied)
 * @return the search response carrying the nested histogram aggregations
 */
private SearchResponse timeFilteredRequest(double startX, double endX, double startY, double endY, JSONObject filterJSON) {
    // the first filter added excludes everything outside of the tile boundary
    // on both the xField and the yField
    BoolFilterBuilder boundaryFilter = FilterBuilders.boolFilter();
    boundaryFilter.must(FilterBuilders.rangeFilter(this.xField).gte(//startx is min val
    startX).lte(endX), FilterBuilders.rangeFilter(this.yField).gte(//endy is min val
    endY).lte(startY));
    Map<String, Object> filterMap = null;
    // transform filter list json to a map
    // NOTE(review): parse failures are silently swallowed here — the request
    // proceeds with only the boundary filter. Confirm this best-effort is intended.
    try {
        filterMap = JsonUtilities.jsonObjToMap(filterJSON);
    } catch (Exception e) {
        filterMap = null;
    }
    if (filterMap != null) {
        // Flatten the map values into a list of individual filter definitions.
        Set<String> strings = filterMap.keySet();
        List<Map> filterList = new ArrayList<>();
        for (String str : strings) {
            filterList.add((Map) filterMap.get(str));
        }
        for (Map filter : filterList) {
            String type = (String) filter.get("type");
            String filterPath = (String) filter.get("path");
            switch(type) {
                case "terms":
                    // Match any of the listed terms ("or" execution).
                    Map termsMap = (Map) filter.get("terms");
                    List<String> termsList = new ArrayList<>();
                    for (Object key : termsMap.keySet()) {
                        termsList.add((String) termsMap.get(key));
                    }
                    boundaryFilter.must(FilterBuilders.termsFilter(filterPath, termsList).execution("or"));
                    break;
                case "range":
                    // Note range filter requires a numeric value to filter on,
                    // doesn't work if passing in formatted date strings like "2015-03-01"
                    // check for existence of from OR to or FROM and TO
                    RangeFilterBuilder rangeFilterBuilder = FilterBuilders.rangeFilter(filterPath);
                    if (filter.containsKey("from") && filter.get("from") != null) {
                        rangeFilterBuilder.from(filter.get("from"));
                    }
                    if (filter.containsKey("to") && filter.get("to") != null) {
                        rangeFilterBuilder.to(filter.get("to"));
                    }
                    boundaryFilter.must(rangeFilterBuilder);
                    break;
                case "UDF":
                    // build a user defined facet: escaped query string against "body.en"
                    BoolQueryBuilder boolQuery = new BoolQueryBuilder();
                    boolQuery.must(QueryBuilders.queryStringQuery(StringEscapeUtils.escapeJavaScript((String) filter.get("query"))).field("body.en"));
                    boundaryFilter.must(FilterBuilders.queryFilter(boolQuery));
                    break;
                default:
                    LOGGER.error("Unsupported filter type");
            }
        }
    }
    // Two nested histograms form the tile grid: outer buckets on x, inner on y.
    // Note the y interval uses (endY, startY) — endY is the minimum value.
    SearchRequestBuilder searchRequest = baseQuery(boundaryFilter).addAggregation(AggregationBuilders.histogram("xField").field(this.xField).interval(getHistogramIntervalFromBounds(startX, endX)).minDocCount(1).subAggregation(AggregationBuilders.histogram("yField").field(this.yField).interval(getHistogramIntervalFromBounds(endY, startY)).minDocCount(1)));
    return searchRequest.execute().actionGet();
}
Example 26
Project: Bam-master  File: NativeClientAggregationsBuilder.java View source code
/**
 * <p>Serializes a core function.</p>
 * <p>Example of SUM function serialization:</p>
 * <code>
 *     "column_id" : {
 *          "sum" : { "field" : "change" }
 *     }
 * </code>
 *
 * @param groupFunction the group function to translate; may be {@code null}
 * @return the Elasticsearch metrics aggregation builder, or {@code null} when
 *         {@code groupFunction} is {@code null}
 * @throws RuntimeException if the source column does not exist or the function
 *         type has no Elasticsearch equivalent
 * @throws IllegalArgumentException if no source column can be resolved at all
 */
protected ValuesSourceMetricsAggregationBuilder serializeCoreFunction(GroupFunction groupFunction) {
    if (groupFunction == null) {
        return null;
    }
    // Resolve the source column: an explicit id must exist in the metadata,
    // otherwise fall back to the dataset's first column.
    String sourceId = groupFunction.getSourceId();
    if (sourceId != null && !existColumnInMetadataDef(sourceId)) {
        throw new RuntimeException("Aggregation by column [" + sourceId + "] failed. No column with the given id.");
    }
    if (sourceId == null) {
        sourceId = metadata.getColumnId(0);
    }
    if (sourceId == null) {
        throw new IllegalArgumentException("Aggregation from unknown column id.");
    }
    // The aggregation is exposed under the function's column id, defaulting
    // to the source column id.
    String columnId = groupFunction.getColumnId();
    if (columnId == null) {
        columnId = sourceId;
    }
    AggregateFunctionType type = groupFunction.getFunction();
    // (Removed: unused local 'sourceColumnType' and the stale commented-out
    // code that referenced it.)
    ValuesSourceMetricsAggregationBuilder result = null;
    switch(type) {
        case COUNT:
            result = AggregationBuilders.count(columnId).field(sourceId);
            break;
        case DISTINCT:
            // "Distinct" maps to the (approximate) cardinality aggregation.
            result = AggregationBuilders.cardinality(columnId).field(sourceId);
            break;
        case AVERAGE:
            result = AggregationBuilders.avg(columnId).field(sourceId);
            break;
        case SUM:
            result = AggregationBuilders.sum(columnId).field(sourceId);
            break;
        case MIN:
            result = AggregationBuilders.min(columnId).field(sourceId);
            break;
        case MAX:
            result = AggregationBuilders.max(columnId).field(sourceId);
            break;
    }
    // Defensive: reached if the enum gains a value not handled above.
    if (null == result) {
        throw new RuntimeException("Core function not supported as an Elastic Search aggregation [type=" + type.name() + "]");
    }
    return result;
}
Example 27
Project: dashbuilder-master  File: NativeClientAggregationsBuilder.java View source code
/**
 * <p>Serializes a core (metric) aggregation function into an Elasticsearch
 * metrics aggregation builder.</p>
 * <p>Example of SUM function serialization:</p>
 * <code>
 *     "column_id" : {
 *          "sum" : { "field" : "change" }
 *     }
 * </code>
 * @param groupFunction The core function to serialize; may be <code>null</code>.
 * @return The resulting metrics aggregation builder, or <code>null</code> when
 *         no function is given.
 */
protected ValuesSourceMetricsAggregationBuilder serializeCoreFunction(GroupFunction groupFunction) {
    if (groupFunction == null) {
        return null;
    }
    // Resolve the source column; an explicitly given id must exist in the metadata,
    // otherwise fall back to the first column of the data set.
    String sourceId = groupFunction.getSourceId();
    if (sourceId != null && !existColumnInMetadataDef(sourceId)) {
        throw new RuntimeException("Aggregation by column [" + sourceId + "] failed. No column with the given id.");
    }
    if (sourceId == null) {
        sourceId = metadata.getColumnId(0);
    }
    if (sourceId == null) {
        throw new IllegalArgumentException("Aggregation from unknown column id.");
    }
    // The aggregation is named after the target column; default to the source column id.
    String columnId = groupFunction.getColumnId();
    if (columnId == null) {
        columnId = sourceId;
    }
    AggregateFunctionType type = groupFunction.getFunction();
    ValuesSourceMetricsAggregationBuilder result = null;
    switch (type) {
        case COUNT:
            result = AggregationBuilders.count(columnId).field(sourceId);
            break;
        case DISTINCT:
            // DISTINCT maps to a cardinality (approximate distinct count) aggregation.
            result = AggregationBuilders.cardinality(columnId).field(sourceId);
            break;
        case AVERAGE:
            result = AggregationBuilders.avg(columnId).field(sourceId);
            break;
        case SUM:
            result = AggregationBuilders.sum(columnId).field(sourceId);
            break;
        case MIN:
            result = AggregationBuilders.min(columnId).field(sourceId);
            break;
        case MAX:
            result = AggregationBuilders.max(columnId).field(sourceId);
            break;
    }
    if (result == null) {
        throw new RuntimeException("Core function not supported as an Elastic Search aggregation [type=" + type.name() + "]");
    }
    return result;
}
Example 28
Project: HERD-master  File: ElasticsearchFunctions.java View source code
/**
 * Adds the tag facet aggregations to the search request and stores the parsed
 * facet information on the response DTO. Only runs when the TAG facet was
 * actually requested.
 */
private void addFacetFieldAggregations(Set<String> facetFieldsList, ElasticsearchResponseDto elasticsearchResponseDto, SearchRequestBuilder searchRequestBuilder) {
    // Nothing to do unless the caller requested the tag facet.
    if (CollectionUtils.isEmpty(facetFieldsList) || !facetFieldsList.contains(TAG_FACET)) {
        return;
    }
    // Nested aggregation over business-definition tags:
    // tag type code -> tag type name -> tag code -> tag name.
    searchRequestBuilder.addAggregation(
            AggregationBuilders.nested(TAG_FACET_AGGS, NESTED_BDEFTAGS_PATH)
                    .subAggregation(AggregationBuilders.terms(TAGTYPE_CODE_AGGREGATION).field(TAGTYPE_CODE_FIELD)
                            .subAggregation(AggregationBuilders.terms(TAGTYPE_NAME_AGGREGATION).field(TAGTYPE_NAME_FIELD)
                                    .subAggregation(AggregationBuilders.terms(TAG_CODE_AGGREGATION).field(TAG_CODE_FIELD)
                                            .subAggregation(AggregationBuilders.terms(TAG_NAME_AGGREGATION).field(TAG_NAME_FIELD))))));
    // Tag-type facet: tag type code -> namespace -> business definition name.
    searchRequestBuilder.addAggregation(
            AggregationBuilders.terms(TAG_TYPE_FACET_AGGS).field(TAGTYPE_CODE_FIELD)
                    .subAggregation(AggregationBuilders.terms(NAMESPACE_CODE_AGGS).field(NAMESPACE_FIELD)
                            .subAggregation(AggregationBuilders.terms(BDEF_NAME_AGGS).field(BDEF_NAME_FIELD))));
    elasticsearchResponseDto.setTagTypeIndexSearchResponseDtos(searchResponseIntoFacetInformation(searchRequestBuilder));
}
Example 29
Project: heroic-master  File: SuggestBackendKV.java View source code
/**
 * Suggests tag values grouped by tag key, honoring the request's filter,
 * key exclusions, and per-key / per-group result limits.
 */
@Override
public AsyncFuture<TagValuesSuggest> tagValuesSuggest(final TagValuesSuggest.Request request) {
    return connection.doto((final Connection c) -> {
        // Build the filter: apply the request filter (unless it matches everything)
        // and exclude any explicitly blacklisted tag keys.
        final BoolFilterBuilder bool = boolFilter();
        if (!(request.getFilter() instanceof TrueFilter)) {
            bool.must(filter(request.getFilter()));
        }
        for (final String e : request.getExclude()) {
            bool.mustNot(termFilter(TAG_SKEY_RAW, e));
        }
        final QueryBuilder query = bool.hasClauses() ? filteredQuery(matchAllQuery(), bool) : matchAllQuery();
        // COUNT search type: we only need aggregations, not the documents themselves.
        final SearchRequestBuilder builder = c.search(TAG_TYPE).setSearchType(SearchType.COUNT).setQuery(query).setTimeout(TIMEOUT);
        final OptionalLimit limit = request.getLimit();
        final OptionalLimit groupLimit = request.getGroupLimit();
        {
            // Request one bucket more than the limit so we can detect truncation below.
            final TermsBuilder terms = AggregationBuilders.terms("keys").field(TAG_SKEY_RAW);
            limit.asInteger().ifPresent( l -> terms.size(l + 1));
            builder.addAggregation(terms);
            // make value bucket one entry larger than necessary to figure out when limiting
            // is applied.
            final TermsBuilder cardinality = AggregationBuilders.terms("values").field(TAG_SVAL_RAW);
            groupLimit.asInteger().ifPresent( l -> cardinality.size(l + 1));
            terms.subAggregation(cardinality);
        }
        return bind(builder.execute()).directTransform((SearchResponse response) -> {
            final List<TagValuesSuggest.Suggestion> suggestions = new ArrayList<>();
            if (response.getAggregations() == null) {
                return TagValuesSuggest.of(Collections.emptyList(), Boolean.FALSE);
            }
            final Terms terms = response.getAggregations().get("keys");
            final List<Bucket> buckets = terms.getBuckets();
            // One suggestion per tag key; values within a key are sorted and group-limited.
            for (final Terms.Bucket bucket : limit.limitList(buckets)) {
                final Terms.Bucket valueTerms = bucket.getAggregations().get("values") != null ? bucket : bucket;
                final Terms valueTermsAgg = bucket.getAggregations().get("values");
                final List<Bucket> valueBuckets = valueTermsAgg.getBuckets();
                final SortedSet<String> result = new TreeSet<>();
                for (final Terms.Bucket valueBucket : valueBuckets) {
                    result.add(valueBucket.getKey());
                }
                final SortedSet<String> values = groupLimit.limitSortedSet(result);
                // "limited" is true when more value buckets came back than the group limit allows
                // (we asked for limit+1 above, so an overflow is observable here).
                final boolean limited = groupLimit.isGreater(valueBuckets.size());
                suggestions.add(new TagValuesSuggest.Suggestion(bucket.getKey(), values, limited));
            }
            return TagValuesSuggest.of(ImmutableList.copyOf(suggestions), limit.isGreater(buckets.size()));
        });
    });
}
Example 30
Project: htwplus-master  File: ElasticsearchService.java View source code
/**
 * Build and execute a search query based on all provided fields.
 *
 * @param caller           - Define normal search or autocomplete (not referenced in this
 *                           implementation; kept for interface compatibility)
 * @param query            - Terms to search for (e.g. 'informatik'); null or empty matches everything
 * @param filter           - Filter for searchfacets (e.g. user, group, comment) or 'all'
 * @param facets           - Selected facet values keyed by facet name
 *                           (studycourse, degree, semester, role, grouptype); may be null
 * @param page             - Which results should be shown (e.g. 1: 1-10 ; 2: 11-20 etc.)
 * @param currentAccountId - AccountId from user who is logged in (for scoring and authorization)
 * @param mustFields       - All fields to search on
 * @param scoringFields    - All fields which affect the scoring
 * @return - JSON response from Elasticsearch
 * @throws ExecutionException
 * @throws InterruptedException
 */
@Override
public SearchResponse doSearch(String caller, String query, String filter, HashMap<String, String[]> facets, int page, String currentAccountId, List<String> mustFields, List<String> scoringFields) throws ExecutionException, InterruptedException {
    QueryBuilder searchQuery;
    // BUGFIX: the null check must run BEFORE isEmpty() — the original order
    // (query.isEmpty() || query == null) dereferenced a possibly-null query first
    // and threw a NullPointerException instead of matching everything.
    if (query == null || query.isEmpty()) {
        // Build searchQuery to search for everything
        searchQuery = QueryBuilders.matchAllQuery();
    } else {
        // Build searchQuery by provided fields (mustFields) to search on
        searchQuery = QueryBuilders.multiMatchQuery(query, mustFields.toArray(new String[mustFields.size()]));
    }
    // Build scoringQuery by provided fields (scoringFields) to increase the scoring of a better matching hit
    QueryBuilder scoringQuery = QueryBuilders.multiMatchQuery(currentAccountId, scoringFields.toArray(new String[scoringFields.size()]));
    // Build boolQuery to enable filter possibilities
    BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
    // Add should filter to show authorized posts only
    boolQuery.should(QueryBuilders.termQuery("viewable", currentAccountId)).should(QueryBuilders.termQuery("public", true));
    // Add mode-filter to filter only for users/group or posts
    if (!filter.equals("all")) {
        boolQuery.must(QueryBuilders.typeQuery(filter));
    }
    // Add facet-filter to filter for mode related stuff (eg. user -> students or group -> open)
    if (facets != null) {
        addFacetTermFilters(boolQuery, facets, "studycourse");
        addFacetTermFilters(boolQuery, facets, "degree");
        addFacetTermFilters(boolQuery, facets, "semester");
        addFacetTermFilters(boolQuery, facets, "role");
        addFacetTermFilters(boolQuery, facets, "grouptype");
    }
    // Build completeQuery with search- and scoringQuery
    QueryBuilder completeQuery = QueryBuilders.boolQuery().must(searchQuery).should(scoringQuery).filter(boolQuery);
    // Build searchRequest which will be executed after fields to highlight are added.
    SearchRequestBuilder searchRequest = client.prepareSearch(ES_INDEX).setQuery(completeQuery);
    // Add highlighting on all fields to search on
    for (String field : mustFields) {
        searchRequest.addHighlightedField(field);
    }
    // Define html tags for highlighting
    searchRequest = searchRequest.setHighlighterPreTags("[startStrong]").setHighlighterPostTags("[endStrong]").setHighlighterNumOfFragments(0);
    // Enable pagination
    searchRequest = searchRequest.setFrom((page * ES_RESULT_SIZE) - ES_RESULT_SIZE);
    // Add term aggregation for facet count
    searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("types").field("_type"));
    // Add user aggregations
    if (filter.equals("user")) {
        searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("studycourse").field("studycourse"));
        searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("degree").field("degree"));
        searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("semester").field("semester"));
        searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("role").field("role"));
    }
    // Add group aggregations
    if (filter.equals("group")) {
        searchRequest = searchRequest.addAggregation(AggregationBuilders.terms("grouptype").field("grouptype"));
    }
    // Execute searchRequest
    SearchResponse response = searchRequest.execute().get();
    return response;
}

/**
 * Adds a must term-filter for every selected value of the given facet.
 * The Elasticsearch field name equals the facet name for all supported facets.
 */
private static void addFacetTermFilters(BoolQueryBuilder boolQuery, HashMap<String, String[]> facets, String facetName) {
    String[] selectedValues = facets.get(facetName);
    // Guard against absent facet keys — the original code assumed every key was
    // always present and would have thrown a NullPointerException otherwise.
    if (selectedValues != null && selectedValues.length != 0) {
        for (String facet : selectedValues) {
            boolQuery.must(QueryBuilders.termQuery(facetName, facet));
        }
    }
}
Example 31
Project: elasticsearch-taste-master  File: ElasticsearchDataModel.java View source code
/**
 * Lazily loads the stats aggregation (min/max/avg/sum/count) for the preference
 * value field into {@code stats}. Computed at most once; the method is
 * synchronized so concurrent callers do not issue duplicate searches.
 */
protected synchronized void loadValueStats() {
    // Already loaded — nothing to do.
    if (stats != null) {
        return;
    }
    // TODO join userQueryBuilder and itemQueryBuilder
    // setSize(0): only the aggregation result is needed, not the matching documents.
    final SearchResponse response = client.prepareSearch(preferenceIndex).setTypes(preferenceType).setQuery(getLastAccessedFilterQuery()).setSize(0).addAggregation(AggregationBuilders.stats(valueField).field(valueField)).execute().actionGet();
    final Aggregations aggregations = response.getAggregations();
    stats = aggregations.get(valueField);
}
Example 32
Project: opencast-master  File: AbstractSearchIndex.java View source code
/**
 * Returns all the known terms for a field (aka facets).
 *
 * @param field
 *          the field name
 * @param types
 *          an optional array of document types; if none is set, all types are searched
 * @return the list of terms found for the field
 */
public List<String> getTermsForField(String field, Option<String[]> types) {
    final String aggregationName = "terms";
    // A plain terms aggregation collects every distinct value of the field.
    TermsBuilder termsAggregation = AggregationBuilders.terms(aggregationName).field(field);
    SearchRequestBuilder request = getSearchClient().prepareSearch(getIndexName()).addAggregation(termsAggregation);
    // Restrict the search to the given document types when provided.
    if (types.isSome()) {
        request = request.setTypes(types.get());
    }
    SearchResponse response = request.execute().actionGet();
    // Collect one entry per aggregation bucket.
    Terms aggregationResult = response.getAggregations().get(aggregationName);
    List<String> foundTerms = new ArrayList<>();
    for (Bucket bucket : aggregationResult.getBuckets()) {
        foundTerms.add(bucket.getKey());
    }
    return foundTerms;
}
Example 33
Project: graylog2-server-master  File: Searches.java View source code
/**
 * Runs a terms aggregation over the given field, returning the top buckets
 * ordered by document count plus the count of documents missing the field.
 */
public TermsResult terms(String field, int size, String query, String filter, TimeRange range, Sorting.Direction sorting) {
    // Map the requested sort direction onto a terms order by bucket doc count.
    final Terms.Order termsOrder = sorting == Sorting.Direction.DESC ? Terms.Order.count(false) : Terms.Order.count(true);
    final SearchSourceBuilder searchSourceBuilder = filter == null ? standardSearchRequest(query, range) : filteredSearchRequest(query, filter, range);
    // Filter aggregation wrapping both the terms aggregation (size defaults to 50)
    // and a "missing" aggregation counting documents without the field.
    final FilterAggregationBuilder filterBuilder = AggregationBuilders.filter(AGG_FILTER).subAggregation(AggregationBuilders.terms(AGG_TERMS).field(field).size(size > 0 ? size : 50).order(termsOrder)).subAggregation(AggregationBuilders.missing("missing").field(field)).filter(standardAggregationFilters(range, filter));
    searchSourceBuilder.aggregation(filterBuilder);
    // Short-circuit when the time range / filter touches no index at all.
    final Set<String> affectedIndices = determineAffectedIndices(range, filter);
    if (affectedIndices.isEmpty()) {
        return TermsResult.empty(query, searchSourceBuilder.toString());
    }
    final Search.Builder searchBuilder = new Search.Builder(searchSourceBuilder.toString()).ignoreUnavailable(true).allowNoIndices(true).addType(IndexMapping.TYPE_MESSAGE).addIndex(affectedIndices);
    final io.searchbox.core.SearchResult searchResult = checkForFailedShards(JestUtils.execute(jestClient, searchBuilder.build(), () -> "Unable to perform terms query"));
    // Record query latency metrics before unpacking the aggregations.
    final long tookMs = tookMsFromSearchResult(searchResult);
    recordEsMetrics(tookMs, range);
    final FilterAggregation filterAggregation = searchResult.getAggregations().getFilterAggregation(AGG_FILTER);
    final TermsAggregation termsAggregation = filterAggregation.getTermsAggregation(AGG_TERMS);
    final MissingAggregation missing = filterAggregation.getMissingAggregation("missing");
    return new TermsResult(termsAggregation, missing.getMissing(), filterAggregation.getCount(), query, searchSourceBuilder.toString(), tookMs);
}
Example 34
Project: searchisko-master  File: SearchService.java View source code
/**
	 * Setup all required aggregations on SearchRequestBuilder.
	 *
	 * @param querySettings the query settings holding the set of requested aggregations
	 * @param searchFilters currently applied search filters, keyed by filter name (may be null)
	 * @param srb the search request builder to add the aggregations to
	 */
protected void handleAggregationSettings(QuerySettings querySettings, final Map<String, FilterBuilder> searchFilters, SearchRequestBuilder srb) {
    Map<String, Object> configuredAggregations = configService.get(ConfigService.CFGNAME_SEARCH_FULLTEXT_AGGREGATIONS_FIELDS);
    Set<String> requestedAggregations = querySettings.getAggregations();
    // Only aggregations that are both requested by the client AND configured on the
    // server are added; anything else is silently ignored.
    if (configuredAggregations != null && !configuredAggregations.isEmpty() && requestedAggregations != null && !requestedAggregations.isEmpty()) {
        for (String requestedAggregation : requestedAggregations) {
            Object aggregationConfig = configuredAggregations.get(requestedAggregation);
            if (aggregationConfig != null) {
                SemiParsedAggregationConfig parsedAggregationConfig = parseAggregationType(aggregationConfig, requestedAggregation);
                // terms aggregation
                if (SemiParsedAggregationConfig.AggregationType.TERMS.toString().equals(parsedAggregationConfig.getAggregationType())) {
                    int size;
                    // The mandatory "size" setting is validated here; a missing or
                    // non-integer value is reported as a configuration error.
                    try {
                        size = (int) parsedAggregationConfig.getOptionalSettings().get("size");
                    } catch (Exception e) {
                        throw new SettingsException("Incorrect configuration of fulltext search aggregation field '" + requestedAggregation + "' in configuration document " + ConfigService.CFGNAME_SEARCH_FULLTEXT_AGGREGATIONS_FIELDS + ": Invalid value of [size] field.");
                    }
                    // We need to apply security filter.
                    // The original map is never mutated — a clone is made before the
                    // document-level security filter is added.
                    Map<String, FilterBuilder> _searchFilters = searchFilters;
                    if (_searchFilters != null && !_searchFilters.isEmpty()) {
                        FilterBuilder securityFilter = getContentLevelSecurityFilterInternal();
                        if (securityFilter != null) {
                            Map<String, FilterBuilder> _searchFiltersClone = new HashMap<>();
                            _searchFiltersClone.putAll(_searchFilters);
                            _searchFiltersClone.put("document_level_security", securityFilter);
                            _searchFilters = _searchFiltersClone;
                        }
                    }
                    srb.addAggregation(createTermsBuilder(requestedAggregation, parsedAggregationConfig.getFieldName(), size, _searchFilters, true));
                    if (_searchFilters != null && _searchFilters.containsKey(parsedAggregationConfig.getFieldName())) {
                        if (parsedAggregationConfig.isFiltered()) {
                            // we filter over contributors so we have to add second aggregation which provide more accurate numbers for selected
                            // contributors because they can be out of normal aggregation due size limit
                            srb.addAggregation(createTermsBuilder(requestedAggregation + "_selected", parsedAggregationConfig.getFieldName(), parsedAggregationConfig.getFilteredSize(), _searchFilters, false));
                        }
                    }
                // date histogram aggregation
                } else if (SemiParsedAggregationConfig.AggregationType.DATE_HISTOGRAM.toString().equals(parsedAggregationConfig.getAggregationType())) {
                    DateHistogramBuilder dhb = AggregationBuilders.dateHistogram(requestedAggregation);
                    // Interval is resolved per field (e.g. day/week/month) by a helper.
                    DateHistogram.Interval i = new DateHistogram.Interval(getDateHistogramAggregationInterval(parsedAggregationConfig.getFieldName()));
                    dhb.field(parsedAggregationConfig.getFieldName()).interval(i);
                    srb.addAggregation(dhb);
                }
            }
        }
    }
}
Example 35
Project: loklak_server-master  File: ElasticsearchClient.java View source code
/**
 * Computes a per-day document-count histogram over the given time field for the
 * whole index, with day boundaries shifted by the caller's timezone offset.
 *
 * @param indexName           index to aggregate over
 * @param timezoneOffset      client timezone offset in minutes, subtracted from each bucket time
 * @param histogram_timefield date field to build the histogram on (also used as the aggregation name)
 * @return day string (per DateParser.dayDateFormat) mapped to document count, in bucket order
 */
public LinkedHashMap<String, Long> fullDateHistogram(final String indexName, int timezoneOffset, String histogram_timefield) {
    // Match everything; constant-score because relevance is irrelevant for counting.
    SearchRequestBuilder searchRequest = elasticsearchClient.prepareSearch(indexName)
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchAllQuery()))
            .setFrom(0)
            .setSize(0);
    searchRequest.clearRescorers();
    // Daily buckets in UTC; empty days are dropped (minDocCount(1)).
    searchRequest.addAggregation(AggregationBuilders.dateHistogram(histogram_timefield)
            .field(histogram_timefield)
            .timeZone("UTC")
            .minDocCount(1)
            .interval(DateHistogramInterval.DAY));
    SearchResponse searchResponse = searchRequest.execute().actionGet();
    // Translate each histogram bucket into a formatted day string with its count.
    InternalHistogram<InternalHistogram.Bucket> dateCounts = searchResponse.getAggregations().get(histogram_timefield);
    LinkedHashMap<String, Long> histogram = new LinkedHashMap<>();
    for (InternalHistogram.Bucket bucket : dateCounts.getBuckets()) {
        org.joda.time.DateTime bucketStart = (org.joda.time.DateTime) bucket.getKey();
        Calendar cal = Calendar.getInstance(DateParser.UTCtimeZone);
        cal.setTime(bucketStart.toDate());
        // Shift the UTC bucket boundary into the caller's local timezone.
        cal.add(Calendar.MINUTE, -timezoneOffset);
        histogram.put(DateParser.dayDateFormat.format(cal.getTime()), bucket.getDocCount());
    }
    return histogram;
}
Example 36
Project: vertexium-master  File: ElasticsearchSearchQueryBase.java View source code
/**
 * Builds one geohash-grid aggregation per indexed property that backs the
 * requested field (a field may map to several properties, one per visibility).
 */
protected List<AggregationBuilder> getElasticsearchGeohashAggregations(GeohashAggregation agg) {
    final List<AggregationBuilder> result = new ArrayList<>();
    for (String propertyName : getPropertyNames(agg.getFieldName())) {
        // Aggregation names are made unique per property visibility hash.
        final String visibilityHash = getSearchIndex().getPropertyVisibilityHashFromDeflatedPropertyName(propertyName);
        final String aggregationName = createAggregationName(agg.getAggregationName(), visibilityHash);
        final GeoHashGridBuilder gridBuilder = AggregationBuilders.geohashGrid(aggregationName);
        // Geo points live under a dedicated suffixed companion field.
        gridBuilder.field(propertyName + Elasticsearch2SearchIndex.GEO_PROPERTY_NAME_SUFFIX);
        gridBuilder.precision(agg.getPrecision());
        result.add(gridBuilder);
    }
    return result;
}