/*******************************************************************************
* Copyright 2013-2015 alladin-IT GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package at.alladin.rmbt.statisticServer;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.TimeZone;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.restlet.data.Form;
import org.restlet.data.Status;
import org.restlet.resource.Get;
import at.alladin.rmbt.shared.cache.CacheHelper;
/**
 * Restlet resource answering open-data test search and histogram requests
 * (see {@link #request(String)}).
 */
@Deprecated /* DELETE if no errors occur */
public class OpenTestSearchResource extends ServerResource
{
//value types of the query parameters; SORTBY, SORTORDER and IGNORE are
//control parameters that never end up in the SQL WHERE clause
private enum FieldType {STRING, DATE, LONG, DOUBLE, BOOLEAN, UUID, SORTBY, SORTORDER, IGNORE};
//lifetime of the cached default (unfiltered) histogram, in seconds
private static final int CACHE_EXP = 3600;
private final CacheHelper cache = CacheHelper.getInstance();
public final int MAXROWS = 10000; //maximum number of rows allowed, currently approx 1.5s response time at maximum
public final int DEFAULTROWS = 100; //default number of rows (when max_results is not specified)
public final int MAXQUERYFIELDS = 50; //to prevent database-server overload
//number of classes (buckets) in every histogram
private final int HISTOGRAMCLASSES = 12;
//default histogram bounds, used when the request supplies no explicit filter;
//download/upload values are presumably kbit/s -- TODO confirm against schema
private final int HISTOGRAMDOWNLOADDEFAULTMAX = 100000;
private final int HISTOGRAMDOWNLOADDEFAULTMIN = 0;
private final int HISTOGRAMUPLOADDEFAULTMAX = 100000;
private final int HISTOGRAMUPLOADDEFAULTMIN = 0;
private final int HISTOGRAMPINGDEFAULTMAX = 300; //milliseconds
private final int HISTOGRAMPINGDEFAULTMIN = 0;
//all fields that should be displayed in a general request (e.g. all tests for one user)
private final String[] openDataFieldsSummary = {"open_uuid", "open_test_uuid", "time", "lat", "long", "download_kbit", "upload_kbit",
"ping_ms", "signal_strength", "lte_rsrp", "platform", "provider_name", "model"};
//all fields that are numbers (and are formatted as numbers in json)
private final HashSet<String> openDataNumberFields = new HashSet<>(Arrays.asList(new String[]{"time", "lat", "long", "zip_code", "download_kbit",
"upload_kbit","ping_ms","signal_strength", "lte_rsrp", "test_duration","num_threads","ndt_download_kbit","ndt_upload_kbit","asn"}));
//all fields for which the user can sort the result
private final HashSet<String> openDataFieldsSortable = new HashSet<>(Arrays.asList(new String[]{"download_kbit","upload_kbit","time","signal_strength","ping_ms"}));
//filter bounds collected while parsing the request; consumed by getHistogram()
private final HistogramInfo histogramInfo = new HistogramInfo();
//set to false when the client explicitly requests implausible results
private boolean excludeImplausible = true;
/**
 * Entry point for GET requests on the open-data search/histogram endpoint.
 * Validates every query parameter against a whitelist, collects SQL WHERE
 * fragments plus their bind values, then dispatches either to the histogram
 * query or to the paged search-result query.
 *
 * @param entity the request entity (unused)
 * @return the JSON response as a String; on invalid parameters a JSON object
 *         listing the offending fields is returned with HTTP 400
 * @throws JSONException if building the response fails
 */
@Get("json")
public String request(final String entity) throws JSONException
{
addAllowOrigin();
//these are all the allowed fields in the query;
//for the conversion query-fieldname -> db-fieldname
//please take a look at formatWhereClause()
//each filter also exists in array form ("field[]") so that several
//constraints can be combined (e.g. a time range)
Map<String,FieldType> allowedFields = new HashMap<>();
allowedFields.put("download_kbit", FieldType.LONG);
allowedFields.put("download_kbit[]", FieldType.LONG);
allowedFields.put("upload_kbit", FieldType.LONG);
allowedFields.put("upload_kbit[]", FieldType.LONG);
allowedFields.put("ping_ms", FieldType.DOUBLE);
allowedFields.put("ping_ms[]", FieldType.DOUBLE);
allowedFields.put("time", FieldType.DATE);
allowedFields.put("time[]", FieldType.DATE);
allowedFields.put("zip_code", FieldType.LONG);
allowedFields.put("zip_code[]", FieldType.LONG);
allowedFields.put("cat_technology", FieldType.STRING);
allowedFields.put("cat_technology[]", FieldType.STRING);
allowedFields.put("client_version", FieldType.STRING);
allowedFields.put("client_version[]", FieldType.STRING);
allowedFields.put("model", FieldType.STRING);
allowedFields.put("model[]", FieldType.STRING);
allowedFields.put("network_name", FieldType.STRING);
allowedFields.put("network_name[]", FieldType.STRING);
allowedFields.put("network_type", FieldType.STRING);
allowedFields.put("network_type[]", FieldType.STRING);
allowedFields.put("platform", FieldType.STRING);
allowedFields.put("platform[]", FieldType.STRING);
allowedFields.put("signal_strength", FieldType.LONG);
allowedFields.put("signal_strength[]", FieldType.LONG);
allowedFields.put("open_uuid",FieldType.UUID);
allowedFields.put("long",FieldType.DOUBLE);
allowedFields.put("long[]",FieldType.DOUBLE);
allowedFields.put("lat",FieldType.DOUBLE);
allowedFields.put("lat[]",FieldType.DOUBLE);
allowedFields.put("mobile_provider_name", FieldType.STRING);
allowedFields.put("mobile_provider_name[]", FieldType.STRING);
allowedFields.put("provider_name",FieldType.STRING);
allowedFields.put("provider_name[]",FieldType.STRING);
allowedFields.put("sim_mcc_mnc",FieldType.STRING);
allowedFields.put("sim_mcc_mnc[]",FieldType.STRING);
allowedFields.put("sim_country",FieldType.STRING);
allowedFields.put("sim_country[]",FieldType.STRING);
allowedFields.put("asn",FieldType.LONG);
allowedFields.put("asn[]",FieldType.LONG);
allowedFields.put("network_country",FieldType.STRING);
allowedFields.put("network_country[]",FieldType.STRING);
allowedFields.put("country_geoip",FieldType.STRING);
allowedFields.put("country_geoip[]",FieldType.STRING);
allowedFields.put("loc_accuracy",FieldType.LONG);
allowedFields.put("loc_accuracy[]",FieldType.LONG);
allowedFields.put("public_ip_as_name",FieldType.STRING);
allowedFields.put("timestamp", FieldType.IGNORE); //for forcing no-cache
allowedFields.put("_", FieldType.IGNORE); //jQuery no-cache standard
allowedFields.put("sender", FieldType.IGNORE);
//allowedFields.put("ip_anonym", FieldType.STRING);
//allowedFields.put("ip_anonym[]", FieldType.STRING);
allowedFields.put("implausible", FieldType.BOOLEAN);
allowedFields.put("sort_by",FieldType.SORTBY);
allowedFields.put("sort_order",FieldType.SORTORDER);
allowedFields.put("cursor", FieldType.LONG);
allowedFields.put("max_results", FieldType.LONG);
//bind values for the prepared statement, in the order the placeholders
//appear in where_query
Queue<Map.Entry<String, FieldType>> searchValues = new LinkedList<>();
String where_query = "";
String orderClause = "";
final JSONArray invalidElements = new JSONArray();
final JSONObject response = new JSONObject();
final Form getParameters = getRequest().getResourceRef().getQueryAsForm();
String sortBy="";
String sortOrder = "";
//validate every supplied parameter; invalid ones are collected in
//invalidElements and reported back to the client
for (String attr : getParameters.getNames()) {
//check if attribute is allowed
if (!allowedFields.containsKey(attr)) {
invalidElements.put(attr);
continue;
}
//check if value for the attribute is correct
//first, check if the attribute is an array
String[] values = getParameters.getValuesArray(attr);
for (String value : values) {
//a leading '!' negates the condition
//NOTE(review): length()>0 is always true after startsWith("!"); a lone
//"!" leaves an empty value, which the per-type checks below reject
boolean negate = false;
if (value.startsWith("!") && value.length()>0) {
negate = true;
value = value.substring(1);
}
FieldType type = allowedFields.get(attr);
//do some basic sanity checks for the given parameters
//(a 'continue' here skips to the next value of the values-loop)
switch (type) {
case STRING:
if (value.isEmpty()) {
invalidElements.put(attr);
continue;
}
//allow using wildcard '*' instead of sql '%'
value = value.replace('*', '%');
//allow using wildcard '?' instead of sql '_'
value = value.replace('?', '_');
where_query += formatWhereClause(attr, value,negate, type, searchValues);
break;
case DATE:
//'>' or '<' prefix selects tests after/before the given time
String comperatorDate = "=";
if (value.startsWith(">") || value.startsWith("<")) {
comperatorDate = value.substring(0, 1);
value = value.substring(1);
}
//accept either epoch milliseconds or a "yyyy-MM-dd HH:mm:ss" date
if (value.isEmpty() || !isDouble(value)) {
//try parsing the date
long v = parseDate(value);
if (v == -1) {
invalidElements.put(attr);
continue;
}
//date can be parsed => assign new value
value = Long.toString(v);
}
long v = Long.parseLong(value);
value = Long.toString(v);
where_query += formatWhereClause(attr, value, comperatorDate, negate, type, searchValues);
break;
case UUID:
if (value.isEmpty()) {
invalidElements.put(attr);
continue;
}
value = value.substring(1); //cut prefix ('P' for open_uuid, 'O' for open_test_uuid)
try {
UUID.fromString(value);
} catch(IllegalArgumentException e) {
invalidElements.put(attr);
continue;
}
where_query += formatWhereClause(attr, value, "=", negate, type, searchValues);
break;
case BOOLEAN:
if (value.isEmpty() ||
(!value.toLowerCase().equals("false") && !value.toLowerCase().equals("true"))) {
invalidElements.put(attr);
continue;
}
where_query += formatWhereClause(attr, value, "=", negate, type, searchValues);
break;
case DOUBLE:
case LONG:
//'>' and '<' are interpreted inclusively (>= / <=)
String comperator = "=";
if (value.startsWith(">") || value.startsWith("<")) {
comperator = value.substring(0, 1);
comperator += "=";
value = value.substring(1);
}
if (value.isEmpty() || !isDouble(value)) {
invalidElements.put(attr);
continue;
}
where_query += formatWhereClause(attr, value, comperator, negate, type, searchValues);
break;
case IGNORE:
break; //do nothing
case SORTBY:
if (value.isEmpty() || !openDataFieldsSortable.contains(value)) {
invalidElements.put(attr);
continue;
}
sortBy = value;
break;
case SORTORDER:
//only "ASC", "DESC" are allowed
//and the attribute is only allowed, if sort_by is also given
if (value.isEmpty() ||
(!value.toUpperCase().equals("ASC") && !value.toUpperCase().equals("DESC")) ||
!getParameters.getNames().contains("sort_by")) {
invalidElements.put(attr);
continue;
}
sortOrder = value;
break;
}
}
}
//empty string if no sorting was requested
orderClause = formatOrderClause(sortBy, sortOrder);
//calculate offset
long offset = -1;
if (getParameters.getNames().contains("cursor")) {
//is always a valid LONG because it is checked with all other
//parameters above
offset = Long.parseLong(getParameters.getFirstValue("cursor"));
}
//get maximal results-parameter
long maxrows = DEFAULTROWS;
if (getParameters.getNames().contains("max_results")) {
//is always a valid LONG because it is checked with all other
//parameters above
maxrows = Long.parseLong(getParameters.getFirstValue("max_results"));
}
//if there have been errors => inform the user
if (invalidElements.length()>0) {
setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
response.put("invalid_fields", invalidElements);
return response.toString();
}
//if there are too many query elements (DoS-Attack?), don't let it
//get to the database
if (searchValues.size() > MAXQUERYFIELDS) {
setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
response.put("invalid_fields", "field limit exceeded");
return response.toString();
}
//differentiate between histogram and search query
//not a very good way...
if (getRequest().getAttributes().containsKey("histogram")) {
return this.getHistogram(where_query, searchValues);
}
else
return getSearchResult(where_query, searchValues, orderClause, offset, maxrows);
}
/**
 * Builds the SQL ORDER BY clause for an already validated sort request.
 *
 * @param sortBy external field name (checked against openDataFieldsSortable
 *        by the caller); an empty string means "no explicit order"
 * @param sortOrder "ASC" or "DESC"; may be empty, leaving the DB default
 * @return the ORDER BY fragment, or an empty string when sortBy is empty
 */
private static String formatOrderClause(String sortBy, String sortOrder) {
    if (sortBy.isEmpty()) {
        return "";
    }
    //translate the public field name into the real column name
    final String column;
    switch (sortBy) {
        case "download_kbit":
            column = "t.speed_download";
            break;
        case "upload_kbit":
            column = "t.speed_upload";
            break;
        case "ping_ms":
            column = "t.ping_median";
            break;
        case "time":
            column = "t.time";
            break;
        case "client_version":
            column = "client_software_version";
            break;
        case "sim_mcc_mnc":
            column = "network_sim_operator";
            break;
        case "sim_country":
            column = "network_sim_country";
            break;
        case "signal_strength":
            column = "t.signal_strength";
            break;
        default:
            //unknown names pass through unchanged (caller has validated them)
            column = sortBy;
            break;
    }
    return " ORDER BY " + column + " " + sortOrder;
}
/**
 * Convenience overload of
 * {@link #formatWhereClause(String, String, String, boolean, FieldType, Queue)}
 * that uses the case-insensitive pattern comparator "ILIKE", suitable for
 * all string-typed filters.
 */
private String formatWhereClause(String attr, String value, boolean negate, FieldType type, Queue<Map.Entry<String, FieldType>> queue) {
    //delegate to the full variant with the default string comparator
    final String defaultComparator = "ILIKE";
    return formatWhereClause(attr, value, defaultComparator, negate, type, queue);
}
/**
 * Transforms the given parameter into a psql WHERE fragment, starting with " AND ",
 * and enqueues the corresponding bind value(s).
 * @param attr the attribute name from the get-request - is replaced with the real column name
 * @param value what the column given in 'attr' should have as value
 * @param comperator the comparator, eg. '=', '>=', '<=', 'ILIKE'
 * @param negate true, if the results should NOT match the criteria
 * @param type the type of the column (numeric, string, uuid, date); pushed into
 *        the queue together with the value so fillInWhereClause() can bind it
 * @param queue the queue where the resulting transformed value should be put in;
 *        one entry per '?' placeholder, in placeholder order
 * @return the formatted AND-Clause for the prepared statement (AND xxx = ?)
 */
private String formatWhereClause(String attr, String value, String comperator, boolean negate, FieldType type, Queue<Map.Entry<String, FieldType>> queue) {
//if it is a array => remove the brackets
if (attr.endsWith("[]")) {
attr = attr.substring(0,attr.length()-2);
}
//because we use aliases, some modifications have to be made
//(some filters also record their bounds in histogramInfo for getHistogram())
if (attr.equals("download_kbit")) {
attr = "t.speed_download";
//histogram
if (comperator.equals(">="))
this.histogramInfo.min_download = Long.parseLong(value);
else if (comperator.equals("<="))
this.histogramInfo.max_download = Long.parseLong(value);
}
else if (attr.equals("upload_kbit")) {
attr = "t.speed_upload";
//histogram
if (comperator.equals(">="))
this.histogramInfo.min_upload = Long.parseLong(value);
else if (comperator.equals("<="))
this.histogramInfo.max_upload = Long.parseLong(value);
}
else if (attr.equals("ping_ms")) {
attr = "t.ping_median";
//ping_median is stored in nanoseconds (cf. the "/ 1000000" in the SELECTs),
//so convert the filter value from ms to ns; histogramInfo keeps ms
Double v = Double.parseDouble(value)*1000000;
//histogram
if (comperator.equals(">="))
this.histogramInfo.min_ping = Double.parseDouble(value);
else if (comperator.equals("<="))
this.histogramInfo.max_ping = Double.parseDouble(value);
value = v.toString();
}
else if (attr.equals("time")) {
attr = "t.time";
}
else if (attr.equals("cat_technology")) {
attr = "nt.group_name";
}
else if (attr.equals("client_version")) {
attr = "client_software_version";
}
else if (attr.equals("model")) {
//two placeholders => the value is enqueued twice
attr = "(adm.fullname ILIKE ? OR t.model ILIKE ?)";
queue.add(new AbstractMap.SimpleEntry<>(value, type));
queue.add(new AbstractMap.SimpleEntry<>(value, type));
if (!negate) {
return " AND " + attr;
}
else {
return " AND NOT " + attr;
}
}
else if (attr.equals("provider_name")) {
//mobile provider wins over fixed-line provider; two placeholders
attr = "(mprov.name ILIKE ? OR (mprov.name IS NULL AND prov.name ILIKE ?))";
queue.add(new AbstractMap.SimpleEntry<>(value, type));
queue.add(new AbstractMap.SimpleEntry<>(value, type));
if (!negate) {
return " AND " + attr;
}
else {
return " AND NOT " + attr;
}
}
else if (attr.equals("mobile_provider_name")) {
attr = "mprov.name";
}
else if (attr.equals("network_name")) {
attr = "network_operator_name";
}
else if (attr.equals("network_type")) {
attr = "t.network_group_type";
}
else if (attr.equals("cursor") || attr.equals("max_results")) {
//pagination parameters produce no WHERE fragment
return "";
}
else if (attr.equals("platform")) {
//two placeholders => the value is enqueued twice
attr = "(t.plattform ILIKE ? OR (t.plattform IS NULL AND t.client_name ILIKE ?))";
queue.add(new AbstractMap.SimpleEntry<>(value, type));
queue.add(new AbstractMap.SimpleEntry<>(value, type));
if (!negate) {
return " AND " + attr;
}
else {
return " AND NOT " + attr;
}
}
else if (attr.equals("signal_strength")) {
attr= "t.signal_strength";
}
else if (attr.equals("open_uuid")) {
attr = "t.open_uuid";
}
else if (attr.equals("lat")) {
attr = "t.geo_lat";
}
else if (attr.equals("long")) {
attr = "t.geo_long";
}
else if (attr.equals("sim_mcc_mnc")) {
attr = "network_sim_operator";
}
else if (attr.equals("sim_country")) {
attr = "network_sim_country";
}
else if (attr.equals("asn")) {
attr = "public_ip_asn";
}
else if (attr.equals("implausible")) {
//if false -> also allow null
//NOTE(review): the commented-out return below would implement that, but as
//written "implausible=false" falls through and generates "AND implausible = ?"
//with value false, which excludes rows where implausible IS NULL -- confirm
//whether that is intended
if (value.toLowerCase().equals("true")) {
this.excludeImplausible = false;
//return " AND (t.implausible = FALSE or t.implausible IS NULL)";
}
}
else if (attr.equals("loc_accuracy")) {
attr = "t.geo_accuracy";
//special case: if value > threshold -> ignore and find no results (?)
//"AND 1=0" is an always-false clause, so the query returns nothing
if (comperator.equals(">=") || comperator.equals("=")) {
long val = Long.parseLong(value);
if (val > Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT"))) {
return " AND 1=0";
}
}
//special case: if (-1) than NULL values should be found
if (value.equals("-1")) {
return " AND t.geo_accuracy IS NULL";
}
}
else if (attr.equals("ip_anonym")) {
attr = "client_public_ip_anonymized";
}
//, zip_code are not renamed
queue.add(new AbstractMap.SimpleEntry<>(value, type));
if (negate) {
return " AND NOT " + attr + " " + comperator + " ?";
} else {
return " AND " + attr + " " + comperator + " ?";
}
}
/**
 * Checks whether the given string parses as a finite double value.
 * NaN, infinities, null and malformed input all yield false.
 *
 * @param input the raw parameter text to check
 * @return true if the input is a finite floating point number
 */
public boolean isDouble( String input )
{
    try {
        final double parsed = Double.parseDouble(input);
        //reject the special values that would pass parsing
        return !Double.isNaN(parsed) && !Double.isInfinite(parsed);
    } catch (Exception e) {
        //covers both malformed syntax (NumberFormatException)
        //and null input (NullPointerException)
        return false;
    }
}
/**
 * Parses an opendata timestamp ("yyyy-MM-dd HH:mm:ss", interpreted as UTC)
 * into epoch milliseconds.
 *
 * @param textual_date e.g. "2013-07-19 14:35:00"
 * @return the epoch time in milliseconds OR -1 if the format is invalid
 */
private static long parseDate(final String textual_date)
{
    //java.time replaces the lenient, non-thread-safe SimpleDateFormat;
    //unlike the old parser it also rejects nonsense such as hour 41 or
    //trailing garbage after the timestamp
    final DateTimeFormatter date_formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    try {
        return LocalDateTime.parse(textual_date, date_formatter)
                .toInstant(ZoneOffset.UTC)
                .toEpochMilli();
    } catch (DateTimeParseException ex) {
        return -1;
    }
}
/**
 * Mutable holder for the histogram bounds extracted from the request
 * filters by formatWhereClause(). Long.MIN_VALUE is the "not set" sentinel;
 * the double ping fields reuse the same sentinel (widened exactly to
 * double), which getHistogram() compares against.
 */
private class HistogramInfo {
long max_download = Long.MIN_VALUE;
long min_download = Long.MIN_VALUE;
long max_upload = Long.MIN_VALUE;
long min_upload = Long.MIN_VALUE;
double max_ping = Long.MIN_VALUE;
double min_ping = Long.MIN_VALUE;
}
/**
 * Gets the JSON-Response for the histograms (download, upload, ping).
 * The fully unfiltered histogram is served from / stored into the cache.
 * @param whereClause the WHERE fragments built from the request filters
 * @param searchValues bind values for the placeholders in whereClause
 * @return Json as String
 */
private String getHistogram(String whereClause, Queue<Map.Entry<String, FieldType>> searchValues) {
JSONObject ret = new JSONObject();
try {
if (searchValues.isEmpty()) {
//no filters => try getting the default histogram from the cache
String cacheString = (String) cache.get("opentest-histogram");
if (cacheString != null) {
System.out.println("cache hit for histogram");
return cacheString;
}
}
//Download
// logarithmic if without filters
boolean logarithmic = false;
if (histogramInfo.max_download == Long.MIN_VALUE
&& histogramInfo.min_download == Long.MIN_VALUE) {
//bounds 0..1 because the log-scaled speed columns are presumably
//normalized to [0,1] (cf. the scaling in getJSONForHistogram) -- confirm
histogramInfo.max_download = 1;
histogramInfo.min_download = 0;
logarithmic = true;
}
//fall back to the linear default bounds for one-sided filters
if (!logarithmic && histogramInfo.max_download == Long.MIN_VALUE) {
histogramInfo.max_download = HISTOGRAMDOWNLOADDEFAULTMAX;
}
if (!logarithmic && histogramInfo.min_download == Long.MIN_VALUE) {
histogramInfo.min_download = HISTOGRAMDOWNLOADDEFAULTMIN;
}
double min = this.histogramInfo.min_download;
double max = this.histogramInfo.max_download;
JSONArray downArray = getJSONForHistogram(min, max,
(logarithmic) ? "speed_download_log" : "speed_download",
logarithmic, whereClause, searchValues);
ret.put("download_kbit", downArray);
// Upload (same logic as download)
logarithmic = false;
if (histogramInfo.max_upload == Long.MIN_VALUE
&& histogramInfo.min_upload == Long.MIN_VALUE) {
histogramInfo.max_upload = 1;
histogramInfo.min_upload = 0;
logarithmic = true;
}
if (!logarithmic && histogramInfo.max_upload == Long.MIN_VALUE) {
histogramInfo.max_upload = HISTOGRAMUPLOADDEFAULTMAX;
}
if (!logarithmic && histogramInfo.min_upload == Long.MIN_VALUE) {
histogramInfo.min_upload = HISTOGRAMUPLOADDEFAULTMIN;
}
min = this.histogramInfo.min_upload;
max = this.histogramInfo.max_upload;
JSONArray upArray = getJSONForHistogram(min, max,
(logarithmic) ? "speed_upload_log" : "speed_upload",
logarithmic, whereClause, searchValues);
ret.put("upload_kbit", upArray);
//Ping (always linear, in milliseconds)
if (histogramInfo.max_ping == Long.MIN_VALUE) {
histogramInfo.max_ping = HISTOGRAMPINGDEFAULTMAX;
}
if (histogramInfo.min_ping == Long.MIN_VALUE) {
histogramInfo.min_ping = HISTOGRAMPINGDEFAULTMIN;
}
min = this.histogramInfo.min_ping;
max = this.histogramInfo.max_ping;
JSONArray pingArray = getJSONForHistogram(min, max, "(t.ping_median::float / 1000000)", false, whereClause, searchValues);
ret.put("ping_ms", pingArray);
if (searchValues.isEmpty()) {
//if it was the default -> save it to the cache for later
cache.set("opentest-histogram", CACHE_EXP, ret.toString());
}
} catch (JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return ret.toString();
}
/**
 * Gets the JSON Array for a specific histogram by bucketing the given
 * database field with psql's width_bucket().
 *
 * @param min lower bound of first class
 * @param max upper bound of last class
 * @param field numeric database field (or expression) the histogram is based
 *        on; supplied internally by getHistogram(), never from user input
 * @param isLogarithmic true for the log-scaled speed columns
 * @param whereClause additional filter conditions (each starting with " AND ")
 * @param searchValues bind values for the placeholders in whereClause
 * @return the histogram classes; classes without any rows are filled in with
 *         zero counts so the client always sees a contiguous histogram
 * @throws JSONException if building the JSON fails
 */
private JSONArray getJSONForHistogram(double min, double max, String field, boolean isLogarithmic, String whereClause, Queue<Map.Entry<String, FieldType>> searchValues) throws JSONException {
    //round the class limits outward to "nice" numbers based on the
    //magnitude of the requested range
    double difference = max - min;
    int digits = (int) Math.floor(Math.log10(difference));
    //RoundingMode replaces the BigDecimal.ROUND_* int constants, which are
    //deprecated since Java 9
    long upperBound = new BigDecimal(max).setScale(-digits, RoundingMode.CEILING).longValue();
    long lowerBound = new BigDecimal(min).setScale(-digits, RoundingMode.FLOOR).longValue();
    double step = ((double) (upperBound - lowerBound)) / ((double) HISTOGRAMCLASSES);
    System.out.println("lower: " + lowerBound + ", upper: " + upperBound + ", digits: " + digits + ", diff: " + difference + ", step: " + step);
    //psql width_bucket: gets the histogram class in which a value belongs
    final String sql =
        "select "
        + " width_bucket(" + field + "," + lowerBound + "," + upperBound + "," + HISTOGRAMCLASSES + ") bucket, "
        + " count(*) cnt "
        + " from test t "
        + " LEFT JOIN network_type nt ON nt.uid=t.network_type"
        + " LEFT JOIN device_map adm ON adm.codename=t.model"
        + " LEFT JOIN test_server ts ON ts.uid=t.server_id"
        + " LEFT JOIN provider prov ON provider_id = prov.uid "
        + " LEFT JOIN provider mprov ON mobile_provider_id = mprov.uid"
        + " where " + field + " > 0 "
        + " AND t.deleted = false"
        + ((this.excludeImplausible) ? " AND implausible = false" : "")
        + " AND status = 'FINISHED' " + whereClause
        + " group by bucket " + "order by bucket asc;";
    JSONArray jArray = new JSONArray();
    //try-with-resources closes statement and result set even if the query fails
    try (PreparedStatement stmt = conn.prepareStatement(sql)) {
        fillInWhereClause(stmt, searchValues, 1);
        try (ResultSet rs = stmt.executeQuery()) {
            int prevBucket = 0;
            while (rs.next()) {
                int bucket = rs.getInt("bucket");
                long cnt = rs.getLong("cnt");
                //backfill classes the query skipped because no row fell into them
                while (bucket - prevBucket > 1) {
                    prevBucket++;
                    jArray.put(emptyHistogramBucket(lowerBound, step, prevBucket, isLogarithmic));
                }
                prevBucket = bucket;
                double currentLowerBound = scaleHistogramBound(lowerBound + step * (bucket - 1), isLogarithmic);
                double currentUpperBound = scaleHistogramBound(lowerBound + step * bucket, isLogarithmic);
                JSONObject jBucket = new JSONObject();
                if (bucket == 0) {
                    //underflow bucket: everything below the first class
                    jBucket.put("lower_bound", JSONObject.NULL);
                } else if (step < 1 && !isLogarithmic) {
                    //2 digits accuracy for small differences
                    jBucket.put("lower_bound", ((double) Math.round(currentLowerBound * 100)) / (double) 100);
                } else {
                    jBucket.put("lower_bound", Math.round(currentLowerBound));
                }
                if (bucket == HISTOGRAMCLASSES + 1) {
                    //overflow bucket: everything above the last class
                    jBucket.put("upper_bound", JSONObject.NULL);
                } else if (step < 1 && !isLogarithmic) {
                    jBucket.put("upper_bound", ((double) Math.round(currentUpperBound * 100)) / (double) 100);
                } else {
                    jBucket.put("upper_bound", Math.round(currentUpperBound));
                }
                jBucket.put("results", cnt);
                jArray.put(jBucket);
            }
            //pad at the end if fewer than HISTOGRAMCLASSES classes were produced
            int bucket = jArray.length();
            while (jArray.length() < HISTOGRAMCLASSES) {
                bucket++;
                jArray.put(emptyHistogramBucket(lowerBound, step, bucket, isLogarithmic));
            }
        }
    } catch (SQLException e) {
        //NOTE(review): errors are only logged; the caller then receives a
        //partial or empty histogram
        e.printStackTrace();
    } catch (JSONException e) {
        e.printStackTrace();
    }
    return jArray;
}

/**
 * Maps a raw class boundary to its display value; log-scaled bounds are
 * converted back to a speed in kbit (assumes the *_log columns hold values
 * in [0,1] -- TODO confirm against the database schema).
 */
private double scaleHistogramBound(double rawBound, boolean isLogarithmic) {
    return isLogarithmic ? Math.pow(10, rawBound * 4) * 10 : rawBound;
}

/** Builds a histogram class with zero results for the given bucket number. */
private JSONObject emptyHistogramBucket(long lowerBound, double step, int bucket, boolean isLogarithmic) throws JSONException {
    JSONObject jBucket = new JSONObject();
    jBucket.put("lower_bound", scaleHistogramBound(lowerBound + step * (bucket - 1), isLogarithmic));
    jBucket.put("upper_bound", scaleHistogramBound(lowerBound + step * bucket, isLogarithmic));
    jBucket.put("results", 0);
    return jBucket;
}
/**
 * Gets a JSON-String containing all open-data-values of all rows
 * that matched the given criteria
 * @param whereClause the where-clause to use
 * @param searchValues the values for the columns which the user wants to filter
 * @param orderClause the ORDER BY fragment built by formatOrderClause(), may be empty
 * @param offset a offset-value for paging (given as "next-cursor" in the response), -1 if none is set
 * @param maxrows maximum number of rows to return, clamped to [1, MAXROWS]
 * @return the JSON response (results, next_cursor, duration_ms) as a String
 */
private String getSearchResult(String whereClause, Queue<Map.Entry<String, FieldType>> searchValues, String orderClause, long offset, long maxrows) {
long startTime = System.currentTimeMillis();
//two pagination strategies: keyset pagination on t.uid for the default
//order, plain OFFSET when the client requested an explicit sort order
String offsetString = (offset>0)? " AND t.uid<"+offset:""; //if no sorting is used
String offsetString2 = (offset>0)? " OFFSET "+offset:""; //if sorting is used => may have concurrency issues in the results
boolean defaultOrder = true;
if (orderClause == null || orderClause.isEmpty()) {
orderClause = " ORDER BY t.uid DESC ";
offsetString2 = "";
} else {
defaultOrder = false;
offsetString = "";
}
//clamp the requested row count to sane values
if (maxrows>MAXROWS)
maxrows = MAXROWS;
if (maxrows <= 0)
maxrows = DEFAULTROWS;
//There are many LEFT JOINs in the sql statement that are usual not needed.
//This has no significant impact on the performance since our DBMS (postgres)
//is intelligent enough to ignore these during query optimization if they are
//not needed
final String sql = "SELECT" +
" t.uid as cursor, " + //only for pagination
//'P'/'O' prefixes distinguish open_uuid from open_test_uuid values
" ('P' || t.open_uuid) open_uuid," +
" ('O' || t.open_test_uuid) open_test_uuid," +
" to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') \"time\"," +
//" nt.group_name cat_technology," +
//" nt.name network_type," +
" t.geo_lat lat," +
" t.geo_long long," +
//" t.geo_provider loc_src," +
//" t.zip_code," +
" t.speed_download download_kbit," +
" t.speed_upload upload_kbit," +
" (t.ping_median::float / 1000000) ping_ms," +
" t.signal_strength," +
" t.lte_rsrp," +
//" ts.name server_name," +
//" duration test_duration," +
//" num_threads," +
" CONCAT(plattform,' ',network_group_name) as platform, " +
" COALESCE(adm.fullname, t.model) model," +
//pick the most specific provider name that is available
" COALESCE(prov.shortname, mprov.shortname, msim.shortname,msim.name,"
+ " prov.name, mprov.name, network_operator_name, t.public_ip_as_name, network_sim_operator) provider_name " +
//" client_software_version client_version," +
//" network_operator network_mcc_mnc," +
//" network_operator_name network_name," +
//" network_sim_operator sim_mcc_mnc," +
//" nat_type \"connection\"," +
//" public_ip_asn asn," +
//" client_public_ip_anonymized ip_anonym," +
//" (ndt.s2cspd*1000)::int ndt_download_kbit," +
//" (ndt.c2sspd*1000)::int ndt_upload_kbit" +
" FROM test t" +
" LEFT JOIN network_type nt ON nt.uid=t.network_type" +
" LEFT JOIN device_map adm ON adm.codename=t.model" +
" LEFT JOIN test_server ts ON ts.uid=t.server_id" +
" LEFT JOIN provider prov ON provider_id = prov.uid " +
" LEFT JOIN provider mprov ON mobile_provider_id = mprov.uid" +
" LEFT JOIN mccmnc2name msim ON mobile_sim_id = msim.uid" + //TODO: finalize migration to msim/mnwk
" WHERE " +
" (t.deleted = false)" +
((this.excludeImplausible)?" AND implausible = false":"") +
" AND status = 'FINISHED' " + whereClause + offsetString +
orderClause + " LIMIT " + maxrows + offsetString2;
final String[] columns;
PreparedStatement ps = null;
ResultSet rs = null;
final JSONObject response = new JSONObject();
final JSONArray resultList = new JSONArray();
try
{
ps = conn.prepareStatement(sql);
//fill in values for WHERE
ps = fillInWhereClause(ps, searchValues, 1);
//Logger.getLogger(OpenTestResource.class.getName()).log(Level.INFO, "prepstmt" + ps);
if (!ps.execute())
return null;
rs = ps.getResultSet();
long lastUID = 0; //remember last uid for pagination since rs can only be traversed in one direction
while (rs.next())
{
final JSONObject jsonItem = new JSONObject();
for (int i=0;i<openDataFieldsSummary.length;i++) {
final Object obj = rs.getObject(openDataFieldsSummary[i]);
if (obj==null) {
jsonItem.put(openDataFieldsSummary[i], JSONObject.NULL);
} else if (openDataNumberFields.contains(openDataFieldsSummary[i])) {
final String tmp = obj.toString().trim();
if (tmp.isEmpty())
jsonItem.put(openDataFieldsSummary[i], JSONObject.NULL);
else
//stringToValue converts numeric strings into Integer/Long/Double
jsonItem.put(openDataFieldsSummary[i], JSONObject.stringToValue(tmp));
} else {
jsonItem.put(openDataFieldsSummary[i], obj.toString());
}
}
lastUID = rs.getLong("cursor");
resultList.put(jsonItem);
}
//if there are more results than we send, use pagination
if (resultList.length() == maxrows) {
//if it is the standard sort order
if (defaultOrder) {
response.put("next_cursor", lastUID);
} else {
offset = (offset<0) ? 0 : offset;
response.put("next_cursor", offset+maxrows);
}
} else {
response.put("next_cursor", JSONObject.NULL);
}
response.put("results", resultList);
//also put in the result, how long the query took to execute
long elapsedTime = System.currentTimeMillis() - startTime;
response.put("duration_ms",elapsedTime);
}
catch (final JSONException e) {
//NOTE(review): the logger names OpenTestResource, not this class --
//looks like a copy-paste remnant; confirm before changing
Logger.getLogger(OpenTestResource.class.getName()).log(Level.SEVERE, null, e);
} catch (SQLException ex) {
try {
//NOTE(review): SQL failures answer with 404 "invalid parameters";
//400 would arguably fit better -- confirm client expectations
setStatus(Status.CLIENT_ERROR_NOT_FOUND);
response.put("error","invalid parameters");
} catch (JSONException ex1) {
Logger.getLogger(OpenTestResource.class.getName()).log(Level.SEVERE, null, ex1);
}
Logger.getLogger(OpenTestResource.class.getName()).log(Level.SEVERE, null, ex);
}
finally
{
try
{
if (rs != null)
rs.close();
if (ps != null)
ps.close();
}
catch (final SQLException e)
{
Logger.getLogger(OpenTestResource.class.getName()).log(Level.SEVERE, null, e);
}
}
return response.toString();
}
/**
 * Binds the collected search values onto the prepared statement, in the
 * order in which they were enqueued (which matches placeholder order).
 *
 * @param ps the statement whose '?' placeholders are filled
 * @param searchValues attribute values paired with their declared type
 * @param firstField 1-based index of the first placeholder to fill
 * @return the same statement, for chaining
 * @throws SQLException if a setter fails
 */
private static PreparedStatement fillInWhereClause(PreparedStatement ps, Queue<Map.Entry<String, FieldType>> searchValues, int firstField) throws SQLException{
    int index = firstField;
    for (final Map.Entry<String, FieldType> entry : searchValues) {
        final String raw = entry.getKey();
        final FieldType type = entry.getValue();
        //convert the textual value according to its declared field type;
        //SORTBY/SORTORDER/IGNORE entries never reach this queue
        if (type == FieldType.STRING) {
            ps.setString(index, raw);
        } else if (type == FieldType.DATE) {
            ps.setTimestamp(index, new Timestamp(Long.parseLong(raw)));
        } else if (type == FieldType.LONG) {
            ps.setLong(index, Long.parseLong(raw));
        } else if (type == FieldType.DOUBLE) {
            ps.setDouble(index, Double.parseDouble(raw));
        } else if (type == FieldType.UUID) {
            ps.setObject(index, UUID.fromString(raw));
        } else if (type == FieldType.BOOLEAN) {
            ps.setBoolean(index, Boolean.valueOf(raw));
        }
        index++;
    }
    return ps;
}
}