Java Examples for org.encog.ml.data.basic.BasicMLData

The following Java examples will help you to understand the usage of org.encog.ml.data.basic.BasicMLData. These source code samples are taken from different open-source projects.

Example 1
Project: encog-java-core-master  File: BasicNetwork.java View source code
/**
 * Feeds the supplied input vector through the flat network and returns the
 * computed output vector.
 * 
 * @param input
 *            the input pattern to present to the network
 * @return the network's output for the given input
 */
@Override
public MLData compute(final MLData input) {
    try {
        // Size the result to the flat network's output layer, then compute in place.
        final MLData output = new BasicMLData(this.structure.getFlat().getOutputCount());
        this.structure.getFlat().compute(input.getData(), output.getData());
        return output;
    } catch (final ArrayIndexOutOfBoundsException cause) {
        // An index error here almost always means the caller's input vector does
        // not match the network's input layer size.
        throw new NeuralNetworkError("Index exception: there was likely a mismatch between layer sizes, or the size of the input presented to the network.", cause);
    }
}
Example 2
Project: shifu-master  File: CommonUtils.java View source code
/**
     * Assemble map data to Encog standard input format. If no variable selected(noVarSel = true), all candidate
     * variables will be selected.
     * 
     * @param binCategoryMap
     *            categorical map (column number -> category value -> bin index)
     * @param noVarSel
     *            if after var select
     * @param modelConfig
     *            model config instance
     * @param columnConfigList
     *            column config list
     * @param rawNsDataMap
     *            raw NSColumn data
     * @param cutoff
     *            cut off value
     * @param alg
     *            algorithm used in model
     * @return data pair instance
     * @throws NullPointerException
     *             if input is null
     * @throws NumberFormatException
     *             if column value is not number format.
     */
public static MLDataPair assembleNsDataPair(Map<Integer, Map<String, Integer>> binCategoryMap, boolean noVarSel, ModelConfig modelConfig, List<ColumnConfig> columnConfigList, Map<NSColumn, String> rawNsDataMap, double cutoff, String alg) {
    double[] ideal = { Constants.DEFAULT_IDEAL_VALUE };
    List<Double> inputList = new ArrayList<Double>();
    for (ColumnConfig config : columnConfigList) {
        if (config == null) {
            continue;
        }
        NSColumn key = new NSColumn(config.getColumnName());
        if (config.isFinalSelect() && !rawNsDataMap.containsKey(key)) {
            throw new IllegalStateException(String.format("Variable Missing in Test Data: %s", key));
        }
        if (config.isTarget()) {
            continue;
        }
        // Before variable selection (noVarSel) every good candidate is used;
        // afterwards only final-selected columns are. The normalization of a
        // selected column is identical in both cases (previously duplicated).
        boolean useColumn = noVarSel
                ? (!config.isMeta() && !config.isTarget() && CommonUtils.isGoodCandidate(config))
                : (!config.isMeta() && !config.isTarget() && config.isFinalSelect());
        if (useColumn) {
            String val = rawNsDataMap.get(key) == null ? null : rawNsDataMap.get(key).toString();
            inputList.add(normalizeColumnValue(binCategoryMap, modelConfig, cutoff, alg, config, val));
        }
    }
    // Double[] cannot be cast to double[] and toArray() does not unbox,
    // so copy element by element.
    int size = inputList.size();
    double[] input = new double[size];
    for (int i = 0; i < size; i++) {
        input[i] = inputList.get(i);
    }
    return new BasicMLDataPair(new BasicMLData(input), new BasicMLData(ideal));
}

/**
     * Normalizes one column value. For categorical columns in tree models the value maps to its
     * bin index (-1 when the category is unseen, i.e. treated as missing); all other columns use
     * numeric normalization.
     */
private static Double normalizeColumnValue(Map<Integer, Map<String, Integer>> binCategoryMap, ModelConfig modelConfig, double cutoff, String alg, ColumnConfig config, String val) {
    if (CommonUtils.isTreeModel(alg) && config.isCategorical()) {
        Integer index = binCategoryMap.get(config.getColumnNum()).get(val == null ? "" : val);
        // not in binCategories: treat as missing value, encoded as -1
        return index == null ? Double.valueOf(-1d) : Double.valueOf(index * 1d);
    }
    return computeNumericNormResult(modelConfig, cutoff, config, val);
}
Example 3
Project: encog-java-examples-master  File: OCR.java View source code
/**
	 * Used to map neurons to actual letters.
	 * 
	 * @return The current mapping between neurons and letters as an array,
	 *         where '?' marks a neuron no sample classified to.
	 */
char[] mapNeurons() {
    final char[] neuronToLetter = new char[this.letterListModel.size()];
    // Start every slot as unknown.
    for (int n = 0; n < neuronToLetter.length; n++) {
        neuronToLetter[n] = '?';
    }
    for (int i = 0; i < this.letterListModel.size(); i++) {
        final SampleData sample = (SampleData) this.letterListModel.getElementAt(i);
        // Flatten the 5x7 downsampled glyph into a bipolar (.5/-.5) input vector.
        final MLData pattern = new BasicMLData(5 * 7);
        int cell = 0;
        for (int y = 0; y < sample.getHeight(); y++) {
            for (int x = 0; x < sample.getWidth(); x++) {
                pattern.setData(cell++, sample.getData(x, y) ? .5 : -.5);
            }
        }
        // Tag the winning output neuron with this sample's letter.
        neuronToLetter[this.net.classify(pattern)] = sample.getLetter();
    }
    return neuronToLetter;
}
Example 4
Project: dailyBot-master  File: NeuralNetworkAnalysis.java View source code
/**
 * Trains a feed-forward network on a strategy's historical signal records,
 * printing per-row predictions for the training set and a held-out
 * validation set (20% of the shuffled records).
 *
 * @param id            strategy whose records to train on
 * @param currency      currency pair filter for the records
 * @param isBuy         buy/sell flag (NOTE(review): currently unused by this method — confirm intent)
 * @param iterations    minutes/iterations passed to the console trainer
 * @param middleNeurons hidden-layer size
 * @return the trained network
 * @throws IOException if loading the records fails
 */
private static BasicNetwork generateNeuralNetwork(StrategyId id, Pair currency, boolean isBuy, int iterations, int middleNeurons) throws IOException {
    List<SignalHistoryRecord> allEntries = Utils.getStrategyRecords(id, currency);
    Collections.shuffle(allEntries);
    // Hold out 20% of the (shuffled) records for validation.
    int validationSize = (int) Math.round(allEntries.size() * 0.2);
    double[][] inputs = new double[allEntries.size() - validationSize][];
    double[][] outputs = new double[allEntries.size() - validationSize][];
    double[][] inputsValidation = new double[validationSize][];
    double[][] outputsValidation = new double[validationSize][];
    int index = 0;
    int indexValidation = 0;
    for (SignalHistoryRecord entry : allEntries) {
        if (index < inputs.length) {
            inputs[index] = entry.getCharacteristics();
            outputs[index++] = entry.getOutput();
        } else {
            inputsValidation[indexValidation] = entry.getCharacteristics();
            outputsValidation[indexValidation++] = entry.getOutput();
        }
    }
    test(inputs, outputs);
    // FIX: honor the middleNeurons parameter (was hard-coded to 50).
    BasicNetwork network = EncogUtility.simpleFeedForward(inputs[0].length, middleNeurons, 0, outputs[0].length, true);
    MLDataSet trainingSet = new BasicMLDataSet(inputs, outputs);
    System.out.println("Neural Network Results:");
    // FIX: honor the iterations parameter (was hard-coded to 1).
    EncogUtility.trainConsole(network, trainingSet, iterations);
    printPredictions(network, inputs, outputs);
    System.out.println("Validation:");
    printPredictions(network, inputsValidation, outputsValidation);
    return network;
}

/** Prints, for each row, the network's prediction next to the ideal output. */
private static void printPredictions(BasicNetwork network, double[][] inputs, double[][] outputs) {
    for (int i = 0; i < inputs.length; i++) {
        final MLData output = network.compute(new BasicMLData(inputs[i]));
        System.out.println(Arrays.toString(inputs[i]) + ", actual=" + output.getData(0) + ", ideal=" + outputs[i][0]);
    }
}
Example 5
Project: jskat-multimodule-master  File: EncogNetworkWrapperTest.java View source code
/**
	 * Tests the {@link BasicNetwork} directly with an XOR example.
	 */
@Test
@Ignore
public final void testXORDirect() {
    // 2-3-1 sigmoid network with bias on every layer.
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    network.getStructure().finalizeStructure();
    network.reset();
    double[][] input = { { 1.0, 1.0 }, { 1.0, 0.0 }, { 0.0, 1.0 }, { 0.0, 0.0 } };
    double[][] output = { // A XOR B
    { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
    BasicMLDataSet trainingSet = new BasicMLDataSet();
    for (int row = 0; row < input.length; row++) {
        trainingSet.add(new BasicMLDataPair(new BasicMLData(input[row]), new BasicMLData(output[row])));
    }
    double error = 1000.0;
    int pairIndex = 0;
    int iteration = 0;
    // Train one pattern at a time, cycling through the set, until the error
    // target is reached or the iteration budget is exhausted.
    while (error > MIN_DIFF && iteration < MAX_ITERATIONS) {
        pairIndex = (pairIndex + 1) % trainingSet.size();
        Backpropagation trainer = new Backpropagation(network, new BasicMLDataSet(Arrays.asList(trainingSet.get(pairIndex))));
        trainer.setBatchSize(1);
        trainer.iteration();
        error = trainer.getError();
        iteration++;
    }
    if (iteration == MAX_ITERATIONS) {
        fail("Needed more than " + MAX_ITERATIONS + " iterations. Error: " + error);
    } else {
        log.debug("Needed " + iteration + " iterations to learn.");
        log.debug("Testing network:");
        for (int n = 0; n < input.length; n++) {
            log.debug("Input: " + input[n][0] + " " + input[n][1] + " Expected output: " + output[n][0] + " Predicted output: " + network.compute(new BasicMLData(input[n])));
        }
    }
}
Example 6
Project: encog-java-workbench-master  File: CreateTrainingData.java View source code
/**
 * Asks the user for slope/intercept/range parameters and writes a CSV
 * training file of points sampled from the line y = m*x + b.
 *
 * @param name the file name to create inside the current project directory
 */
public static void generateLinear(String name) {
    CreateLinearTrainingDialog dialog = new CreateLinearTrainingDialog(EncogWorkBench.getInstance().getMainWindow());
    if (!dialog.process()) {
        // User cancelled the dialog; nothing to generate.
        return;
    }
    final double xBegin = dialog.getxBegin().getValue();
    final double xEnd = dialog.getxEnd().getValue();
    final int elements = dialog.getElements().getValue();
    final double intercept = dialog.getIntercept().getValue();
    final double slope = dialog.getSlope().getValue();
    // Evenly spaced sample step across [xBegin, xEnd).
    final double step = (xEnd - xBegin) / elements;
    final MLDataSet trainingData = new BasicMLDataSet();
    for (int i = 0; i < elements; i++) {
        final double x = xBegin + (i * step);
        final double y = (slope * x) + intercept;
        trainingData.add(new BasicMLData(new double[] { x }), new BasicMLData(new double[] { y }));
    }
    final File targetFile = new File(EncogWorkBench.getInstance().getProjectDirectory(), name);
    EncogUtility.saveCSV(targetFile, CSVFormat.ENGLISH, trainingData);
}
Example 7
Project: guagua-master  File: NNWorker.java View source code
/**
 * Reads one raw text record, parses it into (ideal, inputs) arrays, and adds it —
 * optionally replicated {@code scale} times — to either the training or the testing
 * set (random 50/50 split per copy).
 *
 * <p>The first column of the record is the ideal (target) value; the remaining
 * columns are the input features.
 */
@Override
public void load(GuaguaWritableAdapter<LongWritable> currentKey, GuaguaWritableAdapter<Text> currentValue, WorkerContext<NNParams, NNParams> workerContext) {
    ++this.count;
    if ((this.count) % 100000 == 0) {
        LOG.info("Read {} records.", this.count);
    }
    // use guava Splitter to iterate over the record's columns only once
    double[] ideal = new double[1];
    int inputNodes = NumberFormatUtils.getInt(workerContext.getProps().getProperty(NNConstants.GUAGUA_NN_INPUT_NODES), NNConstants.GUAGUA_NN_DEFAULT_INPUT_NODES);
    double[] inputs = new double[inputNodes];
    int i = 0;
    for (String input : Splitter.on(NNConstants.NN_DEFAULT_COLUMN_SEPARATOR).split(currentValue.getWritable().toString())) {
        if (i == 0) {
            // first column is the target value
            ideal[i++] = NumberFormatUtils.getDouble(input, 0.0d);
        } else {
            int inputsIndex = (i++) - 1;
            if (inputsIndex >= inputNodes) {
                // ignore any extra trailing columns
                break;
            }
            inputs[inputsIndex] = NumberFormatUtils.getDouble(input, 0.0d);
        }
    }
    if (i < (inputNodes + 1)) {
        throw new GuaguaRuntimeException(String.format("Not enough data columns, input nodes setting:%s, data column:%s", inputNodes, i));
    }
    int scale = NumberFormatUtils.getInt(workerContext.getProps().getProperty(NNConstants.NN_RECORD_SCALE), 1);
    for (int j = 0; j < scale; j++) {
        // Reuse the parsed arrays for the first copy; clone them for replicas so
        // each MLDataPair owns its own backing arrays. This also drops the old
        // pointless self-arraycopy on the j == 0 iteration.
        double[] tmpInputs = inputs;
        double[] tmpIdeal = ideal;
        if (j > 0) {
            tmpInputs = new double[inputs.length];
            tmpIdeal = new double[ideal.length];
            System.arraycopy(inputs, 0, tmpInputs, 0, inputs.length);
            // FIX: the ideal value was never copied into replicas, leaving their
            // target silently zeroed for every j > 0 copy.
            System.arraycopy(ideal, 0, tmpIdeal, 0, ideal.length);
        }
        MLDataPair pair = new BasicMLDataPair(new BasicMLData(tmpInputs), new BasicMLData(tmpIdeal));
        double r = Math.random();
        if (r >= 0.5d) {
            this.trainingData.add(pair);
        } else {
            this.testingData.add(pair);
        }
    }
}
Example 8
Project: AutoEncoder-master  File: AutoEncoder.java View source code
/**
 * Loads the given samples into the data set. Because this is an auto-encoder,
 * each sample serves as both the input and the ideal output of its pair.
 *
 * @param p training samples, one row per example
 */
public void setData(double[][] p) {
    for (double[] sample : p) {
        dataset.add(new BasicMLDataPair(new BasicMLData(sample), new BasicMLData(sample)));
    }
}