/*
* Encog(tm) Core v3.4 - Java Version
* http://www.heatonresearch.com/encog/
* https://github.com/encog/encog-java-core
* Copyright 2008-2016 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.engine.network.activation;
import org.encog.ml.factory.MLActivationFactory;
import org.encog.util.obj.ActivationUtil;
/**
* The Linear layer is really not an activation function at all. The input is
* simply passed on, unmodified, to the output. This activation function is
* primarily theoretical and of little actual use. Usually an activation
* function that scales between 0 and 1 or -1 and 1 should be used.
*/
/**
 * The Linear layer is really not an activation function at all. The input is
 * simply passed on, unmodified, to the output. This activation function is
 * primarily theoretical and of little actual use. Usually an activation
 * function that scales between 0 and 1 or -1 and 1 should be used.
 */
public class ActivationLinear implements ActivationFunction {

    /**
     * Default empty parameters, shared by all instances. Zero-length, so
     * exposing it publicly cannot lead to observable mutation.
     */
    public static final double[] P = new double[0];

    /**
     * Default empty parameter names, shared by all instances.
     */
    public static final String[] N = new String[0];

    /**
     * Serial id for this class.
     */
    private static final long serialVersionUID = -5356580554235104944L;

    /**
     * The parameters. The linear activation function has no parameters, so
     * this is always the shared empty array.
     */
    private final double[] params;

    /**
     * Construct a linear activation function, with a slope of 1.
     */
    public ActivationLinear() {
        // Reuse the shared empty constant rather than allocating a new
        // zero-length array per instance.
        this.params = P;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final void activationFunction(final double[] x, final int start,
            final int size) {
        // Intentionally empty: linear activation is the identity function,
        // so the input values pass through unmodified.
    }

    /**
     * @return The object cloned.
     */
    @Override
    public final ActivationFunction clone() {
        return new ActivationLinear();
    }

    /**
     * {@inheritDoc}
     *
     * @return Always 1; the derivative of the identity function is constant.
     */
    @Override
    public final double derivativeFunction(final double b, final double a) {
        return 1;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final String[] getParamNames() {
        // Return the shared empty constant instead of allocating a fresh
        // empty array on every call.
        return N;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final double[] getParams() {
        return this.params;
    }

    /**
     * @return Return true, linear has a 1 derivative.
     */
    @Override
    public final boolean hasDerivative() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * <p>The linear activation function has no parameters, so any call will
     * throw {@link ArrayIndexOutOfBoundsException}.
     */
    @Override
    public final void setParam(final int index, final double value) {
        this.params[index] = value;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getFactoryCode() {
        return ActivationUtil.generateActivationFactory(MLActivationFactory.AF_LINEAR, this);
    }

    /**
     * @return The label identifying this activation function.
     */
    @Override
    public String getLabel() {
        return "linear";
    }
}