/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math3.ml.neuralnet.sofm.util;

import org.apache.commons.math3.exception.NotStrictlyPositiveException;
import org.apache.commons.math3.exception.NumberIsTooLargeException;
import org.apache.commons.math3.util.FastMath;

/**
 * Exponential decay function: <code>a e<sup>-x / b</sup></code>,
 * where {@code x} is the (integer) independent variable.
 * <br/>
 * Class is immutable.
 *
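 * <p>
 * Illustrative usage (a minimal sketch; the parameter values are arbitrary,
 * chosen only for this example):
 * <pre>{@code
 * // Decays from 1 down to 0.1 over the first 1000 calls.
 * ExponentialDecayFunction decay = new ExponentialDecayFunction(1.0, 0.1, 1000);
 * double atStart = decay.value(0);    // 1.0
 * double atLimit = decay.value(1000); // ~0.1 (up to floating-point rounding)
 * }</pre>
 *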
 * @since 3.3
 */
public class ExponentialDecayFunction {
    /** Factor {@code a}. */
    private final double a;
    /** Factor {@code 1 / b}. */
    private final double oneOverB;

    /**
     * Creates an instance. It will be such that
     * <ul>
     *  <li>{@code a = initValue}</li>
     *  <li>{@code b = -numCall / ln(valueAtNumCall / initValue)}</li>
     * </ul>
     *
     * @param initValue Initial value, i.e. {@link #value(long) value(0)}.
     * @param valueAtNumCall Value of the function at {@code numCall}.
     * @param numCall Argument for which the function returns
     * {@code valueAtNumCall}.
     * @throws NotStrictlyPositiveException if {@code initValue <= 0}.
     * @throws NotStrictlyPositiveException if {@code valueAtNumCall <= 0}.
     * @throws NumberIsTooLargeException if {@code valueAtNumCall >= initValue}.
     * @throws NotStrictlyPositiveException if {@code numCall <= 0}.
     */
    public ExponentialDecayFunction(double initValue,
                                    double valueAtNumCall,
                                    long numCall) {
        if (initValue <= 0) {
            throw new NotStrictlyPositiveException(initValue);
        }
        if (valueAtNumCall <= 0) {
            throw new NotStrictlyPositiveException(valueAtNumCall);
        }
        if (valueAtNumCall >= initValue) {
            throw new NumberIsTooLargeException(valueAtNumCall, initValue, false);
        }
        if (numCall <= 0) {
            throw new NotStrictlyPositiveException(numCall);
        }

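        // From valueAtNumCall = initValue * exp(-numCall / b), it follows that
        // b = -numCall / ln(valueAtNumCall / initValue); store 1 / b directly.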
        a = initValue;
        oneOverB = -FastMath.log(valueAtNumCall / initValue) / numCall;
    }

    /**
     * Computes <code>a e<sup>-numCall / b</sup></code>.
     *
     * @param numCall Current step of the training task.
     * @return the value of the function at {@code numCall}.
     */
    public double value(long numCall) {
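        // a * exp(-numCall / b), with 1 / b precomputed in the constructor.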
        return a * FastMath.exp(-numCall * oneOverB);
    }
}