/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math4.neuralnet.sofm;

import org.apache.commons.math4.neuralnet.internal.NeuralNetException;
import org.apache.commons.math4.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math4.neuralnet.sofm.util.QuasiSigmoidDecayFunction;

/**
 * Factory for creating instances of {@link LearningFactorFunction}.
 *
 * @since 3.3
 */
public final class LearningFactorFunctionFactory {
    /** Class contains only static methods. */
    private LearningFactorFunctionFactory() {}

    /**
     * Creates an exponential decay {@link LearningFactorFunction function}.
     * It will compute <code>a e<sup>-x / b</sup></code>,
     * where {@code x} is the (integer) independent variable and
     * <ul>
     *  <li><code>a = initValue</code>
     *  <li><code>b = -numCall / ln(valueAtNumCall / initValue)</code>
     * </ul>
     *
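     * <p>
     * For illustration only, the sketch below decays the learning rate from
     * {@code 0.9} down to {@code 0.05} over {@code 1000} calls; the numeric
     * values are arbitrary sample settings, not recommendations:
     * </p>
     * <pre>{@code
     * LearningFactorFunction learning =
     *     LearningFactorFunctionFactory.exponentialDecay(0.9, 0.05, 1000);
     * learning.value(0);    // 0.9 (initValue)
     * learning.value(1000); // approximately 0.05 (valueAtNumCall, up to rounding)
     * }</pre>
     *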
     * @param initValue Initial value, i.e.
     * {@link LearningFactorFunction#value(long) value(0)}.
     * @param valueAtNumCall Value of the function at {@code numCall}.
     * @param numCall Argument for which the function returns
     * {@code valueAtNumCall}.
     * @return the learning factor function.
     * @throws IllegalArgumentException if {@code initValue <= 0},
     * {@code initValue > 1}, {@code valueAtNumCall <= 0},
     * {@code valueAtNumCall >= initValue} or {@code numCall <= 0}.
     */
    public static LearningFactorFunction exponentialDecay(final double initValue,
                                                          final double valueAtNumCall,
                                                          final long numCall) {
        if (initValue <= 0 ||
            initValue > 1) {
            throw new NeuralNetException(NeuralNetException.OUT_OF_RANGE, initValue, 0, 1);
        }

        return new LearningFactorFunction() {
            /** DecayFunction. */
            private final ExponentialDecayFunction decay
                = new ExponentialDecayFunction(initValue, valueAtNumCall, numCall);

            /** {@inheritDoc} */
            @Override
            public double value(long n) {
                return decay.applyAsDouble(n);
            }
        };
    }

    /**
     * Creates a sigmoid-like {@link LearningFactorFunction function}.
     * The function {@code f} will have the following properties:
     * <ul>
     *  <li>{@code f(0) = initValue}</li>
     *  <li>{@code numCall} is the inflexion point</li>
     *  <li>{@code slope = f'(numCall)}</li>
     * </ul>
     *
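     * <p>
     * For illustration only, the sketch below uses arbitrary sample settings
     * (start at {@code 0.9}, inflexion point after {@code 1000} calls, slope
     * {@code -1e-4} there); they are not recommendations:
     * </p>
     * <pre>{@code
     * LearningFactorFunction learning =
     *     LearningFactorFunctionFactory.quasiSigmoidDecay(0.9, -1e-4, 1000);
     * learning.value(0); // 0.9 (initValue, by construction)
     * }</pre>
     *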
     * @param initValue Initial value, i.e.
     * {@link LearningFactorFunction#value(long) value(0)}.
     * @param slope Value of the function derivative at {@code numCall}.
     * @param numCall Inflexion point.
     * @return the learning factor function.
     * @throws IllegalArgumentException if {@code initValue <= 0},
     * {@code initValue > 1}, {@code slope >= 0} or {@code numCall <= 0}.
     */
    public static LearningFactorFunction quasiSigmoidDecay(final double initValue,
                                                           final double slope,
                                                           final long numCall) {
        if (initValue <= 0 ||
            initValue > 1) {
            throw new NeuralNetException(NeuralNetException.OUT_OF_RANGE, initValue, 0, 1);
        }

        return new LearningFactorFunction() {
            /** DecayFunction. */
            private final QuasiSigmoidDecayFunction decay
                = new QuasiSigmoidDecayFunction(initValue, slope, numCall);

            /** {@inheritDoc} */
            @Override
            public double value(long n) {
                return decay.applyAsDouble(n);
            }
        };
    }
}