/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math4.neuralnet.sofm;

import org.apache.commons.math4.neuralnet.internal.NeuralNetException;
import org.apache.commons.math4.neuralnet.sofm.util.ExponentialDecayFunction;
import org.apache.commons.math4.neuralnet.sofm.util.QuasiSigmoidDecayFunction;

/**
 * Factory for creating instances of {@link LearningFactorFunction}.
 *
 * @since 3.3
 */
public final class LearningFactorFunctionFactory {
    /** Class contains only static methods. */
    private LearningFactorFunctionFactory() {}

    /**
     * Creates an exponential decay {@link LearningFactorFunction function}.
     * It will compute <code>a e<sup>-x / b</sup></code>,
     * where {@code x} is the (integer) independent variable and
     * <ul>
     *  <li><code>a = initValue</code></li>
     *  <li><code>b = -numCall / ln(valueAtNumCall / initValue)</code></li>
     * </ul>
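     * <p>
     * Example usage (the parameter values below are purely illustrative,
     * not a recommendation): a learning rate that starts at 0.9 and decays
     * to 0.05 after 1000 calls.
     * </p>
     * <pre>{@code
     * LearningFactorFunction f =
     *     LearningFactorFunctionFactory.exponentialDecay(0.9, 0.05, 1000);
     * f.value(0);    // 0.9  (initValue)
     * f.value(1000); // 0.05 (valueAtNumCall)
     * }</pre>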
     *
     * @param initValue Initial value, i.e.
     * {@link LearningFactorFunction#value(long) value(0)}.
     * @param valueAtNumCall Value of the function at {@code numCall}.
     * @param numCall Argument for which the function returns
     * {@code valueAtNumCall}.
     * @return the learning factor function.
     * @throws IllegalArgumentException if {@code initValue <= 0},
     * {@code initValue > 1}, {@code valueAtNumCall <= 0},
     * {@code valueAtNumCall >= initValue} or {@code numCall <= 0}.
     */
    public static LearningFactorFunction exponentialDecay(final double initValue,
                                                           final double valueAtNumCall,
                                                           final long numCall) {
        if (initValue <= 0 ||
            initValue > 1) {
            throw new NeuralNetException(NeuralNetException.OUT_OF_RANGE, initValue, 0, 1);
        }

        return new LearningFactorFunction() {
            /** DecayFunction. */
            private final ExponentialDecayFunction decay
                = new ExponentialDecayFunction(initValue, valueAtNumCall, numCall);

            /** {@inheritDoc} */
            @Override
            public double value(long n) {
                return decay.applyAsDouble(n);
            }
        };
    }

    /**
     * Creates a sigmoid-like {@link LearningFactorFunction function}.
     * The function {@code f} will have the following properties:
     * <ul>
     *  <li>{@code f(0) = initValue}</li>
     *  <li>{@code numCall} is the inflexion point</li>
     *  <li>{@code slope = f'(numCall)}</li>
     * </ul>
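     * <p>
     * Example usage (the parameter values below are purely illustrative,
     * not a recommendation): a learning rate that starts at 0.9, with the
     * inflexion point at call 1000 where the derivative is {@code -1e-2}.
     * </p>
     * <pre>{@code
     * LearningFactorFunction f =
     *     LearningFactorFunctionFactory.quasiSigmoidDecay(0.9, -1e-2, 1000);
     * f.value(0); // 0.9 (initValue)
     * }</pre>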
     *
     * @param initValue Initial value, i.e.
     * {@link LearningFactorFunction#value(long) value(0)}.
     * @param slope Value of the function derivative at {@code numCall}.
     * @param numCall Inflexion point.
     * @return the learning factor function.
     * @throws IllegalArgumentException if {@code initValue <= 0},
     * {@code initValue > 1}, {@code slope >= 0} or {@code numCall <= 0}.
     */
    public static LearningFactorFunction quasiSigmoidDecay(final double initValue,
                                                            final double slope,
                                                            final long numCall) {
        if (initValue <= 0 ||
            initValue > 1) {
            throw new NeuralNetException(NeuralNetException.OUT_OF_RANGE, initValue, 0, 1);
        }

        return new LearningFactorFunction() {
            /** DecayFunction. */
            private final QuasiSigmoidDecayFunction decay
                = new QuasiSigmoidDecayFunction(initValue, slope, numCall);

            /** {@inheritDoc} */
            @Override
            public double value(long n) {
                return decay.applyAsDouble(n);
            }
        };
    }
}