/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math4.neuralnet.sofm.util;

import java.util.function.LongToDoubleFunction;

import org.apache.commons.math4.neuralnet.internal.NeuralNetException;

/**
 * Exponential decay function: <code>a e<sup>-x / b</sup></code>,
 * where {@code x} is the (integer) independent variable.
 * <br>
 * Class is immutable.
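 *
 * <p>Usage sketch (illustrative numbers, not taken from the library's
 * documentation): a schedule that starts at 0.1 and decays to 0.01 by
 * call 1000.</p>
 * <pre>{@code
 * LongToDoubleFunction decay = new ExponentialDecayFunction(0.1, 0.01, 1000);
 * decay.applyAsDouble(0);    // 0.1
 * decay.applyAsDouble(1000); // 0.01 (up to floating-point rounding)
 * }</pre>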
 *
 * @since 3.3
 */
public class ExponentialDecayFunction implements LongToDoubleFunction {
    /** Factor {@code a}. */
    private final double a;
    /** Factor {@code 1 / b}. */
    private final double oneOverB;

    /**
     * Creates an instance. It will be such that
     * <ul>
     *  <li>{@code a = initValue}</li>
     *  <li>{@code b = -numCall / ln(valueAtNumCall / initValue)}</li>
     * </ul>
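     * <p>
     * For instance (illustrative numbers, not from the original
     * documentation), {@code initValue = 1}, {@code valueAtNumCall = 0.5}
     * and {@code numCall = 100} yield
     * {@code b = -100 / ln(0.5) = 100 / ln(2)} (approximately 144.27),
     * so the returned value is halved every 100 calls.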
     *
     * @param initValue Initial value, i.e. {@link #applyAsDouble(long) applyAsDouble(0)}.
     * @param valueAtNumCall Value of the function at {@code numCall}.
     * @param numCall Argument for which the function returns
     * {@code valueAtNumCall}.
     * @throws IllegalArgumentException if {@code initValue <= 0},
     * {@code valueAtNumCall <= 0}, {@code valueAtNumCall >= initValue} or
     * {@code numCall <= 0}.
     */
    public ExponentialDecayFunction(double initValue,
                                    double valueAtNumCall,
                                    long numCall) {
        if (initValue <= 0) {
            throw new NeuralNetException(NeuralNetException.NOT_STRICTLY_POSITIVE, initValue);
        }
        if (valueAtNumCall <= 0) {
            throw new NeuralNetException(NeuralNetException.NOT_STRICTLY_POSITIVE, valueAtNumCall);
        }
        if (valueAtNumCall >= initValue) {
            throw new NeuralNetException(NeuralNetException.TOO_LARGE, valueAtNumCall, initValue);
        }
        if (numCall <= 0) {
            throw new NeuralNetException(NeuralNetException.NOT_STRICTLY_POSITIVE, numCall);
        }

        a = initValue;
        oneOverB = -Math.log(valueAtNumCall / initValue) / numCall;
    }

    /**
     * Computes <code>a e<sup>-numCall / b</sup></code>.
     *
     * @param numCall Current step of the training task.
     * @return the value of the function at {@code numCall}.
     */
    @Override
    public double applyAsDouble(long numCall) {
        return a * Math.exp(-numCall * oneOverB);
    }
}