/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17  package org.apache.commons.math4.legacy.fitting.leastsquares;
18  
19  import org.apache.commons.math4.legacy.analysis.differentiation.FiniteDifferencesDifferentiator;
20  import org.apache.commons.math4.legacy.analysis.differentiation.UnivariateVectorFunctionDifferentiator;
21  import org.apache.commons.math4.legacy.linear.DiagonalMatrix;
22  import org.apache.commons.math4.legacy.linear.RealMatrix;
23  import org.apache.commons.math4.legacy.linear.RealVector;
24  import org.apache.commons.math4.legacy.optim.SimpleVectorValueChecker;
25  import org.apache.commons.math4.core.jdkmath.JdkMath;
26  import org.junit.Assert;
27  import org.junit.Test;
28  
29  public class DifferentiatorVectorMultivariateJacobianFunctionTest {
30  
31      private static final int POINTS = 20;
32      private static final double STEP_SIZE = 0.2;
33  
34      private final UnivariateVectorFunctionDifferentiator differentiator = new FiniteDifferencesDifferentiator(POINTS, STEP_SIZE);
35      private final LeastSquaresOptimizer optimizer = this.getOptimizer();
36  
37      public LeastSquaresBuilder base() {
38          return new LeastSquaresBuilder()
39                  .checkerPair(new SimpleVectorValueChecker(1e-6, 1e-6))
40                  .maxEvaluations(100)
41                  .maxIterations(getMaxIterations());
42      }
43  
44      public LeastSquaresBuilder builder(BevingtonProblem problem, boolean automatic){
45          if(automatic) {
46              return base()
47                      .model(new DifferentiatorVectorMultivariateJacobianFunction(problem.getModelFunction(), differentiator));
48          } else {
49              return base()
50                      .model(problem.getModelFunction(), problem.getModelFunctionJacobian());
51          }
52      }
53  
54      public int getMaxIterations() {
55          return 25;
56      }
57  
58      public LeastSquaresOptimizer getOptimizer() {
59          return new LevenbergMarquardtOptimizer();
60      }
61  
62      /**
63       * Non-linear test case: fitting of decay curve (from Chapter 8 of
64       * Bevington's textbook, "Data reduction and analysis for the physical sciences").
65       */
66      @Test
67      public void testBevington() {
68  
69          // the analytical optimum to compare to
70          final LeastSquaresOptimizer.Optimum analyticalOptimum = findBevington(false);
71          final RealVector analyticalSolution = analyticalOptimum.getPoint();
72          final RealMatrix analyticalCovarianceMatrix = analyticalOptimum.getCovariances(1e-14);
73  
74          // the automatic DifferentiatorVectorMultivariateJacobianFunction optimum
75          final LeastSquaresOptimizer.Optimum automaticOptimum = findBevington(true);
76          final RealVector automaticSolution = automaticOptimum.getPoint();
77          final RealMatrix automaticCovarianceMatrix = automaticOptimum.getCovariances(1e-14);
78  
79          final int numParams = analyticalOptimum.getPoint().getDimension();
80  
81          // Check that the automatic solution is within the reference error range.
82          for (int i = 0; i < numParams; i++) {
83              final double error = JdkMath.sqrt(analyticalCovarianceMatrix.getEntry(i, i));
84              Assert.assertEquals("Parameter " + i, analyticalSolution.getEntry(i), automaticSolution.getEntry(i), error);
85          }
86  
87          // Check that each entry of the computed covariance matrix is within 1%
88          // of the reference analytical matrix entry.
89          for (int i = 0; i < numParams; i++) {
90              for (int j = 0; j < numParams; j++) {
91                  Assert.assertEquals("Covariance matrix [" + i + "][" + j + "]",
92                          analyticalCovarianceMatrix.getEntry(i, j),
93                          automaticCovarianceMatrix.getEntry(i, j),
94                          JdkMath.abs(0.01 * analyticalCovarianceMatrix.getEntry(i, j)));
95              }
96          }
97  
98          // Check various measures of goodness-of-fit.
99          final double tol = 1e-40;
100         Assert.assertEquals(analyticalOptimum.getChiSquare(), automaticOptimum.getChiSquare(), tol);
101         Assert.assertEquals(analyticalOptimum.getCost(), automaticOptimum.getCost(), tol);
102         Assert.assertEquals(analyticalOptimum.getRMS(), automaticOptimum.getRMS(), tol);
103         Assert.assertEquals(analyticalOptimum.getReducedChiSquare(automaticOptimum.getPoint().getDimension()), automaticOptimum.getReducedChiSquare(automaticOptimum.getPoint().getDimension()), tol);
104     }
105 
106     /**
107      * Build the problem and return the optimum, doesn't actually test the results.
108      *
109      * Pass in if you want to test using analytical derivatives,
110      * or the automatic {@link DifferentiatorVectorMultivariateJacobianFunction}
111      *
112      * @param automatic automatic {@link DifferentiatorVectorMultivariateJacobianFunction}, as opposed to analytical
113      * @return the optimum for this test
114      */
115     private LeastSquaresOptimizer.Optimum findBevington(boolean automatic) {
116         final double[][] dataPoints = {
117                 // column 1 = times
118                 { 15, 30, 45, 60, 75, 90, 105, 120, 135, 150,
119                         165, 180, 195, 210, 225, 240, 255, 270, 285, 300,
120                         315, 330, 345, 360, 375, 390, 405, 420, 435, 450,
121                         465, 480, 495, 510, 525, 540, 555, 570, 585, 600,
122                         615, 630, 645, 660, 675, 690, 705, 720, 735, 750,
123                         765, 780, 795, 810, 825, 840, 855, 870, 885, },
124                 // column 2 = measured counts
125                 { 775, 479, 380, 302, 185, 157, 137, 119, 110, 89,
126                         74, 61, 66, 68, 48, 54, 51, 46, 55, 29,
127                         28, 37, 49, 26, 35, 29, 31, 24, 25, 35,
128                         24, 30, 26, 28, 21, 18, 20, 27, 17, 17,
129                         14, 17, 24, 11, 22, 17, 12, 10, 13, 16,
130                         9, 9, 14, 21, 17, 13, 12, 18, 10, },
131         };
132         final double[] start = {10, 900, 80, 27, 225};
133 
134         final BevingtonProblem problem = new BevingtonProblem();
135 
136         final int len = dataPoints[0].length;
137         final double[] weights = new double[len];
138         for (int i = 0; i < len; i++) {
139             problem.addPoint(dataPoints[0][i],
140                     dataPoints[1][i]);
141 
142             weights[i] = 1 / dataPoints[1][i];
143         }
144 
145         return optimizer.optimize(
146                 builder(problem, automatic)
147                         .target(dataPoints[1])
148                         .weight(new DiagonalMatrix(weights))
149                         .start(start)
150                         .build()
151         );
152     }
153 }