/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math4.optim.nonlinear.scalar.noderiv;

import java.util.Comparator;

import org.apache.commons.math4.analysis.MultivariateFunction;
import org.apache.commons.math4.exception.MathUnsupportedOperationException;
import org.apache.commons.math4.exception.NullArgumentException;
import org.apache.commons.math4.exception.util.LocalizedFormats;
import org.apache.commons.math4.optim.ConvergenceChecker;
import org.apache.commons.math4.optim.OptimizationData;
import org.apache.commons.math4.optim.PointValuePair;
import org.apache.commons.math4.optim.SimpleValueChecker;
import org.apache.commons.math4.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math4.optim.nonlinear.scalar.MultivariateOptimizer;

/**
 * This class implements simplex-based direct search optimization.
 *
 * <p>
 *  Direct search methods only use objective function values; they do
 *  not need derivatives and do not attempt to compute approximations
 *  of the derivatives. According to a 1996 paper by Margaret H. Wright
 *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
 *  Search Methods: Once Scorned, Now Respectable</a>), they are used
 *  when the computation of the derivative is either impossible (noisy
 *  functions, unpredictable discontinuities) or difficult (complexity,
 *  computation cost). In the first case, rather than an optimum, a
 *  <em>not too bad</em> point is desired. In the latter case, an
 *  optimum is desired but cannot reasonably be found. In all cases
 *  direct search methods can be useful.
 * </p>
 * <p>
 *  Simplex-based direct search methods compare the objective function
 *  values at the vertices of a simplex (a set of n+1 points in
 *  dimension n) that is updated by the algorithm's steps.
 * </p>
 * <p>
 *  The simplex update procedure ({@link NelderMeadSimplex} or
 *  {@link MultiDirectionalSimplex}) must be passed to the
 *  {@code optimize} method, as in the example below.
 * </p>
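 * <p>
 *  As a minimal usage sketch (the function, thresholds, initial guess and
 *  step sizes below are illustrative placeholders, not prescribed values):
 * </p>
 * <pre>{@code
 * // Objective function to be minimized (any user-supplied function works).
 * MultivariateFunction objective = point -> point[0] * point[0] + point[1] * point[1];
 *
 * SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-30);
 * PointValuePair optimum =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(objective),
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(new double[] { 1, 1 }),
 *                        new NelderMeadSimplex(new double[] { 0.2, 0.2 }));
 * double[] solution = optimum.getPoint();
 * }</pre>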
 * <p>
 *  Each call to {@code optimize} will reuse the start configuration of
 *  the current simplex and move it such that its first vertex is at the
 *  provided start point of the optimization.
 *  If the {@code optimize} method is called to solve a different problem
 *  and the number of parameters changes, the simplex must be re-initialized
 *  to one with the appropriate dimensions, as in the example below.
 * </p>
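 * <p>
 *  For instance, reusing the optimizer from the sketch above for a problem in a
 *  different dimension requires supplying a new simplex (values illustrative;
 *  {@code otherObjective} stands for another user-supplied function):
 * </p>
 * <pre>{@code
 * // New problem in dimension 3: pass a freshly built 3-dimensional simplex.
 * PointValuePair optimum3d =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(otherObjective),
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(new double[] { 0, 0, 0 }),
 *                        new NelderMeadSimplex(3));
 * }</pre>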
 * <p>
 *  Convergence is checked by providing the <em>worst</em> points of
 *  previous and current simplex to the convergence checker, not the best
 *  ones.
 * </p>
 * <p>
 *  This simplex optimizer implementation does not directly support constrained
 *  optimization with simple bounds. For such optimizations, either a more
 *  dedicated algorithm such as
 *  {@link CMAESOptimizer} or {@link BOBYQAOptimizer} must be used, or the objective
 *  function must be wrapped in an adapter such as
 *  {@link org.apache.commons.math4.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
 *  MultivariateFunctionMappingAdapter} or
 *  {@link org.apache.commons.math4.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
 *  MultivariateFunctionPenaltyAdapter} (see the sketch below).
 *  <br>
 *  The call to {@link #optimize(OptimizationData[]) optimize} will throw a
 *  {@link MathUnsupportedOperationException} if bounds are passed to it.
 * </p>
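 * <p>
 *  As a sketch of the adapter approach (the names {@code objective}, {@code lower},
 *  {@code upper} and {@code start} stand for a user-supplied function, its bound
 *  arrays and a start point within the bounds; they are not part of this API):
 * </p>
 * <pre>{@code
 * // Wrap the bounded function so that the optimizer works in an unbounded space.
 * MultivariateFunctionMappingAdapter wrapped =
 *     new MultivariateFunctionMappingAdapter(objective, lower, upper);
 * PointValuePair unboundedOptimum =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(wrapped),
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(wrapped.boundedToUnbounded(start)),
 *                        new NelderMeadSimplex(start.length));
 * // Map the unbounded solution back to the original, bounded space.
 * double[] optimum = wrapped.unboundedToBounded(unboundedOptimum.getPoint());
 * }</pre>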
 *
 * @since 3.0
 */
public class SimplexOptimizer extends MultivariateOptimizer {
    /** Simplex update rule. */
    private AbstractSimplex simplex;

    /**
     * @param checker Convergence checker.
     */
    public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     */
    public SimplexOptimizer(double rel, double abs) {
        this(new SimpleValueChecker(rel, abs));
    }

    /**
     * {@inheritDoc}
     *
     * @param optData Optimization data. In addition to those documented in
     * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
     * MultivariateOptimizer}, this method will register the following data:
     * <ul>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     * @return {@inheritDoc}
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData) {
        // Set up base class and perform computation.
        return super.optimize(optData);
    }

    /** {@inheritDoc} */
    @Override
    protected PointValuePair doOptimize() {
        checkParameters();

        // Indirect call to "computeObjectiveValue" in order to update the
        // evaluations counter.
        final MultivariateFunction evalFunc
            = new MultivariateFunction() {
                /** {@inheritDoc} */
                @Override
                public double value(double[] point) {
                    return computeObjectiveValue(point);
                }
            };

        final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
        final Comparator<PointValuePair> comparator
            = new Comparator<PointValuePair>() {
            /** {@inheritDoc} */
            @Override
            public int compare(final PointValuePair o1,
                               final PointValuePair o2) {
                final double v1 = o1.getValue();
                final double v2 = o2.getValue();
                return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
            }
        };

        // Initialize search.
        simplex.build(getStartPoint());
        simplex.evaluate(evalFunc, comparator);

        PointValuePair[] previous = null;
        int iteration = 0;
        final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
        while (true) {
            iteration = getIterations();
            if (iteration > 0) {
                boolean converged = true;
                for (int i = 0; i < simplex.getSize(); i++) {
                    PointValuePair prev = previous[i];
                    converged = converged &&
                        checker.converged(iteration, prev, simplex.getPoint(i));

                    if (!converged) {
                        // Short circuit, since "converged" will stay "false".
                        break;
                    }
                }
                if (converged) {
                    // We have found an optimum.
                    return simplex.getPoint(0);
                }
            }

            // We still need to search.
            previous = simplex.getPoints();
            simplex.iterate(evalFunc, comparator);

            incrementIterationCount();
        }
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     */
    @Override
    protected void parseOptimizationData(OptimizationData... optData) {
        // Allow base class to register its own data.
        super.parseOptimizationData(optData);

        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof AbstractSimplex) {
                simplex = (AbstractSimplex) data;
                // If more data must be parsed, this statement _must_ be
                // changed to "continue".
                break;
            }
        }
    }

    /**
     * @throws MathUnsupportedOperationException if bounds were passed to the
     * {@link #optimize(OptimizationData[]) optimize} method.
     * @throws NullArgumentException if no initial simplex was passed to the
     * {@link #optimize(OptimizationData[]) optimize} method.
     */
    private void checkParameters() {
        if (simplex == null) {
            throw new NullArgumentException();
        }
        if (getLowerBound() != null ||
            getUpperBound() != null) {
            throw new MathUnsupportedOperationException(LocalizedFormats.CONSTRAINT);
        }
    }
}