/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math3.optimization.direct;

import java.util.Comparator;

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.exception.NullArgumentException;
import org.apache.commons.math3.optimization.GoalType;
import org.apache.commons.math3.optimization.ConvergenceChecker;
import org.apache.commons.math3.optimization.PointValuePair;
import org.apache.commons.math3.optimization.SimpleValueChecker;
import org.apache.commons.math3.optimization.MultivariateOptimizer;
import org.apache.commons.math3.optimization.OptimizationData;

/**
 * This class implements simplex-based direct search optimization.
 *
 * <p>
 *  Direct search methods only use objective function values; they do not
 *  need derivatives and do not try to compute approximations of the
 *  derivatives. According to a 1996 paper by Margaret H. Wright
 *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
 *  Search Methods: Once Scorned, Now Respectable</a>), they are used
 *  when the computation of the derivative is either impossible (noisy
 *  functions, unpredictable discontinuities) or difficult (complexity,
 *  computation cost). In the first case, rather than an optimum, a
 *  <em>not too bad</em> point is desired. In the latter case, an
 *  optimum is desired but cannot be reasonably found. In all cases
 *  direct search methods can be useful.
 * </p>
 * <p>
 *  Simplex-based direct search methods compare the objective function
 *  values at the vertices of a simplex (a set of n+1 points in dimension n)
 *  that is updated by the algorithm's steps.
 * </p>
 * <p>
 *  The {@link #setSimplex(AbstractSimplex) setSimplex} method <em>must</em>
 *  be called prior to calling the {@code optimize} method.
 * </p>
 * <p>
 *  Each call to {@link #optimize(int,MultivariateFunction,GoalType,double[])
 *  optimize} will re-use the start configuration of the current simplex and
 *  move it such that its first vertex is at the provided start point of the
 *  optimization. If the {@code optimize} method is called to solve a different
 *  problem and the number of parameters changes, the simplex must be
 *  re-initialized to one with the appropriate dimensions.
 * </p>
 * <p>
 *  Convergence is checked by providing the <em>worst</em> points of the
 *  previous and current simplex to the convergence checker, not the best
 *  ones.
 * </p>
 * <p>
 *  This simplex optimizer implementation does not directly support constrained
 *  optimization with simple bounds; for such optimizations, either a more
 *  dedicated algorithm such as {@link CMAESOptimizer} or {@link BOBYQAOptimizer}
 *  must be used, or the objective function must be wrapped in an adapter such
 *  as {@link MultivariateFunctionMappingAdapter} or
 *  {@link MultivariateFunctionPenaltyAdapter}.
 * </p>
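 * <p>
 *  A minimal usage sketch follows; the quadratic objective function, the
 *  convergence thresholds and the simplex step sizes are purely illustrative
 *  values, not recommendations:
 * </p>
 * <pre>{@code
 * // Optimizer with example relative/absolute convergence thresholds.
 * SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-30);
 * // A Nelder-Mead simplex with illustrative step sizes; the simplex must be
 * // set before "optimize" is called.
 * optimizer.setSimplex(new NelderMeadSimplex(new double[] { 0.2, 0.2 }));
 * // Objective function: a simple bowl with its minimum at (1, 1).
 * MultivariateFunction f = new MultivariateFunction() {
 *     public double value(double[] x) {
 *         final double dx = x[0] - 1;
 *         final double dy = x[1] - 1;
 *         return dx * dx + dy * dy;
 *     }
 * };
 * PointValuePair optimum =
 *     optimizer.optimize(1000, f, GoalType.MINIMIZE, new double[] { -1, 1 });
 * }</pre>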
 *
 * @see AbstractSimplex
 * @see MultivariateFunctionMappingAdapter
 * @see MultivariateFunctionPenaltyAdapter
 * @see CMAESOptimizer
 * @see BOBYQAOptimizer
 * @deprecated As of 3.1 (to be removed in 4.0).
 * @since 3.0
 */
@SuppressWarnings("boxing") // deprecated anyway
@Deprecated
public class SimplexOptimizer
    extends BaseAbstractMultivariateOptimizer<MultivariateFunction>
    implements MultivariateOptimizer {
    /** Simplex. */
    private AbstractSimplex simplex;

    /**
     * Constructor using a default {@link SimpleValueChecker convergence
     * checker}.
     * @deprecated See {@link SimpleValueChecker#SimpleValueChecker()}
     */
    @Deprecated
    public SimplexOptimizer() {
        this(new SimpleValueChecker());
    }

    /**
     * @param checker Convergence checker.
     */
    public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     */
    public SimplexOptimizer(double rel, double abs) {
        this(new SimpleValueChecker(rel, abs));
    }

    /**
     * Set the simplex algorithm.
     *
     * @param simplex Simplex.
     * @deprecated As of 3.1. The initial simplex can now be passed as an
     * argument of the {@link #optimize(int,MultivariateFunction,GoalType,OptimizationData[])}
     * method.
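     * For example (a minimal sketch; {@code optimizer} is a {@code SimplexOptimizer}
     * instance, {@code f} an arbitrary {@link MultivariateFunction},
     * {@link NelderMeadSimplex} one possible {@link AbstractSimplex}
     * implementation, and the numerical values purely illustrative):
     * <pre>{@code
     * PointValuePair optimum =
     *     optimizer.optimize(1000, f, GoalType.MINIMIZE,
     *                        new NelderMeadSimplex(new double[] { 0.2, 0.2 }),
     *                        new InitialGuess(new double[] { -1, 1 }));
     * }</pre>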
     */
    @Deprecated
    public void setSimplex(AbstractSimplex simplex) {
        parseOptimizationData(simplex);
    }

    /**
     * Optimize an objective function.
     *
     * @param maxEval Allowed number of evaluations of the objective function.
     * @param f Objective function.
     * @param goalType Optimization type.
     * @param optData Optimization data. The following data will be looked for:
     * <ul>
     *  <li>{@link org.apache.commons.math3.optimization.InitialGuess InitialGuess}</li>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     * @return the point/value pair giving the optimal value of the objective
     * function.
     */
    @Override
    protected PointValuePair optimizeInternal(int maxEval, MultivariateFunction f,
                                              GoalType goalType,
                                              OptimizationData... optData) {
        // Scan "optData" for the input specific to this optimizer.
        parseOptimizationData(optData);

        // The parent's method will retrieve the common parameters from
        // "optData" and call "doOptimize".
        return super.optimizeInternal(maxEval, f, goalType, optData);
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data. The following data will be looked for:
     * <ul>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     */
    private void parseOptimizationData(OptimizationData... optData) {
        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof AbstractSimplex) {
                simplex = (AbstractSimplex) data;
                continue;
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    protected PointValuePair doOptimize() {
        if (simplex == null) {
            throw new NullArgumentException();
        }

        // Indirect call to "computeObjectiveValue" in order to update the
        // evaluations counter.
        final MultivariateFunction evalFunc
            = new MultivariateFunction() {
                /** {@inheritDoc} */
                public double value(double[] point) {
                    return computeObjectiveValue(point);
                }
            };

        final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
        final Comparator<PointValuePair> comparator
            = new Comparator<PointValuePair>() {
            /** {@inheritDoc} */
            public int compare(final PointValuePair o1,
                               final PointValuePair o2) {
                final double v1 = o1.getValue();
                final double v2 = o2.getValue();
                return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
            }
        };

        // Initialize search.
        simplex.build(getStartPoint());
        simplex.evaluate(evalFunc, comparator);

        PointValuePair[] previous = null;
        int iteration = 0;
        final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
        while (true) {
            if (iteration > 0) {
                // Check convergence: every vertex of the current simplex must
                // have converged with respect to the corresponding vertex of
                // the previous iteration.
                boolean converged = true;
                for (int i = 0; i < simplex.getSize(); i++) {
                    PointValuePair prev = previous[i];
                    converged = converged &&
                        checker.converged(iteration, prev, simplex.getPoint(i));
                }
                if (converged) {
                    // We have found an optimum.
                    return simplex.getPoint(0);
                }
            }

            // We still need to search.
            previous = simplex.getPoints();
            simplex.iterate(evalFunc, comparator);
            ++iteration;
        }
    }
}