/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math3.optim.nonlinear.scalar;

import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.OptimizationData;
import org.apache.commons.math3.optim.PointValuePair;
/**
 * Base class for implementing optimizers for multivariate scalar
 * differentiable functions.
 * It contains boilerplate code for dealing with gradient evaluation.
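 * <p>
 * A minimal usage sketch, assuming the concrete
 * {@code NonLinearConjugateGradientOptimizer} subclass from the
 * {@code gradient} subpackage ({@code f} and {@code grad} below are
 * user-supplied placeholders, not part of this class):
 * <pre>{@code
 * MultivariateFunction f = new MultivariateFunction() {
 *     public double value(double[] p) {
 *         // Objective: (x - 1)^2 + (y - 2)^2, minimized at (1, 2).
 *         return (p[0] - 1) * (p[0] - 1) + (p[1] - 2) * (p[1] - 2);
 *     }
 * };
 * MultivariateVectorFunction grad = new MultivariateVectorFunction() {
 *     public double[] value(double[] p) {
 *         // Analytical gradient of f.
 *         return new double[] { 2 * (p[0] - 1), 2 * (p[1] - 2) };
 *     }
 * };
 *
 * GradientMultivariateOptimizer optim =
 *     new NonLinearConjugateGradientOptimizer(
 *         NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
 *         new SimpleValueChecker(1e-10, 1e-10));
 *
 * PointValuePair result = optim.optimize(new MaxEval(1000),
 *                                        new ObjectiveFunction(f),
 *                                        new ObjectiveFunctionGradient(grad),
 *                                        GoalType.MINIMIZE,
 *                                        new InitialGuess(new double[] { 0, 0 }));
 * }</pre>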
 *
 * @since 3.1
 */
public abstract class GradientMultivariateOptimizer
    extends MultivariateOptimizer {
    /**
     * Gradient of the objective function.
     */
    private MultivariateVectorFunction gradient;

    /**
     * @param checker Convergence checker.
     */
    protected GradientMultivariateOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * Compute the gradient vector.
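     * <p>
     * Subclasses typically call this from their {@code doOptimize()}
     * loop; a minimal sketch (not taken from any particular subclass):
     * <pre>{@code
     * double[] point = getStartPoint();
     * double[] g = computeObjectiveGradient(point);
     * // For a minimization, -g is the steepest-descent direction.
     * }</pre>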
     *
     * @param params Point at which the gradient must be evaluated.
     * @return the gradient at the specified point.
     */
    protected double[] computeObjectiveGradient(final double[] params) {
        return gradient.value(params);
    }

    /**
     * {@inheritDoc}
     *
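     * <p>
     * Values set by a previous call are reused when they are not passed
     * again; a sketch (assuming {@code optim}, {@code f} and {@code grad}
     * as in the class-level example):
     * <pre>{@code
     * optim.optimize(new MaxEval(100),
     *                new ObjectiveFunction(f),
     *                new ObjectiveFunctionGradient(grad),
     *                GoalType.MINIMIZE,
     *                new InitialGuess(new double[] { 0, 0 }));
     * // Only the evaluation budget changes here; the objective, gradient,
     * // goal and initial guess registered above are reused.
     * optim.optimize(new MaxEval(200));
     * }</pre>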
     * @param optData Optimization data. In addition to those documented in
     * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
     * MultivariateOptimizer}, this method will register the following data:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     * @return {@inheritDoc}
     * @throws TooManyEvaluationsException if the maximal number of
     * evaluations (of the objective function) is exceeded.
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData)
        throws TooManyEvaluationsException {
        // Set up base class and perform computation.
        return super.optimize(optData);
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     */
    @Override
    protected void parseOptimizationData(OptimizationData... optData) {
        // Allow base class to register its own data.
        super.parseOptimizationData(optData);

        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof ObjectiveFunctionGradient) {
                gradient = ((ObjectiveFunctionGradient) data).getObjectiveFunctionGradient();
                // If more data must be parsed, this statement _must_ be
                // changed to "continue".
                break;
            }
        }
    }
}