Example 2: Skipping a Gradient Component

This example uses the same data as the One-Sided Differences example. Here the second component of the gradient is assumed to be known analytically, so only the first gradient component needs numerical approximation. The entries of the options array specify that numerical differentiation with respect to y_2 is skipped.
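For reference, the function being differentiated and its analytic partial derivatives, as defined in the code below, are

    f(y) = a e^(b y_1) + c y_1 y_2^2
    df/dy_1 = a b e^(b y_1) + c y_2^2
    df/dy_2 = 2 c y_1 y_2

The value of df/dy_2 is supplied directly through the jacobian method of the NumericalDerivatives.Jacobian object.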
import com.imsl.math.*;
import java.text.*;

public class NumericalDerivativesEx2 {
    static int m = 1, n = 2;
    static double a, b, c;
    
    
    public static void main(String args[]) {
        int[] options = new int[n];
        double u;
        double[] y = new double[n];
        double[] valueF = new double[m];
        double[] scale = new double[n];
        double[][] actual = new double[m][n];
        double[] re = new double[2];
    
        // Define data and point of evaluation:
        a = 2.5e6;
        b = 3.4e0;
        c = 4.5e0;
        y[0] = 2.1e0;
        y[1] = 3.2e0;
        
        // u = sqrt(machine epsilon), used to measure the relative
        // errors; one-sided differences are expected to be accurate
        // to a modest multiple of u.
        u = Math.sqrt(2.220446049250313e-016);
        
        // Set scaling:
        scale[0] = 1.e0;
        // Increase the scale to account for the large value of a.
        scale[1] = 8.e3;
        
        // Compute the true values of the partials:
        actual[0][0] = a * b * Math.exp(b * y[0]) + c * y[1] * y[1];
        actual[0][1] = 2 * c * y[0] * y[1];
        
        // One-sided differencing for y_1; differentiation with
        // respect to y_2 is skipped.
        options[0] = NumericalDerivatives.ONE_SIDED;
        options[1] = NumericalDerivatives.SKIP;
        
        // Function value at the point of evaluation, passed to
        // setInitialF below.
        valueF[0] = a * Math.exp(b * y[0]) + c * y[0] * y[1] * y[1];
        
        NumericalDerivatives.Jacobian fcn = new NumericalDerivatives.Jacobian() {
            // Evaluate the function at y; called when forming the
            // numerical difference approximations.
            public double[] f(int varIndex, double[] y) {
                double[] tmp = new double[m];    
                tmp[0] = a * Math.exp(b * y[0]) + c * y[0] * y[1] * y[1]; 
                return tmp;
            }
            
            public double[][] jacobian(double[] y) {
                double[][] tmp = new double[m][n];
                
                // Supply the analytically known second partial.
                // The first entry is left at zero; it is replaced
                // by the numerical approximation.
                tmp[0][1] = 2.e0 * c * y[0] * y[1];
                
                return tmp;
            }
        };
        
        // Configure the differentiation object: differencing methods,
        // scaling factors, and the known function value at y.
        NumericalDerivatives derv = new NumericalDerivatives(fcn);
        derv.setDifferencingMethods(options);
        derv.setScalingFactors(scale);
        derv.setInitialF(valueF);
        double[][] jacobian = derv.evaluateJ(y);
        
        NumberFormat nf = NumberFormat.getInstance();
        nf.setMaximumFractionDigits(2);
        nf.setMinimumFractionDigits(2);
        
        PrintMatrixFormat pmf = new PrintMatrixFormat();
        pmf.setNumberFormat(nf);
        new PrintMatrix("Numerical gradient:").print(pmf, jacobian);
        new PrintMatrix("Analytic gradient:").print(pmf, actual);

        // Convert to relative errors:
        re[0] = (jacobian[0][0] - actual[0][0]) / actual[0][0];
        re[1] = (jacobian[0][1] - actual[0][1]) / actual[0][1];
        
        System.out.println("Relative accuracy:");
        System.out.println("df/dy_1       df/dy_2");
        System.out.printf(" %.2fu        %.2fu\n", re[0]/u, re[1]/u);
        System.out.printf("(%.3e)  (%.3e)\n", re[0], re[1]);
    }
}

Output

     Numerical gradient:
           0            1    
0  10,722,141,696.00  60.48  

     Analytic gradient:
           0            1    
0  10,722,141,353.42  60.48  

Relative accuracy:
df/dy_1       df/dy_2
 2.14u        0.00u
(3.195e-08)  (0.000e+00)
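For IEEE double precision, u = sqrt(eps) is about 1.49e-08, so a relative error of 2.14u is in line with the accuracy one-sided differences can deliver, while the skipped component is exact by construction. The following standalone sketch reproduces the same order of accuracy without the library; the class name and the step choice h = u*|y_1| are illustrative assumptions, not part of the IMSL example.

public class OneSidedCheckEx2 {
    public static void main(String[] args) {
        double a = 2.5e6, b = 3.4e0, c = 4.5e0;
        double y1 = 2.1e0, y2 = 3.2e0;

        double u = Math.sqrt(Math.ulp(1.0)); // sqrt(machine epsilon)
        double h = u * Math.abs(y1);         // one-sided difference step

        // f(y) and its value at the shifted point y_1 + h:
        double f0 = a * Math.exp(b * y1) + c * y1 * y2 * y2;
        double fh = a * Math.exp(b * (y1 + h)) + c * (y1 + h) * y2 * y2;

        double numeric = (fh - f0) / h;  // one-sided approximation of df/dy_1
        double analytic = a * b * Math.exp(b * y1) + c * y2 * y2;

        double relErr = (numeric - analytic) / analytic;
        System.out.printf("df/dy_1 numeric : %.8e%n", numeric);
        System.out.printf("df/dy_1 analytic: %.8e%n", analytic);
        System.out.printf("relative error  : %.3e (%.2fu)%n",
                relErr, Math.abs(relErr) / u);
    }
}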