pub fn gradient<T, F, const N: usize>(f: F, point: [T; N]) -> (T, [T; N])
Compute the gradient of a scalar multivariable function in a single forward pass.
Given a function f: ℝⁿ → ℝ and a point in ℝⁿ, computes both the
function value and its gradient ∇f = [∂f/∂x₁, …, ∂f/∂xₙ] at that
point.
This is the primary high-level API for computing gradients with MultiDual. It automatically seeds the input variables and evaluates the function once.
§Type Parameters
- `T`: The numeric type (typically `f64` or `f32`)
- `F`: A function that takes N `MultiDual` inputs and returns a `MultiDual` output
- `N`: The number of input variables (compile-time constant)
§Arguments
- `f`: The function to differentiate
- `point`: The point at which to evaluate the gradient
§Returns
A tuple (value, gradient) where:
- `value`: The function value f(point)
- `gradient`: The gradient ∇f evaluated at point
§Examples
§Quadratic Function
use autodiff::{MultiDual, gradient};
// f(x, y) = x² + 2xy + y² at (3, 4)
let f = |vars: [MultiDual<f64, 2>; 2]| {
let [x, y] = vars;
let two = MultiDual::constant(2.0);
x * x + two * x * y + y * y
};
let point = [3.0, 4.0];
let (value, grad) = gradient(f, point);
assert_eq!(value, 49.0); // f(3, 4) = 9 + 24 + 16
assert_eq!(grad[0], 14.0); // ∂f/∂x = 2x + 2y = 14
assert_eq!(grad[1], 14.0); // ∂f/∂y = 2x + 2y = 14

§With Transcendental Functions
use autodiff::{MultiDual, gradient};
// f(x, y, z) = x² + y·exp(z) at (1, 2, 0)
let f = |vars: [MultiDual<f64, 3>; 3]| {
let [x, y, z] = vars;
x * x + y * z.exp()
};
let point = [1.0, 2.0, 0.0];
let (value, grad) = gradient(f, point);
assert_eq!(value, 3.0); // 1 + 2·1 = 3
assert_eq!(grad[0], 2.0); // ∂f/∂x = 2x = 2
assert_eq!(grad[1], 1.0); // ∂f/∂y = exp(z) = 1
assert_eq!(grad[2], 2.0); // ∂f/∂z = y·exp(z) = 2

§Rosenbrock Function (optimization benchmark)
use autodiff::{MultiDual, gradient};
// Rosenbrock: f(x, y) = (1-x)² + 100(y-x²)²
let rosenbrock = |vars: [MultiDual<f64, 2>; 2]| {
let [x, y] = vars;
let one = MultiDual::constant(1.0);
let hundred = MultiDual::constant(100.0);
let term1 = one - x;
let term2 = y - x * x;
term1 * term1 + hundred * term2 * term2
};
let point = [1.0, 1.0]; // Global minimum
let (value, grad) = gradient(rosenbrock, point);
assert_eq!(value, 0.0); // Minimum value is 0
assert_eq!(grad[0], 0.0); // Gradient is zero at minimum
assert_eq!(grad[1], 0.0);