hyperopt

Tree-of-Parzen-estimators hyperparameter optimization for Rust

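At a glance, TPE splits the trials evaluated so far into a better-scoring group and a worse-scoring group, fits a Parzen (kernel-density) estimator to each, and proposes the next parameter value where the ratio of the two densities is highest:

x_next = argmax over x of l(x) / g(x)

Here l(x) is the density fitted to the better trials and g(x) the density fitted to the rest. This is a generic sketch of the algorithm rather than a description of this crate's internals; l and g are standard TPE notation, not identifiers from the crate's API.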

Examples

Continuous

use std::f64::consts::{FRAC_PI_2, PI};

use approx::assert_abs_diff_eq;
use fastrand::Rng;
use ordered_float::NotNan;

use hyperopt::Optimizer;
use hyperopt::kernel::continuous::Epanechnikov;
use hyperopt::kernel::universal::Uniform;

fn main() {
    let min = NotNan::new(FRAC_PI_2).unwrap();
    let max = NotNan::new(PI + FRAC_PI_2).unwrap();
    let mut optimizer = Optimizer::new(
        min..=max,                       // parameter search limits
        Uniform::with_bounds(min..=max), // prior estimator: start from a uniform guess over the same range
        Rng::with_seed(42),
    );

    // Run 50 trials of the cosine function, looking for its minimum at the point `(π, -1)`:
    for _ in 0..50 {
        // Generate a new trial using the Epanechnikov kernel, with `NotNan<f64>`
        // as both the parameter and the density type:
        let x = optimizer.new_trial::<Epanechnikov<NotNan<f64>>>();
        
        // Report the evaluated metric back to the optimizer:
        optimizer.feed_back(x, NotNan::new(x.cos()).unwrap());
    }

    let best_trial = optimizer.best_trial().unwrap();
    assert_abs_diff_eq!(best_trial.parameter.into_inner(), PI, epsilon = 0.05);
    assert_abs_diff_eq!(best_trial.metric.into_inner(), -1.0, epsilon = 0.01);
}

Discrete

use fastrand::Rng;
use ordered_float::OrderedFloat;

use hyperopt::Optimizer;
use hyperopt::kernel::discrete::Binomial;
use hyperopt::kernel::universal::Uniform;

fn main() {
    let mut optimizer = Optimizer::new(
        -100..=100,
        Uniform::with_bounds(-100..=100),
        Rng::with_seed(42),
    );

    for _ in 0..30 {
        // Use the binomial kernel with `i32` as the parameter type
        // and `OrderedFloat<f64>` as the density type:
        let x = optimizer.new_trial::<Binomial<i32, OrderedFloat<f64>>>();
        
        // Minimize the parabola x² - 4x, which attains its minimum of -4 at x = 2:
        // https://www.wolframalpha.com/input?i=x%5E2+-+4x
        optimizer.feed_back(x, x * x - 4 * x);
    }

    let best_trial = optimizer.best_trial().unwrap();
    assert_eq!(best_trial.parameter, 2);
    assert_eq!(best_trial.metric, -4);
}

Features

  • ordered-float enables support for the OrderedFloat and NotNan wrapper types from the ordered-float crate (see the Cargo.toml sketch below)
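
A minimal Cargo.toml sketch for pulling in the crate with this feature enabled; the version number is only illustrative, so substitute the latest release:

[dependencies]
# "ordered-float" is the feature named above; the version shown is an example.
hyperopt = { version = "0.0.17", features = ["ordered-float"] }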
