rbert

A Rust wrapper for BERT sentence transformers implemented in Candle.

Usage

use kalosm_language_model::Embedder;
use rbert::*;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
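    // Load the default BERT embedding model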
    let mut bert = Bert::new().await?;
    let sentences = [
        "Cats are cool",
        "The geopolitical situation is dire",
        "Pets are great",
        "Napoleon was a tyrant",
        "Napoleon was a great general",
    ];
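    // Embed every sentence in one batched call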
    let embeddings = bert.embed_batch(sentences).await?;
    println!("embeddings {:?}", embeddings);

    // Compute the cosine similarity between every pair of sentences
    let mut similarities = vec![];
    let n_sentences = sentences.len();
    for (i, e_i) in embeddings.iter().enumerate() {
        for j in (i + 1)..n_sentences {
            let e_j = &embeddings[j];
            let cosine_similarity = e_j.cosine_similarity(e_i);
            similarities.push((cosine_similarity, i, j));
        }
    }
    // Sort and print the pairs from most to least similar
    similarities.sort_by(|u, v| v.0.total_cmp(&u.0));
    for &(score, i, j) in similarities.iter() {
        println!("score: {score:.2} '{}' '{}'", sentences[i], sentences[j]);
    }

    Ok(())
}
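
The score printed for each pair is the standard cosine similarity: the dot product of the two embedding vectors divided by the product of their magnitudes, a value in [-1, 1] where higher means the sentences are more semantically similar. As a reference, here is a minimal standalone sketch of that formula over raw vectors; the free function below is purely illustrative, and the example above uses the cosine_similarity method that rbert's embeddings provide.

/// Cosine similarity of two non-zero vectors: dot(a, b) / (|a| * |b|).
fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
    let dot: f32 = a.iter().zip(b).map(|(x, y)| x * y).sum();
    let norm_a = a.iter().map(|x| x * x).sum::<f32>().sqrt();
    let norm_b = b.iter().map(|x| x * x).sum::<f32>().sqrt();
    dot / (norm_a * norm_b)
}

fn main() {
    let a = [1.0_f32, 0.0];
    let b = [0.7_f32, 0.7];
    // Prints roughly 0.71: similar but not identical directions
    println!("{:.2}", cosine_similarity(&a, &b));
}

With the example sentences above, related pairs such as "Cats are cool" and "Pets are great" should score noticeably higher than unrelated pairs like "Cats are cool" and "Napoleon was a tyrant".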
