#chat-completion #highly #opinionated #structured #model #inc #kind

kind-openai

Highly opinionated OpenAI API wrapper crate. By Kindness Inc.

25 releases

new 0.3.7 Dec 10, 2024
0.3.6 Dec 10, 2024
0.3.4 Nov 23, 2024
0.2.1 Nov 14, 2024
0.1.13 Aug 30, 2024

#558 in Web programming

Download history 680/week @ 2024-08-19 329/week @ 2024-08-26 174/week @ 2024-09-02 35/week @ 2024-09-09 209/week @ 2024-09-16 162/week @ 2024-09-23 205/week @ 2024-09-30 23/week @ 2024-10-07 27/week @ 2024-10-14 3/week @ 2024-10-28 9/week @ 2024-11-04 215/week @ 2024-11-11 746/week @ 2024-11-18 101/week @ 2024-11-25 22/week @ 2024-12-02

1,085 downloads per month

MIT license

35KB
586 lines

Kind OpenAI

An incomplete and highly opinionated OpenAI API wrapper for Rust.

Featuring:

  • Strongly typed structured chat completions with a derive macro to automatically generate schemas
  • A vastly simplified interface to the API that gives easy access to all common operations
  • Gentler error handling for things like model refusals
  • Friendly-to-construct API types, thanks to bon.

Quickly add OpenAI to your project with:

[dependencies]
kind-openai = "0.3.7"

Links:

Before using, I highly recommend reading the OpenAISchema derive macro docs.

Example

//! Run this example with `OPENAI_API_KEY=<your-api-key>`

use kind_openai::{
    endpoints::chat::{ChatCompletion, Model},
    system_message, user_message, EnvironmentAuthTokenProvider, OpenAI, OpenAISchema,
};
use serde::Deserialize;
use serde_repr::Deserialize_repr;

#[derive(Deserialize, OpenAISchema, Debug)]
/// The name.
// NOTE(review): the `///` doc comments on this type and its fields appear to be
// consumed by the `OpenAISchema` derive as JSON-schema descriptions (not just
// rustdoc) — confirm against the derive macro docs before editing them.
pub struct Name {
    /// The first name. No matter what, prefix this first name with `Mr. `.
    pub first_name: Option<String>,
    // Renamed on the wire: the deserialized JSON (and presumably the generated
    // schema) uses the key `last_name_renamed`, not `last_name`.
    #[serde(rename = "last_name_renamed")]
    pub last_name: Option<String>,
    // `skip` excludes this field from (de)serialization entirely; on
    // deserialization it is filled with `Default::default()` (empty String).
    // Presumably also omitted from the generated schema — TODO confirm.
    #[serde(skip)]
    pub absolutely_nothing: String,
}

#[derive(Deserialize, OpenAISchema, Debug)]
/// The niceness score.
// Wrapper struct composing two other schema types; demonstrates that
// `OpenAISchema` types can nest (both field types also derive `OpenAISchema`).
pub struct NicenessScoreContainer {
    // 1..=10 rating — see the `NicenessScore` enum below.
    pub niceness_score: NicenessScore,
    // Coarse classification of the message — see the `Category` enum below.
    pub category: Category,
}

#[derive(Deserialize_repr, OpenAISchema, Debug)]
#[repr(u8)]
/// How nice the message is between 1 and 10.
// `Deserialize_repr` + `repr(u8)` means this deserializes from the integer
// discriminant (1..=10) rather than from the variant name string.
pub enum NicenessScore {
    One = 1,
    Two = 2,
    Three = 3,
    Four = 4,
    Five = 5,
    Six = 6,
    Seven = 7,
    Eight = 8,
    Nine = 9,
    Ten = 10,
}

#[derive(Deserialize, OpenAISchema, Debug)]
/// The category of the message that's being inquired about.
// Plain (non-repr) enum: serde deserializes this from the variant name string
// ("Question" / "Statement" / "Answer"), unlike `NicenessScore` above.
pub enum Category {
    Question,
    Statement,
    Answer,
}

#[tokio::main]
async fn main() {
    // Client pulls its API key from the environment via the token provider.
    let openai = OpenAI::new(EnvironmentAuthTokenProvider);

    // --- 1. Structured extraction: pull a `Name` out of free-form text. ---
    // Kept named `name`: the `user_message!` macro captures it by interpolation.
    let name = "John";

    let extraction_request = ChatCompletion::model(Model::Gpt4oMini)
        .temperature(0.1)
        .messages(vec![
            system_message!("Extract the first and last name from the provided message."),
            user_message!("Hello, my name is {name}."),
        ])
        .structured::<Name>();

    let extracted_name = openai
        .req(&extraction_request)
        .await
        .expect("Failed to get response")
        .take_first_choice()
        .expect("No choices")
        .message()
        .expect("Model generated a refusal");

    println!("{:?}", extracted_name);

    // --- 2. Structured scoring of a friendly message (gpt-4o-mini). ---
    // Kept named `niceness_score_message`: captured by `user_message!` below.
    let niceness_score_message = "Wow, that new shirt you are wearing is really nice.";
    let scoring_request = ChatCompletion::model(Model::Gpt4oMini)
        .messages(vec![
            system_message!("Rate the niceness score of the provided message"),
            user_message!("{niceness_score_message}"),
        ])
        .temperature(0.0)
        .structured::<NicenessScoreContainer>();

    let friendly_score = openai
        .req(&scoring_request)
        .await
        .expect("Failed to get response")
        .take_first_choice()
        .expect("No choices")
        .message()
        .expect("Model generated a refusal");

    println!("{:?}", friendly_score);

    // --- 3. Same scoring task on a hostile message, using full gpt-4o. ---
    let niceness_score_message = "What?????? How???";
    let hostile_request = ChatCompletion::model(Model::Gpt4o)
        .messages(vec![
            system_message!("Rate the niceness score of the provided message"),
            user_message!("{niceness_score_message}"),
        ])
        .temperature(0.0)
        .structured::<NicenessScoreContainer>();

    let hostile_score = openai
        .req(&hostile_request)
        .await
        .expect("Failed to get response")
        .take_first_choice()
        .expect("No choices")
        .message()
        .expect("Model generated a refusal");

    println!("{:?}", hostile_score);
}

Dependencies

~5–6.5MB
~123K SLoC