30 stable releases
Uses the Rust 2024 edition
new 3.7.9 | May 6, 2025 |
---|---|
3.6.8 | May 5, 2025 |
3.6.7 | Apr 26, 2025 |
2.6.4 | Apr 21, 2025 |
0.1.1 | Apr 8, 2025 |
#559 in Web programming
2,732 downloads per month
60KB
830 lines
Overview
A Rust library to use Google's Gemini API. It is extremely flexible and modular to integrate with any framework.
For example, since Actix can stream a response body from any Stream of Result<Bytes, Error>,
you can obtain such a stream directly from this library instead of writing a painful wrapper around the response stream yourself.
Features
- Automatic context management
- Vision support, so the model can see images
- Code execution by Gemini
- File reading like PDF or any document, even audio files like MP3
- Function call support
- Inbuilt markdown-to-parts parser that enables the AI to see images or files referenced in markdown, even ones stored on your device!
Basic usage
use gemini_client_api::gemini::{
ask::Gemini,
types::request::{SystemInstruction, Tool},
types::sessions::Session,
utils::MarkdownToParts,
};
use futures::StreamExt;
use serde_json::json;
/// Multi-turn chat demo plus the markdown-to-parts parser, which turns a
/// markdown image reference into an inline image part so the model can see it
/// (works with remote URLs and local file paths alike).
async fn see_markdown() {
    let mut session = Session::new(6);
    let ai = Gemini::new(
        std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY not found"),
        "gemini-2.0-flash",
        None,
    );

    // Fixed typo in the example prompt ("healty" -> "healthy").
    let response1 = ai
        .ask(session.ask_string("Hi, can you tell me which one of two bowls has more healthy item?"))
        .await
        .unwrap();
    // Question and reply both get stored automatically in `session` for context.
    println!("{}", response1.get_text(""));

    // NOTE(review): the original example string had lost its markdown image
    // reference during extraction; restored with a placeholder path. The
    // closure supplies the MIME type for each referenced file.
    // Can even read from file paths of files on your device!
    let parser = MarkdownToParts::new(
        "Here is their ![image](./bowls.png). Thanks by the way",
        |_| "image/png".to_string(),
    )
    .await;
    let parts = parser.process();

    let response2 = ai.ask(session.ask(parts)).await.unwrap();
    println!("{}", response2.get_text(""));
}
/// One-shot JSON-mode request: classifies a list of words as positive or
/// negative according to a response schema. `set_remember_reply(false)` keeps
/// the model's reply out of the session history.
async fn ask_string_for_json() {
    let mut session = Session::new(6);
    session.set_remember_reply(false);
    let response = Gemini::new(
        std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY not found"),
        "gemini-2.0-flash-lite",
        // Fixed typo in the system prompt ("Calssify" -> "Classify").
        Some(SystemInstruction::from_str("Classify the given words")),
    )
    // JSON mode: the model must answer with an object matching this schema.
    .set_json_mode(json!({
        "type": "object",
        "properties": {
            "positive": {
                "type": "array",
                "items": {"type": "string"}
            },
            "negative": {
                "type": "array",
                "items": {"type": "string"}
            }
        }
    }))
    // Fixed the example word list: a comma was missing between "Brilliance"
    // and "Fear", making the embedded JSON array invalid.
    .ask(session.ask_string(r#"["Joy", "Success", "Love", "Hope", "Confidence", "Peace", "Victory", "Harmony", "Inspiration", "Gratitude", "Prosperity", "Strength", "Freedom", "Comfort", "Brilliance", "Fear", "Failure", "Hate", "Doubt", "Pain", "Suffering", "Loss", "Anxiety", "Despair", "Betrayal", "Weakness", "Chaos", "Misery", "Frustration", "Darkness"]"#))
    .await
    .unwrap();
    println!("{}", response.get_text(""));
}
/// Streams a reply chunk-by-chunk, then takes the session back to read the
/// accumulated full reply.
async fn ask_streamed() {
    let mut session = Session::new(6);
    session.ask_string("How are you");

    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY not found");
    let client = Gemini::new(api_key, "gemini-2.5-pro-exp-03-25", None);

    // The session is moved into the stream; we reclaim it once streaming ends.
    let mut stream = client
        .ask_as_stream(session, |_, gemini_response| gemini_response)
        .await
        .unwrap();
    while let Some(chunk) = stream.next().await {
        println!("{}", chunk.unwrap().get_text(""));
    }

    session = stream.get_session_owned();
    println!("Complete reply: {}", session.get_last_message_text("").unwrap());
}
/// Streams a reply while the code-execution tool lets Gemini run code to
/// answer; prints the final stored message as pretty JSON afterwards.
async fn ask_streamed_with_tools() {
    let mut session = Session::new(6);
    session.ask_string("find sum of first 100 prime number using code");

    let api_key = std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY not found");
    let mut client = Gemini::new(api_key, "gemini-2.0-flash", None);
    client.set_tools(Some(vec![Tool::code_execution(json!({}))]));

    let mut stream = client
        .ask_as_stream(session, |_, gemini_response| gemini_response)
        .await
        .unwrap();
    while let Some(chunk) = stream.next().await {
        // Errors on individual chunks are skipped rather than panicking.
        if let Ok(chunk) = chunk {
            println!("{}", chunk.get_text(""));
        }
    }

    println!(
        "Complete reply: {:#?}",
        json!(stream.get_session().get_last_message().unwrap())
    );
}
Dependencies
~8–21MB
~282K SLoC