Replicate Rust client
An Unofficial Rust client for Replicate
Documentation · Crate · Report Bug · Request Feature
An unofficial Rust client for Replicate. It provides a type-safe interface by deserializing API responses into Rust structs.
Getting Started
Add replicate-rust to your Cargo.toml:
```toml
[dependencies]
replicate-rust = "0.0.5"
```
Grab your token from replicate.com/account and set it as an environment variable:
```sh
export REPLICATE_API_TOKEN=<your token>
```
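The default config reads this variable at runtime (see the comment in the example below). If you prefer to fail early with a clearer message when the variable is missing, a minimal standard-library check (nothing crate-specific is assumed here) could look like this sketch:

```rust
use std::env;

// Sketch: fail early with a readable message if the token is not set.
// Only the standard library is used; the variable name matches the
// `export` command above.
fn require_token() -> String {
    env::var("REPLICATE_API_TOKEN")
        .expect("REPLICATE_API_TOKEN is not set; grab a token from replicate.com/account")
}
```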
Here's an example using replicate_rust to run a model:
```rust
use replicate_rust::{config::Config, Replicate, errors::ReplicateError};

fn main() -> Result<(), ReplicateError> {
    let config = Config::default();
    // Instead of using the default config (which reads the API token from the
    // REPLICATE_API_TOKEN environment variable), you can also set the token directly:
    // let config = Config {
    //     auth: String::from("<your token>"),
    //     ..Default::default()
    // };
    let replicate = Replicate::new(config);

    // Construct the inputs.
    let mut inputs = std::collections::HashMap::new();
    inputs.insert("prompt", "a 19th century portrait of a wombat gentleman");

    let version = "stability-ai/stable-diffusion:27b93a2413e7f36cd83da926f3656280b2931564ff050bf9575f1fdf9bcd7478";

    // Run the model.
    let result = replicate.run(version, inputs)?;

    // Print the result.
    println!("{:?}", result.output);
    // Some(Array [String("https://pbxt.replicate.delivery/QLDGe2rXuIQ9ByMViQEXrYCkKfDi9I3YWAzPwWsDZWMXeN7iA/out-0.png")])

    Ok(())
}
```
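The output above prints as a JSON-style array of image URLs. As a minimal sketch (not part of the crate's documented API), assuming `result.output` is an `Option<serde_json::Value>` as the Debug output suggests, the first URL could be pulled out like this:

```rust
// Continuation of the example above. Assumes `result.output` is an
// `Option<serde_json::Value>`; the `get(0)` and `as_str()` calls come from
// serde_json, not from replicate-rust itself.
if let Some(output) = &result.output {
    if let Some(url) = output.get(0).and_then(|v| v.as_str()) {
        println!("First image URL: {url}");
    }
}
```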
Usage
See the reference docs for detailed API documentation.
Examples
- Run a model in the background (a polling sketch built from these same calls appears after this list):

  ```rust
  // Construct the inputs.
  let mut inputs = std::collections::HashMap::new();
  inputs.insert("prompt", "a 19th century portrait of a wombat gentleman");

  let version = "stability-ai/stable-diffusion:27b93a2413e7f36cd83da926f3656280b2931564ff050bf9575f1fdf9bcd7478";

  // Run the model.
  let mut prediction = replicate.predictions.create(version, inputs)?;

  println!("{:?}", prediction.status);
  // 'starting'

  prediction.reload()?;
  println!("{:?}", prediction.status);
  // 'processing'

  println!("{:?}", prediction.logs);
  // Some("Using seed: 3599
  //  0%|          | 0/50 [00:00<?, ?it/s]
  //  4%|▍         | 2/50 [00:00<00:04, 10.00it/s]
  //  8%|▊         | 4/50 [00:00<00:03, 11.56it/s]

  let prediction = prediction.wait()?;
  println!("{:?}", prediction.status);
  // 'succeeded'
  println!("{:?}", prediction.output);
  ```
- Cancel a prediction:

  ```rust
  // Construct the inputs.
  let mut inputs = std::collections::HashMap::new();
  inputs.insert("prompt", "a 19th century portrait of a wombat gentleman");

  let version = "stability-ai/stable-diffusion:27b93a2413e7f36cd83da926f3656280b2931564ff050bf9575f1fdf9bcd7478";

  // Run the model.
  let mut prediction = replicate.predictions.create(version, inputs)?;

  println!("{:?}", prediction.status);
  // 'starting'

  prediction.cancel()?;
  prediction.reload()?;
  println!("{:?}", prediction.status);
  // 'cancelled'
  ```
- List predictions:

  ```rust
  let predictions = replicate.predictions.list()?;
  println!("{:?}", predictions);
  // ListPredictions { ... }
  ```
- Get model information:

  ```rust
  let model = replicate.models.get("replicate", "hello-world")?;
  println!("{:?}", model);
  // GetModel { ... }
  ```
- List model versions:

  ```rust
  let versions = replicate.models.versions.list("replicate", "hello-world")?;
  println!("{:?}", versions);
  // ListModelVersions { ... }
  ```
- Get model version information:

  ```rust
  let model = replicate.models.versions.get(
      "kvfrans",
      "clipdraw",
      "5797a99edc939ea0e9242d5e8c9cb3bc7d125b1eac21bda852e5cb79ede2cd9b",
  )?;
  println!("{:?}", model);
  // GetModelVersion { ... }
  ```
- Get collection information:

  ```rust
  let collection = replicate.collections.get("audio-generation")?;
  println!("{:?}", collection);
  // GetCollectionModels { ... }
  ```
- List collections:

  ```rust
  let collections = replicate.collections.list()?;
  println!("{:?}", collections);
  // ListCollectionModels { ... }
  ```
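If you need more control than wait() gives you, the same create() and reload() calls from the background example above can drive a hand-rolled polling loop. This is only a sketch: the concrete type of prediction.status is not documented here, so the loop compares its Debug representation against the status strings printed in the examples.

```rust
use std::{thread, time::Duration};

// Illustrative polling loop, continuing the "run a model in the background"
// example above (assumes `prediction` came from `replicate.predictions.create`).
loop {
    // The status strings ('starting', 'processing', 'succeeded', 'failed',
    // 'cancelled') are taken from the Debug output shown in the examples;
    // the real field may be an enum, so this string check is only a stand-in.
    let status = format!("{:?}", prediction.status);
    if status.contains("succeeded") || status.contains("failed") || status.contains("cancelled") {
        break;
    }
    thread::sleep(Duration::from_secs(1));
    prediction.reload()?;
}
println!("{:?}", prediction.output);
```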