mirror of https://github.com/0xplaygrounds/rig
22 lines
611 B
Rust
use rig::providers::ollama;
use rig::streaming::{stream_to_stdout, StreamingPrompt};
#[tokio::main]
|
|
async fn main() -> Result<(), anyhow::Error> {
|
|
// Create streaming agent with a single context prompt
|
|
let agent = ollama::Client::new()
|
|
.agent("llama3.2")
|
|
.preamble("Be precise and concise.")
|
|
.temperature(0.5)
|
|
.build();
|
|
|
|
// Stream the response and print chunks as they arrive
|
|
let mut stream = agent
|
|
.stream_prompt("When and where and what type is the next solar eclipse?")
|
|
.await?;
|
|
|
|
stream_to_stdout(agent, &mut stream).await?;
|
|
|
|
Ok(())
|
|
}