Skip to main content

## Setup

[dependencies]
laminae = { version = "0.3", features = ["openai"] }
tokio = { version = "1", features = ["full"] }

## Usage

use laminae::psyche::PsycheEngine;
use laminae::openai::OpenAIBackend;
use laminae::ollama::OllamaClient;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Ego backend: constructor reads OPENAI_API_KEY from the environment.
    let ego_backend = OpenAIBackend::new()?;
    // Pair a local Ollama client with the OpenAI-backed ego.
    let engine = PsycheEngine::new(OllamaClient::new(), ego_backend);

    let haiku = engine.reply("Write a haiku about Rust.").await?;
    println!("{haiku}");
    Ok(())
}

## Compatible Providers

| Provider | Base URL | Model Example |
|---|---|---|
| OpenAI | https://api.openai.com/v1 (default) | gpt-4o |
| Groq | https://api.groq.com/openai/v1 | llama-3.1-70b-versatile |
| Together | https://api.together.xyz/v1 | meta-llama/Llama-3-70b |
| DeepSeek | https://api.deepseek.com/v1 | deepseek-chat |
| Local (Ollama) | http://localhost:11434/v1 | qwen2.5:14b |
OPENAI_API_KEY=sk-... cargo run