# Ollama Integration

## Setup

brew install ollama
ollama serve &          # serve blocks the terminal; run it in the background (or a second shell)
ollama pull qwen2.5:7b

## Usage

use laminae::ollama::OllamaClient;

let client = OllamaClient::new(); // defaults to localhost:11434

let response = client.complete(
    "qwen2.5:7b",
    "You are a helpful assistant.",
    "What is Rust?",
    0.3,    // temperature
    1024,   // max tokens
).await?;

println!("{response}");

## Custom Host

let client = OllamaClient::with_host("http://my-ollama-server:11434");
| Use Case           | Model        | Size   |
|--------------------|--------------|--------|
| Psyche Id/Superego | qwen2.5:7b   | 4.7 GB |
| Shadow LLM Review  | qwen2.5:14b  | 9.0 GB |
| Persona Extraction | qwen2.5:7b   | 4.7 GB |
| Cortex Analysis    | qwen2.5:7b   | 4.7 GB |