// Learn more about clients at https://docs.boundaryml.com/docs/snippets/clients/overview
client Llama2 {
  provider "openai-generic"
  options {
    // Points at a local Ollama server's OpenAI-compatible endpoint
    base_url "http://localhost:11434/v1"
    model "llama3.2:1b"
  }
}
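
// A minimal usage sketch: a BAML function that routes its calls through the client above.
// The function name, parameter, and prompt text are hypothetical examples, not part of the original snippet.
function ExtractName(text: string) -> string {
  client Llama2
  prompt #"
    Extract the person's name from the following text:
    {{ text }}
  "#
}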