A teeny-tiny package to prompt for answers from Ollama, Perplexity, vLLM and other OpenAI-compatible API servers.
// Create a Perplexity client with the default options, authenticating
// via the PPLX_API_KEY environment variable.
client := perplexity.New(
	perplexity.Defaults(prompts.WithApiKey[perplexity.Event](os.Getenv("PPLX_API_KEY")))...)

// Build the conversation: a system prompt followed by the user question.
msgs := []prompts.ChatCompletionMessage{
	{
		Role:    prompts.RoleSystem,
		Content: "You are a helpful assistant. You start every answer with 'Sure my lord!'",
	},
	{
		Role:    prompts.RoleUser,
		Content: "What is the definition of Pi?",
	},
}

// Send the messages as a streaming chat-completion request,
// using the provider's default model.
req := prompts.NewStreamChatCompletionRequest(msgs...)
req.Model = perplexity.DefaultModel
stream, err := client.SendStreamCompletionRequest(context.Background(), req)
if err != nil {
	panic(err)
}
prompts.Print(stream)

Supported API servers:
- Ollama
- Perplexity
- More are coming ...
You can find the documentation hosted on pkg.go.dev.
The examples are located in the examples directory.