feat: add example golang stream chat

This commit is contained in:
Hidayat Hamir 2023-12-20 12:14:35 +07:00
parent 23dc179350
commit 540eec985f
4 changed files with 118 additions and 0 deletions

View File

@ -0,0 +1,31 @@
# golang-streamchat
This is a simple Go program that streams chat responses from llama2.
## Get Started
1. Run the Ollama Docker container:
```shell
sudo docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
```
For more detailed information, refer to the [Ollama Quickstart Docker](https://hub.docker.com/r/ollama/ollama). Please note we are using CPU only, so the AI will respond slowly. If you have a GPU, you can follow the instructions to run the Docker container with your GPU to improve performance.
2. Pull the llama2 model:
```shell
curl --location 'http://localhost:11434/api/pull' \
--header 'Content-Type: application/json' \
--data '{
"name": "llama2:7b"
}'
```
3. Run the golang code.
```shell
go run main.go
```
![get-started-gif](./assets/get-started.gif)

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 MiB

View File

@ -0,0 +1,3 @@
module github.com/jmorganca/ollama/examples/golang-streamchat

go 1.21.4

View File

@ -0,0 +1,84 @@
package main
import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
)
func main() {
err := SendChatRequest(ReqStreamChat{
Model: ModelLlama27b,
Messages: []ReqStreamChatMessage{
{
Role: RoleUser,
Content: "Show me how to use golang channel.",
},
},
})
if err != nil {
panic(err)
}
}
// SendChatRequest posts payload to the local Ollama chat endpoint and
// streams the reply, printing each message chunk to stdout as it arrives.
// It returns an error when the payload cannot be marshaled, the request
// cannot be sent, the server answers with a non-200 status, or the
// response stream cannot be decoded.
func SendChatRequest(payload ReqStreamChat) error {
	jsonBytes, err := json.Marshal(payload)
	if err != nil {
		return fmt.Errorf("error marshal payload: %w", err)
	}
	url := "http://localhost:11434/api/chat"
	res, err := http.Post(url, "application/json", bytes.NewBuffer(jsonBytes))
	if err != nil {
		return fmt.Errorf("error http post request: %w", err)
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected http status: %s", res.Status)
	}
	// The server responds with a stream of newline-delimited JSON objects;
	// decode them one at a time until the stream ends. A clean end of
	// stream (io.EOF) is success; any other decode error is reported
	// instead of being silently swallowed.
	dec := json.NewDecoder(res.Body)
	for {
		var r ResStreamChat
		if err := dec.Decode(&r); err != nil {
			if errors.Is(err, io.EOF) {
				return nil
			}
			return fmt.Errorf("error decode stream: %w", err)
		}
		fmt.Print(r.Message.Content)
	}
}
type Model string
const (
ModelLlama27b Model = "llama2:7b"
)
type Role string
const (
RoleUser Role = "user"
RoleAssistant Role = "assistant"
)
type ReqStreamChat struct {
Model Model `json:"model"`
Messages []ReqStreamChatMessage `json:"messages"`
}
type ReqStreamChatMessage struct {
Role Role
Content string
}
type ResStreamChat struct {
Model Model `json:"model"`
CreatedAt string `json:"created_at"`
Message ResStreamChatMessage `json:"message"`
Done bool `json:"done"`
}
type ResStreamChatMessage struct {
Role Role `json:"role"`
Content string `json:"content"`
Images interface{} `json:"images"`
}