mirror-ollama-twitch-bot/generate.go

package main

import (
	"bytes"
	"encoding/json"
	"io"
	"net/http"
)
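
// ollamaResponse and ollamaRequest model the JSON payloads this bot exchanges
// with Ollama's /api/chat and /api/generate HTTP endpoints.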
type ollamaResponse struct {
	Model     string        `json:"model"`
	CreatedAt string        `json:"created_at"`
	Response  string        `json:"response"`
	Done      bool          `json:"done"`
	Message   ollamaMessage `json:"message"`
}

type ollamaRequest struct {
	Format   string          `json:"format"`
	Model    string          `json:"model"`
	Prompt   string          `json:"prompt"`
	Stream   bool            `json:"stream"`
	System   string          `json:"system"`
	Raw      bool            `json:"raw"`
	Messages []ollamaMessage `json:"messages"`
}

type ollamaMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// requestBody is package-level state reused by chatUserContext and
// chatGeneralContext; generateNoContext shadows it with a local copy below.
var requestBody ollamaRequest

// chatUserContext sends a chat request to Ollama that includes the given
// user's past interactions with the AI since the last restart.
func (app *application) chatUserContext(target, username, input string) {
	olm := ollamaMessage{}
	olm.Role = "user"
	olm.Content = input
	app.userMsgStore[username] = append(app.userMsgStore[username], olm)

	requestBody.Model = app.cfg.ollamaModel
	requestBody.System = app.cfg.ollamaSystem
	requestBody.Messages = app.userMsgStore[username]
	requestBody.Prompt = input
	requestBody.Stream = false

	marshalled, err := json.Marshal(requestBody)
	if err != nil {
		app.log.Error(err)
	}

	resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewBuffer(marshalled))
	if err != nil {
		app.log.Error(err.Error())
		// resp is nil when the request fails, so return before dereferencing it.
		return
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		app.log.Error(err.Error())
	}

	var responseObject ollamaResponse
	if err := json.Unmarshal(body, &responseObject); err != nil {
		app.log.Error(err)
	}

	// Store the model's reply so it becomes part of this user's context on
	// the next request.
	olm.Role = responseObject.Message.Role
	olm.Content = responseObject.Message.Content
	app.userMsgStore[username] = append(app.userMsgStore[username], olm)

	app.log.Infow("Message context for username",
		"username", username,
		"app.userMsgStore[username]", app.userMsgStore[username],
	)

	app.send(target, responseObject.Message.Content)
}

// chatGeneralContext sends a chat request to Ollama that includes every past
// interaction with the AI since the last restart.
func (app *application) chatGeneralContext(target, input string) {
	olm := ollamaMessage{}
	olm.Role = "user"
	olm.Content = input
	app.msgStore = append(app.msgStore, olm)

	requestBody.Model = app.cfg.ollamaModel
	requestBody.System = app.cfg.ollamaSystem
	requestBody.Messages = app.msgStore
	requestBody.Prompt = input
	requestBody.Stream = false

	marshalled, err := json.Marshal(requestBody)
	if err != nil {
		app.log.Error(err)
	}

	resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewBuffer(marshalled))
	if err != nil {
		app.log.Error(err.Error())
		// resp is nil when the request fails, so return before dereferencing it.
		return
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		app.log.Error(err.Error())
	}

	var responseObject ollamaResponse
	if err := json.Unmarshal(body, &responseObject); err != nil {
		app.log.Error(err)
	}

	// Store the model's reply so it becomes part of the shared context on
	// the next request.
	olm.Role = responseObject.Message.Role
	olm.Content = responseObject.Message.Content
	app.msgStore = append(app.msgStore, olm)

	app.log.Infow("app.msgStore",
		"app.msgStore", app.msgStore,
	)

	app.send(target, responseObject.Message.Content)
}

// generateNoContext sends a single prompt to Ollama without any prior
// message context.
func (app *application) generateNoContext(target, input string) {
	var requestBody ollamaRequest

	requestBody.Model = app.cfg.ollamaModel
	requestBody.System = app.cfg.ollamaSystem
	requestBody.Prompt = input
	requestBody.Stream = false

	marshalled, err := json.Marshal(requestBody)
	if err != nil {
		app.log.Error(err)
	}

	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewBuffer(marshalled))
	if err != nil {
		app.log.Error(err.Error())
		// resp is nil when the request fails, so return before dereferencing it.
		return
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		app.log.Error(err.Error())
	}

	var responseObject ollamaResponse
	if err := json.Unmarshal(body, &responseObject); err != nil {
		app.log.Error(err)
	}

	app.send(target, responseObject.Response)
}