// Package chatarea implements the scrollable chat history pane: it stores the
// conversation, shows the in-flight streaming response, and optionally renders
// assistant and system messages as markdown via glamour.
package chatarea

import (
	"fmt"
	"strings"

	"github.com/charmbracelet/bubbles/v2/viewport"
	tea "github.com/charmbracelet/bubbletea/v2"
	"github.com/charmbracelet/glamour"
	"github.com/charmbracelet/lipgloss/v2"
	"github.com/muesli/reflow/wordwrap"
	"github.com/sashabaranov/go-openai"

	"lazykimi/pkg/theme"
)
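
// Style groups the lipgloss styles used to render each message role and the
// divider drawn between messages.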
type Style struct {
	UserMessage      lipgloss.Style
	AssistantMessage lipgloss.Style
	SystemMessage    lipgloss.Style
	Divider          lipgloss.Style
}
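
// Model is the Bubble Tea model for the chat area. It holds the conversation
// history, the in-progress streaming response, and the viewport used to
// scroll the rendered messages.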
type Model struct {
	Width, Height     int
	systemPrompt      string
	Messages          []openai.ChatCompletionMessage
	streamingResponse string
	markdownMode      bool
	vp                viewport.Model
	glmr              *glamour.TermRenderer
	style             Style
	ready             bool
}
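
// SetMarkdownMode toggles markdown rendering for assistant and system
// messages.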
func (m *Model) SetMarkdownMode(markdown bool) {
	m.markdownMode = markdown
}
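
// New creates a chat area model with a bordered, mouse-scrollable viewport.
// The width and height arguments are currently unused; the model sizes
// itself on the first tea.WindowSizeMsg.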
func New(width, height int) Model {
	vp := viewport.New()
	vp.Style = lipgloss.NewStyle().
		Border(lipgloss.RoundedBorder()).
		BorderForeground(theme.Gray).
		Padding(0, 0)
	vp.MouseWheelEnabled = true

	baseMessage := lipgloss.NewStyle().Padding(0, 2).Bold(true)
	return Model{
		vp: vp,
		style: Style{
			UserMessage:      baseMessage.Foreground(theme.Yellow),
			AssistantMessage: baseMessage.Foreground(theme.Blue),
			SystemMessage:    baseMessage.Foreground(theme.Green),
			Divider:          lipgloss.NewStyle().Foreground(theme.Gray),
		},
	}
}
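
// Update handles window resizes (the viewport takes the full width and the
// window height minus three rows) and forwards all messages to the viewport
// for scrolling.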
func (m Model) Update(msg tea.Msg) (Model, tea.Cmd) {
	var cmd tea.Cmd
	switch msg := msg.(type) {
	case tea.WindowSizeMsg:
		m.Width = msg.Width
		m.Height = msg.Height - 3
		m.vp.SetWidth(m.Width)
		m.vp.SetHeight(m.Height)

		// (Re)build the markdown renderer so word wrapping follows the new
		// width; if it fails, formatMessage falls back to plain rendering.
		if gr, err := glamour.NewTermRenderer(
			glamour.WithAutoStyle(),
			glamour.WithEmoji(),
			glamour.WithWordWrap(m.Width-6),
		); err == nil {
			m.glmr = gr
		}
		m.ready = true
	}
	m.vp, cmd = m.vp.Update(msg)
	return m, cmd
}
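
// View renders the message history with dividers, plus any in-flight
// streaming response, inside the viewport and keeps the view scrolled to the
// bottom.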
func (m Model) View() string {
	if !m.ready {
		return ""
	}

	var messages []string
	divider := m.style.Divider.Render(strings.Repeat("─", m.Width-6))

	// Render history messages.
	for _, msg := range m.Messages {
		messages = append(messages, m.formatMessage(msg))
		messages = append(messages, divider)
	}

	// Render the in-flight response while a reply is streaming.
	if m.streamingResponse != "" {
		messages = append(messages, m.formatMessage(openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleAssistant,
			Content: m.streamingResponse,
		}))
	}

	m.vp.SetContent(strings.Join(messages, "\n"))

	// Keep the view pinned to the bottom so new content stays visible.
	if !m.vp.AtBottom() {
		m.vp.GotoBottom()
	}
	return m.vp.View()
}

// AddUserMessage adds a user message to the chat.
func (m *Model) AddUserMessage(content string) {
	m.Messages = append(m.Messages, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleUser,
		Content: content,
	})
}

// AddAssistantMessage adds an assistant message to the chat.
func (m *Model) AddAssistantMessage(content string) {
	m.Messages = append(m.Messages, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleAssistant,
		Content: content,
	})
}

// AppendCurrentResponse appends a streamed chunk to the in-progress response.
func (m *Model) AppendCurrentResponse(content string) {
	m.streamingResponse += content
}

// CommitCurrentResponse adds the current streaming response as an assistant
// message and clears it.
func (m *Model) CommitCurrentResponse() {
	if m.streamingResponse != "" {
		m.AddAssistantMessage(m.streamingResponse)
		m.streamingResponse = ""
	}
}

// Clear removes all messages and the streaming response, then re-adds the
// system prompt if one is set.
func (m *Model) Clear() {
	m.Messages = []openai.ChatCompletionMessage{}
	m.streamingResponse = ""

	// Re-add the system message if a prompt is configured.
	if m.systemPrompt != "" {
		m.Messages = append(m.Messages, openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: m.systemPrompt,
		})
	}
}

// SetSystemPrompt sets a new system prompt and resets the conversation so the
// prompt becomes the first message.
func (m *Model) SetSystemPrompt(prompt string) {
	m.systemPrompt = prompt
	m.Clear() // Clear re-adds the new system prompt as the first message.
}
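
// formatMessage renders a single message as a role indicator line followed by
// the word-wrapped body, optionally rendered as markdown.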
func (m *Model) formatMessage(msg openai.ChatCompletionMessage) string {
	var (
		indicator string
		style     lipgloss.Style
		useMd     bool
	)
	switch msg.Role {
	case openai.ChatMessageRoleUser:
		indicator = "You"
		style = m.style.UserMessage
	case openai.ChatMessageRoleAssistant:
		indicator = "AI"
		style = m.style.AssistantMessage
		useMd = m.markdownMode
	case openai.ChatMessageRoleSystem:
		indicator = "System"
		style = m.style.SystemMessage
		useMd = m.markdownMode
	}

	content := strings.TrimSpace(msg.Content)
	content = wordwrap.String(content, m.Width-3)

	if useMd && m.glmr != nil {
		// Render as markdown; fall back to the plain style on error.
		if rendered, err := m.glmr.Render(content); err == nil {
			return fmt.Sprintf("%s\n%s", indicator, rendered)
		}
	}
	return fmt.Sprintf("%s\n%s", indicator, style.Render(content))
}