memoryBuffer := memory.NewConversationWindowBuffer(10)
chatChain := chains.NewConversation(llm, memoryBuffer)

messages := []string{
	"Hello, my name is PBR",
	"Do you know what my name is?",
	"What kinds of problems can you solve?",
}

for _, message := range messages {
	completion, err := chains.Run(ctx, chatChain, message)
	// Naive retry: wait 30 seconds and try again until the call succeeds.
	for {
		if err == nil {
			break
		}
		time.Sleep(30 * time.Second)
		completion, err = chains.Run(ctx, chatChain, message)
	}

	chatMessages, _ := memoryBuffer.ChatHistory.Messages(ctx)
	fmt.Printf("conversation history so far: %v\n", chatMessages)
	fmt.Printf("input: %v\noutput: %v\n", message, completion)
}
memoryBuffer := memory.NewConversationBuffer()
memoryBuffer := memory.NewConversationWindowBuffer(10)
memoryBuffer := memory.NewConversationTokenBuffer(llm, 1024)
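NewConversationBuffer keeps the whole history, NewConversationWindowBuffer keeps only the most recent turns, and NewConversationTokenBuffer takes the llm as an argument so that it can prune the history down to a token budget. All three satisfy schema.Memory, so switching strategies only changes one line of the example above; a sketch of limiting by tokens instead of by turns:

memoryBuffer := memory.NewConversationTokenBuffer(llm, 1024) // prune history to roughly 1024 tokens
chatChain := chains.NewConversation(llm, memoryBuffer)       // the rest of the example stays the same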
func NewConversationWindowBuffer(
	conversationWindowSize int,
	options ...ConversationBufferOption,
) *ConversationWindowBuffer {
	if conversationWindowSize <= 0 {
		conversationWindowSize = defaultConversationWindowSize
	}
	tb := &ConversationWindowBuffer{
		ConversationWindowSize: conversationWindowSize,
		ConversationBuffer:     *applyBufferOptions(options...),
	}
	return tb
}
type ConversationWindowBuffer struct {
	ConversationBuffer
	ConversationWindowSize int
}
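One detail worth noting in the constructor above: a non-positive window size silently falls back to defaultConversationWindowSize rather than failing (the concrete default value depends on the library version), e.g.:

mem := memory.NewConversationWindowBuffer(0)
fmt.Println(mem.ConversationWindowSize) // prints the package-level default, not 0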
type ConversationBuffer struct {
	ChatHistory schema.ChatMessageHistory

	ReturnMessages bool
	InputKey       string
	OutputKey      string
	HumanPrefix    string
	AIPrefix       string
	MemoryKey      string
}
// ChatMessageHistory is the interface for chat history in memory/store.
type ChatMessageHistory interface {
	// AddMessage adds a message to the store.
	AddMessage(ctx context.Context, message llms.ChatMessage) error

	// AddUserMessage is a convenience method for adding a human message string
	// to the store.
	AddUserMessage(ctx context.Context, message string) error

	// AddAIMessage is a convenience method for adding an AI message string to
	// the store.
	AddAIMessage(ctx context.Context, message string) error

	// Clear removes all messages from the store.
	Clear(ctx context.Context) error

	// Messages retrieves all messages from the store
	Messages(ctx context.Context) ([]llms.ChatMessage, error)

	// SetMessages replaces existing messages in the store
	SetMessages(ctx context.Context, messages []llms.ChatMessage) error
}
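Because ChatHistory is only this small interface, the storage backend can be swapped for anything (a database, Redis, a file, ...). Below is a minimal in-memory sketch; the sliceHistory name is made up here, and it assumes the recent langchaingo layout where the message types live in github.com/tmc/langchaingo/llms (llms.HumanChatMessage / llms.AIChatMessage with a Content field). The library's own NewChatMessageHistory() already provides equivalent behaviour.

import (
	"context"

	"github.com/tmc/langchaingo/llms"
)

// sliceHistory keeps the conversation in a plain in-memory slice.
type sliceHistory struct {
	messages []llms.ChatMessage
}

func (h *sliceHistory) AddMessage(_ context.Context, m llms.ChatMessage) error {
	h.messages = append(h.messages, m)
	return nil
}

func (h *sliceHistory) AddUserMessage(ctx context.Context, text string) error {
	return h.AddMessage(ctx, llms.HumanChatMessage{Content: text})
}

func (h *sliceHistory) AddAIMessage(ctx context.Context, text string) error {
	return h.AddMessage(ctx, llms.AIChatMessage{Content: text})
}

func (h *sliceHistory) Clear(_ context.Context) error {
	h.messages = nil
	return nil
}

func (h *sliceHistory) Messages(_ context.Context) ([]llms.ChatMessage, error) {
	return h.messages, nil
}

func (h *sliceHistory) SetMessages(_ context.Context, msgs []llms.ChatMessage) error {
	h.messages = msgs
	return nil
}

Anything that satisfies the interface can be assigned to ConversationBuffer.ChatHistory; the fallback to NewChatMessageHistory() in applyBufferOptions below only runs when that field is still nil.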
func applyBufferOptions(opts ...ConversationBufferOption) *ConversationBuffer {
	m := &ConversationBuffer{
		ReturnMessages: false,
		InputKey:       "",
		OutputKey:      "",
		HumanPrefix:    "Human",
		AIPrefix:       "AI",
		MemoryKey:      "history",
	}

	for _, opt := range opts {
		opt(m)
	}

	if m.ChatHistory == nil {
		m.ChatHistory = NewChatMessageHistory()
	}

	return m
}
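Since all of the buffer's fields are exported, the defaults set above can simply be overwritten after construction (the package also exposes ConversationBufferOption helpers for the same purpose, whose exact names depend on the version). A small sketch, with arbitrary prefix strings and the sliceHistory store sketched earlier:

buf := memory.NewConversationBuffer()
buf.HumanPrefix = "User"          // prefixes used when the history is rendered as a single string
buf.AIPrefix = "Assistant"
buf.ChatHistory = &sliceHistory{} // plug in a custom ChatMessageHistory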
func NewConversationTokenBuffer(
	llm llms.Model,
	maxTokenLimit int,
	options ...ConversationBufferOption,
) *ConversationTokenBuffer {
	tb := &ConversationTokenBuffer{
		LLM:                llm,
		MaxTokenLimit:      maxTokenLimit,
		ConversationBuffer: *applyBufferOptions(options...),
	}

	return tb
}
func NewConversation(llm llms.Model, memory schema.Memory) LLMChain {
	return LLMChain{
		Prompt: prompts.NewPromptTemplate(
			_conversationTemplate,
			[]string{"history", "input"},
		),
		LLM:          llm,
		Memory:       memory,
		OutputParser: outputparser.NewSimple(),
		OutputKey:    _llmChainDefaultOutputKey,
	}
}
const _llmChainDefaultOutputKey = "text"
const _conversationTemplate = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.

Current conversation:
{{.history}}
Human: {{.input}}
AI:`
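To see the text that ultimately reaches the model, the template can be rendered by hand. A sketch, assuming prompts.PromptTemplate exposes a Format method as in current langchaingo, and reusing the unexported constant purely for illustration; the history string stands in for whatever the memory returns:

prompt := prompts.NewPromptTemplate(_conversationTemplate, []string{"history", "input"})
text, err := prompt.Format(map[string]any{
	"history": "Human: Hello, my name is PBR\nAI: Nice to meet you, PBR!",
	"input":   "Do you know what my name is?",
})
if err != nil {
	panic(err)
}
fmt.Println(text) // the filled-in conversation prompt, ending with "AI:"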
type LLMChain struct {
	Prompt           prompts.FormatPrompter
	LLM              llms.Model
	Memory           schema.Memory
	CallbacksHandler callbacks.Handler
	OutputParser     schema.OutputParser[any]

	OutputKey string
}
func Run(ctx context.Context, c Chain, input any, options ...ChainCallOption) (string, error) {
	inputKeys := c.GetInputKeys()
	memoryKeys := c.GetMemory().MemoryVariables(ctx)
	neededKeys := make([]string, 0, len(inputKeys))

	// Remove keys gotten from the memory.
	for _, inputKey := range inputKeys {
		isInMemory := false
		for _, memoryKey := range memoryKeys {
			if inputKey == memoryKey {
				isInMemory = true
				continue
			}
		}
		if isInMemory {
			continue
		}
		neededKeys = append(neededKeys, inputKey)
	}

	if len(neededKeys) != 1 {
		return "", ErrMultipleInputsInRun
	}

	outputKeys := c.GetOutputKeys()
	if len(outputKeys) != 1 {
		return "", ErrMultipleOutputsInRun
	}

	inputValues := map[string]any{neededKeys[0]: input}
	outputValues, err := Call(ctx, c, inputValues, options...)
	if err != nil {
		return "", err
	}

	outputValue, ok := outputValues[outputKeys[0]].(string)
	if !ok {
		return "", ErrWrongOutputTypeInRun
	}

	return outputValue, nil
}
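For the conversation chain this bookkeeping works out as follows: the prompt declares "history" and "input", the buffer's memory key defaults to "history", so exactly one key ("input") remains and Run can accept a bare string. Calling Call directly is the explicit equivalent; a sketch reusing ctx and chatChain from the first example:

outputs, err := chains.Call(ctx, chatChain, map[string]any{"input": "Do you know what my name is?"})
if err != nil {
	panic(err)
}
fmt.Println(outputs["text"]) // LLMChain's output key defaults to "text" (see _llmChainDefaultOutputKey)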
// Call is the standard function used for executing chains.
func Call(ctx context.Context, c Chain, inputValues map[string]any, options ...ChainCallOption) (map[string]any, error) { // nolint: lll
	fullValues := make(map[string]any, 0)
	for key, value := range inputValues {
		fullValues[key] = value
	}

	newValues, err := c.GetMemory().LoadMemoryVariables(ctx, inputValues)
	if err != nil {
		return nil, err
	}

	for key, value := range newValues {
		fullValues[key] = value
	}

	callbacksHandler := getChainCallbackHandler(c)
	if callbacksHandler != nil {
		callbacksHandler.HandleChainStart(ctx, inputValues)
	}

	outputValues, err := callChain(ctx, c, fullValues, options...)
	if err != nil {
		if callbacksHandler != nil {
			callbacksHandler.HandleChainError(ctx, err)
		}
		return outputValues, err
	}

	if callbacksHandler != nil {
		callbacksHandler.HandleChainEnd(ctx, outputValues)
	}

	if err = c.GetMemory().SaveContext(ctx, inputValues, outputValues); err != nil {
		return outputValues, err
	}

	return outputValues, nil
}
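Read top to bottom, Call is the full memory round-trip: LoadMemoryVariables merges the stored history into fullValues before the chain actually runs, and SaveContext records the new input/output pair afterwards, which is why the next Run in the loop at the top already sees the earlier turns.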
func callChain(
	ctx context.Context,
	c Chain,
	fullValues map[string]any,
	options ...ChainCallOption,
) (map[string]any, error) {
	if err := validateInputs(c, fullValues); err != nil {
		return nil, err
	}

	outputValues, err := c.Call(ctx, fullValues, options...)
	if err != nil {
		return outputValues, err
	}

	if err := validateOutputs(c, outputValues); err != nil {
		return outputValues, err
	}

	return outputValues, nil
}






