Skip to content

Commit

Permalink
Added backwards compatibility
Browse files Browse the repository at this point in the history
  • Loading branch information
jubeless committed Nov 22, 2024
1 parent 6faf091 commit 33faf24
Showing 1 changed file with 10 additions and 2 deletions.
12 changes: 10 additions & 2 deletions chains/llm.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ type LLMChain struct {
Memory schema.Memory
CallbacksHandler callbacks.Handler
OutputParser schema.OutputParser[any]
// When UseMultiPrompt is enabled, the chain will not 'flatten' the prompt into a single message.
UseMultiPrompt bool

OutputKey string
}
Expand All @@ -41,6 +43,7 @@ func NewLLMChain(llm llms.Model, prompt prompts.FormatPrompter, opts ...ChainCal
Memory: memory.NewSimple(),
OutputKey: _llmChainDefaultOutputKey,
CallbacksHandler: opt.CallbackHandler,
UseMultiPrompt: false,
}

return chain
Expand All @@ -56,12 +59,17 @@ func (c LLMChain) Call(ctx context.Context, values map[string]any, options ...Ch
return nil, err
}

result, err := llms.GenerateFromMultiPrompt(ctx, c.LLM, chatMessagesToLLmMessageContent(promptValue.Messages()), getLLMCallOptions(options...)...)
var output string
if c.UseMultiPrompt {
output, err = llms.GenerateFromMultiPrompt(ctx, c.LLM, chatMessagesToLLmMessageContent(promptValue.Messages()), getLLMCallOptions(options...)...)
} else {
output, err = llms.GenerateFromSinglePrompt(ctx, c.LLM, promptValue.String(), getLLMCallOptions(options...)...)
}
if err != nil {
return nil, err
}

finalOutput, err := c.OutputParser.ParseWithPrompt(result, promptValue)
finalOutput, err := c.OutputParser.ParseWithPrompt(output, promptValue)
if err != nil {
return nil, err
}
Expand Down

0 comments on commit 33faf24

Please sign in to comment.