diff --git a/server/ai/configuration.go b/server/ai/configuration.go
index d4310dde..3a33ffe4 100644
--- a/server/ai/configuration.go
+++ b/server/ai/configuration.go
@@ -20,6 +20,7 @@ type BotConfig struct {
 	CustomInstructions string        `json:"customInstructions"`
 	Service            ServiceConfig `json:"service"`
 	EnableVision       bool          `json:"enableVision"`
+	DisableTools       bool          `json:"disableTools"`
 }
 
 func (c *BotConfig) IsValid() bool {
diff --git a/server/api_channel.go b/server/api_channel.go
index a7d9b30b..37b2ed39 100644
--- a/server/api_channel.go
+++ b/server/api_channel.go
@@ -110,7 +110,7 @@ func (p *Plugin) handleSince(c *gin.Context) {
 		"type": promptPreset,
 	})
 
-	prompt, err := p.prompts.ChatCompletion(promptPreset, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+	prompt, err := p.prompts.ChatCompletion(promptPreset, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 	if err != nil {
 		c.AbortWithError(http.StatusInternalServerError, err)
 		return
diff --git a/server/built_in_tools.go b/server/built_in_tools.go
index 2f81ef4a..fd8c4aa6 100644
--- a/server/built_in_tools.go
+++ b/server/built_in_tools.go
@@ -440,7 +440,10 @@ func (p *Plugin) getBuiltInTools(isDM bool) []ai.Tool {
 	return builtInTools
 }
 
-func (p *Plugin) getDefaultToolsStore(isDM bool) ai.ToolStore {
+func (p *Plugin) getDefaultToolsStore(bot *Bot, isDM bool) ai.ToolStore {
+	if bot == nil || bot.cfg.DisableTools {
+		return ai.NewNoTools()
+	}
 	store := ai.NewToolStore(&p.pluginAPI.Log, p.getConfiguration().EnableLLMTrace)
 	store.AddTools(p.getBuiltInTools(isDM))
 	return store
diff --git a/server/meeting_summarization.go b/server/meeting_summarization.go
index c2730a0e..98402c6d 100644
--- a/server/meeting_summarization.go
+++ b/server/meeting_summarization.go
@@ -272,7 +272,7 @@ func (p *Plugin) summarizeTranscription(bot *Bot, transcription *subtitles.Subti
 	p.pluginAPI.Log.Debug("Split into chunks", "chunks", len(chunks))
 	for _, chunk := range chunks {
 		context.PromptParameters = map[string]string{"TranscriptionChunk": chunk}
-		summarizeChunkPrompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeChunk, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+		summarizeChunkPrompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeChunk, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 		if err != nil {
 			return nil, fmt.Errorf("unable to get summarize chunk prompt: %w", err)
 		}
@@ -291,7 +291,7 @@ func (p *Plugin) summarizeTranscription(bot *Bot, transcription *subtitles.Subti
 	}
 
 	context.PromptParameters = map[string]string{"Transcription": llmFormattedTranscription, "IsChunked": fmt.Sprintf("%t", isChunked)}
-	summaryPrompt, err := p.prompts.ChatCompletion(ai.PromptMeetingSummary, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+	summaryPrompt, err := p.prompts.ChatCompletion(ai.PromptMeetingSummary, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 	if err != nil {
 		return nil, fmt.Errorf("unable to get meeting summary prompt: %w", err)
 	}
diff --git a/server/service.go b/server/service.go
index a0e628f2..d5e9c415 100644
--- a/server/service.go
+++ b/server/service.go
@@ -47,7 +47,7 @@ func (p *Plugin) processUserRequestToBot(bot *Bot, context ai.ConversationContex
 }
 
 func (p *Plugin) newConversation(bot *Bot, context ai.ConversationContext) error {
-	conversation, err := p.prompts.ChatCompletion(ai.PromptDirectMessageQuestion, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+	conversation, err := p.prompts.ChatCompletion(ai.PromptDirectMessageQuestion, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 	if err != nil {
 		return err
 	}
@@ -128,7 +128,7 @@ func (p *Plugin) continueConversation(bot *Bot, threadData *ThreadData, context
 			return nil, err
 		}
 	} else {
-		prompt, err := p.prompts.ChatCompletion(ai.PromptDirectMessageQuestion, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+		prompt, err := p.prompts.ChatCompletion(ai.PromptDirectMessageQuestion, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 		if err != nil {
 			return nil, err
 		}
@@ -151,7 +151,7 @@ func (p *Plugin) continueThreadConversation(bot *Bot, questionThreadData *Thread
 	originalThread := formatThread(originalThreadData)
 
 	context.PromptParameters = map[string]string{"Thread": originalThread}
-	prompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeThread, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+	prompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeThread, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 	if err != nil {
 		return nil, err
 	}
@@ -177,7 +177,7 @@ func (p *Plugin) summarizePost(bot *Bot, postIDToSummarize string, context ai.Co
 	formattedThread := formatThread(threadData)
 
 	context.PromptParameters = map[string]string{"Thread": formattedThread}
-	prompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeThread, context, p.getDefaultToolsStore(context.IsDMWithBot()))
+	prompt, err := p.prompts.ChatCompletion(ai.PromptSummarizeThread, context, p.getDefaultToolsStore(bot, context.IsDMWithBot()))
 	if err != nil {
 		return nil, err
 	}
diff --git a/webapp/src/components/system_console/bot.tsx b/webapp/src/components/system_console/bot.tsx
index eaa0b774..7a76df0a 100644
--- a/webapp/src/components/system_console/bot.tsx
+++ b/webapp/src/components/system_console/bot.tsx
@@ -31,6 +31,7 @@ export type LLMBotConfig = {
     service: LLMService
     customInstructions: string
     enableVision: boolean
+    disableTools: boolean
 }
 
 type Props = {
@@ -135,17 +136,27 @@ const Bot = (props: Props) => {
                     onChange={(e) => props.onChange({...props.bot, customInstructions: e.target.value})}
                 />
                 {(props.bot.service.type === 'openai' || props.bot.service.type === 'openaicompatible') && (
-                    <BooleanItem
-                        label={
-                            <FormattedMessage defaultMessage='Enable Vision'/>
-                        }
-                        value={props.bot.enableVision}
-                        onChange={(to: boolean) => props.onChange({...props.bot, enableVision: to})}
-                        helpText={intl.formatMessage({defaultMessage: 'Enable Vision to allow the bot to process images. Requires a compatible model.'})}
-                    />
+                    <>
+                        <BooleanItem
+                            label={
+                                <FormattedMessage defaultMessage='Enable Vision'/>
+                            }
+                            value={props.bot.enableVision}
+                            onChange={(to: boolean) => props.onChange({...props.bot, enableVision: to})}
+                            helpText={intl.formatMessage({defaultMessage: 'Enable Vision to allow the bot to process images. Requires a compatible model.'})}
+                        />
+                        <BooleanItem
+                            label={
+                                <FormattedMessage defaultMessage='Disable Tools'/>
+                            }
+                            value={props.bot.disableTools}
+                            onChange={(to: boolean) => props.onChange({...props.bot, disableTools: to})}
+                            helpText={intl.formatMessage({defaultMessage: 'By default some tool use is enabled to allow for features such as integrations with JIRA. Disabling this allows use of models that do not support or are not very good at tool use. Some features will not work without tools.'})}
+                        />
+                    </>
                 )}
diff --git a/webapp/src/components/system_console/bots.tsx b/webapp/src/components/system_console/bots.tsx
index 3fac55a9..2c8c9816 100644
--- a/webapp/src/components/system_console/bots.tsx
+++ b/webapp/src/components/system_console/bots.tsx
@@ -28,6 +28,7 @@ const defaultNewBot = {
         streamingTimeoutSeconds: 0,
     },
     enableVision: false,
+    disableTools: false,
 };
 
 export const firstNewBot = {