diff --git a/pkg/externalfunctions/externalfunctions.go b/pkg/externalfunctions/externalfunctions.go
index 156b501..0026864 100644
--- a/pkg/externalfunctions/externalfunctions.go
+++ b/pkg/externalfunctions/externalfunctions.go
@@ -1245,6 +1245,8 @@ func AnsysGPTPerformLLMRephraseRequest(template string, query string, history []
 
 	if len(history) >= 1 {
 		historyMessages += "user:" + history[len(history)-2].Content + "\n"
+	} else {
+		return query
 	}
 
 	// Create map for the data to be used in the template
@@ -1257,7 +1259,11 @@ func AnsysGPTPerformLLMRephraseRequest(template string, query string, history []
 	fmt.Println("System template:", userTemplate)
 
 	// Perform the general request
-	rephrasedQuery, _ = PerformGeneralRequest(userTemplate, nil, false, "You are AnsysGPT, a technical support assistant that is professional, friendly and multilingual that generates a clear and concise answer")
+	rephrasedQuery, _, err := performGeneralRequest(userTemplate, nil, false, "You are AnsysGPT, a technical support assistant that is professional, friendly and multilingual that generates a clear and concise answer")
+	if err != nil {
+		panic(err)
+	}
+
 	fmt.Println("Rephrased query:", rephrasedQuery)
 
 	return rephrasedQuery
diff --git a/pkg/externalfunctions/privatefunctions.go b/pkg/externalfunctions/privatefunctions.go
index 52b43e0..703ffea 100644
--- a/pkg/externalfunctions/privatefunctions.go
+++ b/pkg/externalfunctions/privatefunctions.go
@@ -661,3 +661,58 @@ func ansysGPTACSSemanticHybridSearch(
 
 	return respObject.Value
 }
+
+// performGeneralRequest performs a general chat completion request to the LLM
+//
+// Parameters:
+//   - input: the input string
+//   - history: the conversation history
+//   - isStream: the stream flag
+//   - systemPrompt: the system prompt
+//
+// Returns:
+//   - message: the generated message
+//   - stream: the stream channel
+//   - err: the error
+func performGeneralRequest(input string, history []HistoricMessage, isStream bool, systemPrompt string) (message string, stream *chan string, err error) {
+	// Get the LLM handler endpoint
+	llmHandlerEndpoint := *config.AllieFlowkitConfig.LLM_HANDLER_ENDPOINT
+
+	// Set up WebSocket connection with LLM and send chat request
+	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint)
+
+	// If isStream is true, create a stream channel and return asap
+	if isStream {
+		// Create a stream channel
+		streamChannel := make(chan string, 400)
+
+		// Start a goroutine to transfer the data from the response channel to the stream channel
+		go transferDatafromResponseToStreamChannel(&responseChannel, &streamChannel, false)
+
+		// Return the stream channel
+		return "", &streamChannel, nil
+	}
+
+	// Otherwise process all responses
+	var responseAsStr string
+	for response := range responseChannel {
+		// Check if the response is an error
+		if response.Type == "error" {
+			return "", nil, fmt.Errorf("error in general llm request %v: %v (%v)", response.InstructionGuid, response.Error.Code, response.Error.Message)
+		}
+
+		// Accumulate the responses
+		responseAsStr += *(response.ChatData)
+
+		// If we are at the last message, break the loop
+		if *(response.IsLast) {
+			break
+		}
+	}
+
+	// Close the response channel
+	close(responseChannel)
+
+	// Return the response
+	return responseAsStr, nil, nil
+}
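
For reference, a minimal sketch of how the new private helper would be called in blocking mode, mirroring the call site in AnsysGPTPerformLLMRephraseRequest above. The caller name and prompt text are hypothetical; only performGeneralRequest, HistoricMessage, and the three-value return contract come from the diff. The sketch assumes it lives in the same externalfunctions package, since the helper is unexported.

package externalfunctions

import "fmt"

// exampleBlockingCall is a hypothetical caller: with isStream=false the
// helper consumes the whole response channel itself and returns the full
// message, so the returned *chan string is always nil on this path.
func exampleBlockingCall(query string, history []HistoricMessage) (string, error) {
	message, _, err := performGeneralRequest(query, history, false, "You are a helpful assistant")
	if err != nil {
		// The call site in AnsysGPTPerformLLMRephraseRequest panics here;
		// a caller could also propagate the error instead, as done here.
		return "", err
	}
	fmt.Println("Full response:", message)
	return message, nil
}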
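And a sketch of the streaming path, continuing in the same package as the example above. It assumes, as the goroutine hand-off suggests but the diff does not show, that transferDatafromResponseToStreamChannel closes the stream channel after forwarding the last message; the caller name is again hypothetical.

// exampleStreamingCall is a hypothetical caller: with isStream=true the
// helper returns immediately with a buffered channel that yields partial
// chunks as the LLM produces them; message is "" and err is nil.
func exampleStreamingCall(query string, history []HistoricMessage) {
	_, stream, _ := performGeneralRequest(query, history, true, "You are a helpful assistant")
	// Assumes the transfer goroutine closes the channel after the last
	// chunk; otherwise this range loop would block forever.
	for chunk := range *stream {
		fmt.Print(chunk)
	}
	fmt.Println()
}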