
Commit

fix: add private function for llm request (#42)
laurasgkadri98 authored Jul 23, 2024
1 parent f268e1d commit b69b19e
Showing 2 changed files with 62 additions and 1 deletion.
8 changes: 7 additions & 1 deletion pkg/externalfunctions/externalfunctions.go
@@ -1245,6 +1245,8 @@ func AnsysGPTPerformLLMRephraseRequest(template string, query string, history []

	if len(history) >= 1 {
		historyMessages += "user:" + history[len(history)-2].Content + "\n"
	} else {
		return query
	}

// Create map for the data to be used in the template
@@ -1257,7 +1259,11 @@ func AnsysGPTPerformLLMRephraseRequest(template string, query string, history []
fmt.Println("System template:", userTemplate)

// Perform the general request
rephrasedQuery, _ = PerformGeneralRequest(userTemplate, nil, false, "You are AnsysGPT, a technical support assistant that is professional, friendly and multilingual that generates a clear and concise answer")
rephrasedQuery, _, err := performGeneralRequest(userTemplate, nil, false, "You are AnsysGPT, a technical support assistant that is professional, friendly and multilingual that generates a clear and concise answer")
if err != nil {
panic(err)
}

fmt.Println("Rephrased query:", rephrasedQuery)

return rephrasedQuery
55 changes: 55 additions & 0 deletions pkg/externalfunctions/privatefunctions.go
@@ -661,3 +661,58 @@ func ansysGPTACSSemanticHybridSearch(

	return respObject.Value
}

// performGeneralRequest performs a general chat completion request to the LLM
//
// Parameters:
// - input: the user input string
// - history: the conversation history
// - isStream: whether to stream the response through a channel instead of returning it in full
// - systemPrompt: the system prompt
//
// Returns:
// - message: the generated message (empty when streaming)
// - stream: the stream channel (nil when not streaming)
// - err: the error, if any
func performGeneralRequest(input string, history []HistoricMessage, isStream bool, systemPrompt string) (message string, stream *chan string, err error) {
	// get the LLM handler endpoint
	llmHandlerEndpoint := *config.AllieFlowkitConfig.LLM_HANDLER_ENDPOINT

	// Set up WebSocket connection with LLM and send chat request
	responseChannel := sendChatRequest(input, "general", history, 0, systemPrompt, llmHandlerEndpoint)

	// If isStream is true, create a stream channel and return asap
	if isStream {
		// Create a stream channel
		streamChannel := make(chan string, 400)

		// Start a goroutine to transfer the data from the response channel to the stream channel
		go transferDatafromResponseToStreamChannel(&responseChannel, &streamChannel, false)

		// Return the stream channel
		return "", &streamChannel, nil
	}

	// else Process all responses
	var responseAsStr string
	for response := range responseChannel {
		// Check if the response is an error
		if response.Type == "error" {
			return "", nil, fmt.Errorf("error in general llm request %v: %v (%v)", response.InstructionGuid, response.Error.Code, response.Error.Message)
		}

		// Accumulate the responses
		responseAsStr += *(response.ChatData)

		// If we are at the last message, break the loop
		if *(response.IsLast) {
			break
		}
	}

	// Close the response channel
	close(responseChannel)

	// Return the response
	return responseAsStr, nil, nil
}
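
Since the new helper is unexported, it can only be reached from inside pkg/externalfunctions, for example through exported wrappers like the one changed above. Below is a minimal sketch of both call modes; the wrapper names and system prompts are hypothetical and not part of this commit.

package externalfunctions

import "fmt"

// exampleSummarizeRequest sketches the blocking (non-streaming) mode: the
// helper drains the response channel itself and returns the full message.
func exampleSummarizeRequest(input string, history []HistoricMessage) (string, error) {
	message, _, err := performGeneralRequest(input, history, false, "You are a concise technical assistant.")
	if err != nil {
		return "", fmt.Errorf("llm request failed: %w", err)
	}
	return message, nil
}

// exampleStreamRequest sketches the streaming mode: the helper returns
// immediately with a channel that the caller drains as chunks arrive.
func exampleStreamRequest(input string, history []HistoricMessage) {
	_, stream, err := performGeneralRequest(input, history, true, "You are a concise technical assistant.")
	if err != nil {
		panic(err)
	}
	for chunk := range *stream {
		fmt.Print(chunk)
	}
}

In the streaming case the helper returns right away with an empty message; the transfer goroutine it starts is assumed to close the stream channel after forwarding the last chunk, which is what ends the range loop above.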
