locustfile.py (forked from Azure-Samples/azure-search-openai-demo)
import random
import time
from locust import HttpUser, between, task
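# Locust load test for the chat application: each simulated user loads the home
# page and then posts chat questions to the /chat endpoint.
#
# A minimal way to run it (the host URL below is a placeholder; substitute the
# URL of your own deployment):
#   locust -f locustfile.py -H https://<your-app-host>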
class ChatUser(HttpUser):
    """Simulates a text-only chat user: a single question, then a multi-turn conversation."""

    wait_time = between(5, 20)

    @task
    def ask_question(self):
        self.client.get("/")
        time.sleep(5)
        # First request: a single user question chosen at random.
        self.client.post(
            "/chat",
            json={
                "messages": [
                    {
                        "content": random.choice(
                            [
                                "What is Mosaic Theory?",
                                "Can you show me case examples related to Mosaic Theory?",
                                "Can you provide me with a quiz question related to Ethical Standard II(A) - Material Nonpublic Information?",
                                "Can you give me one more quiz?",
                            ]
                        ),
                        "role": "user",
                    },
                ],
                "context": {
                    "overrides": {
                        "retrieval_mode": "hybrid",
                        "semantic_ranker": True,
                        "semantic_captions": False,
                        "top": 3,
                        "suggest_followup_questions": False,
                    },
                },
            },
        )
        time.sleep(5)
        # Second request: a longer multi-turn conversation history.
        self.client.post(
            "/chat",
            json={
                "messages": [
                    {"content": "What is Mosaic Theory?", "role": "user"},
                    {
                        "content": "Good question! Mosaic Theory refers to .... In summary, Mosaic Theory is about...",
                        "role": "assistant",
                    },
                    {"content": "Can you show me an example related to Mosaic Theory?", "role": "user"},
                    {
                        "content": "Of course! Let's see this example:\n\nRoger Clement is a senior financial analyst who specializes in the European automobile sector at Rivoli Capital....\n\nAnalysis: to reach a conclusion about the value of the company, Clement has pieced together a number of nonmaterial or public bits of information that affect Turgot Chariots. Therefore, under the mosaic theory, Clement has not violated Standard II(A) in drafting the report.",
                        "role": "assistant",
                    },
                    {"content": "Can you provide me with a quiz related to Ethical Standard II(A) - Material Nonpublic Information?", "role": "user"},
                    {
                        "content": "Sure! Ready for the challenge?\n\nUnder the Code and Standards, which of the following investment risks is least likely required to be disclosed to clients?\nAnswer Choices:\nA. the use of leverage as part of the investment strategy.\nB. risks that are unknown to the investment adviser at the time of the recommendation.\nC. general market-related risks.\n\nWhich is the correct answer?",
                        "role": "assistant",
                    },
                    {"content": "I still don't understand.", "role": "user"},
                    {
                        "content": "No worries! Let me give you a more concrete example to make it clearer. Imagine you are .... Does it sound clear to you now? Feel free to ask if you have more questions.",
                        "role": "assistant",
                    },
                ],
                "context": {
                    "overrides": {
                        "retrieval_mode": "hybrid",
                        "semantic_ranker": True,
                        "semantic_captions": False,
                        "top": 3,
                        "suggest_followup_questions": False,
                    },
                },
            },
        )

class ChatVisionUser(HttpUser):
    """Simulates a user on the GPT-4 Vision flow: streaming responses using both text and image embeddings."""

    wait_time = between(5, 20)

    @task
    def ask_question(self):
        self.client.get("/")
        time.sleep(5)
        self.client.post(
            "/chat",
            json={
                "messages": [
                    {
                        "content": "Can you identify any correlation between oil prices and stock market trends?",
                        "role": "user",
                    }
                ],
                "stream": True,
                "context": {
                    "overrides": {
                        "top": 3,
                        "temperature": 0.3,
                        "minimum_reranker_score": 0,
                        "minimum_search_score": 0,
                        "retrieval_mode": "hybrid",
                        "semantic_ranker": True,
                        "semantic_captions": False,
                        "suggest_followup_questions": False,
                        "use_oid_security_filter": False,
                        "use_groups_security_filter": False,
                        "vector_fields": ["embedding", "imageEmbedding"],
                        "use_gpt4v": True,
                        "gpt4v_input": "textAndImages",
                    }
                },
                "session_state": None,
            },
        )
        time.sleep(5)
        self.client.post(
            "/chat",
            json={
                "messages": [
                    {"content": "Compare the impact of interest rates and GDP in financial markets.", "role": "user"}
                ],
                "stream": True,
                "context": {
                    "overrides": {
                        "top": 3,
                        "temperature": 0.3,
                        "minimum_reranker_score": 0,
                        "minimum_search_score": 0,
                        "retrieval_mode": "hybrid",
                        "semantic_ranker": True,
                        "semantic_captions": False,
                        "suggest_followup_questions": False,
                        "use_oid_security_filter": False,
                        "use_groups_security_filter": False,
                        "vector_fields": ["embedding", "imageEmbedding"],
                        "use_gpt4v": True,
                        "gpt4v_input": "textAndImages",
                    }
                },
                "session_state": None,
            },
        )