Merge pull request stanfordnlp#1012 from omkar-sh/bug/vllm_extra_param_fix

bug(dspy): removed extra params from kwargs sent to vllm pydantic model
arnavsinghvi11 authored May 15, 2024
2 parents 17d76b7 + ec96941 commit 9c1fff9
Showing 1 changed file with 7 additions and 0 deletions.
dsp/modules/hf_client.py: 7 additions & 0 deletions
@@ -154,6 +154,10 @@ def _generate(self, prompt, **kwargs):
             messages = [{"role": "user", "content": prompt}]
             if system_prompt:
                 messages.insert(0, {"role": "system", "content": system_prompt})
+
+            kwargs.pop("port", None)
+            kwargs.pop("url", None)
+
             payload = {
                 "model": self.kwargs["model"],
                 "messages": messages,
@@ -180,6 +184,9 @@ def _generate(self, prompt, **kwargs):
                 print("Failed to parse JSON response:", response.text)
                 raise Exception("Received invalid JSON response from server")
         else:
+            kwargs.pop("port", None)
+            kwargs.pop("url", None)
+
             payload = {
                 "model": self.kwargs["model"],
                 "prompt": prompt,
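Why the two pop calls matter: "port" and "url" are client-side connection settings accepted by the DSPy vLLM client, not sampling parameters, so if they leak into the request body the vLLM server can reject the payload when it validates it against its pydantic request model. The sketch below is a minimal, hypothetical illustration of that pattern, not DSPy's actual client code; the function name, endpoint, and default values are assumptions.

    import requests

    def vllm_generate(prompt, url="http://localhost", port=8000, **kwargs):
        # Hypothetical sketch: "port" and "url" tell the client where to send
        # the request; they are not sampling parameters, so they are stripped
        # before the JSON body is built.
        kwargs.pop("port", None)
        kwargs.pop("url", None)

        payload = {
            "model": kwargs.pop("model", "default"),
            "prompt": prompt,
            **kwargs,  # remaining keys should be genuine sampling params
        }
        response = requests.post(f"{url}:{port}/v1/completions", json=payload)
        response.raise_for_status()
        return response.json()

Before this change, a leftover "port" or "url" key in kwargs could end up in the payload sent to the server and cause the request to be rejected instead of returning a completion.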
