From d5a41384879758f379fd5d43e1c10c298e09b5b7 Mon Sep 17 00:00:00 2001
From: Sebastian Sosa <1sebastian1sosa1@gmail.com>
Date: Wed, 9 Aug 2023 20:43:00 -0400
Subject: [PATCH 1/2] migrate langchain to oplangchain with test

---
 .gitignore                             |  3 ++-
 pypi-core/openplugincore/openplugin.py | 14 +++++++-------
 pypi-core/requirements.txt             | Bin 0 -> 1752 bytes
 pypi-core/setup.py                     |  2 +-
 pypi-core/tests/test_e2e.py            | 23 +++++++++++++++++++++++
 5 files changed, 33 insertions(+), 9 deletions(-)
 create mode 100644 pypi-core/requirements.txt

diff --git a/.gitignore b/.gitignore
index 119ed34..1ac7a43 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,4 +14,5 @@
 envOpenplugin
 log.txt
 node_modules
-package-lock.json
\ No newline at end of file
+package-lock.json
+logs
\ No newline at end of file
diff --git a/pypi-core/openplugincore/openplugin.py b/pypi-core/openplugincore/openplugin.py
index 32a1511..66c2a57 100644
--- a/pypi-core/openplugincore/openplugin.py
+++ b/pypi-core/openplugincore/openplugin.py
@@ -6,13 +6,13 @@
 from .types import ChatgptAssistantMessage, ChatgptFunctionMessage, PluginConfigs
 from .utils.constants import openai_models_info
 from .utils.prompting import estimate_tokens, tokens_to_chars, truncate_json_root
-from langchain.chains.openai_functions.openapi import openapi_spec_to_openai_fn
-from langchain.utilities.openapi import OpenAPISpec
-from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser
-from langchain.prompts import ChatPromptTemplate
-from langchain.chat_models import ChatOpenAI
-from langchain.schema import HumanMessage, AIMessage, SystemMessage, FunctionMessage
-from langchain import LLMChain
+from oplangchain.chains.openai_functions.openapi import openapi_spec_to_openai_fn
+from oplangchain.utilities.openapi import OpenAPISpec
+from oplangchain.output_parsers.openai_functions import JsonOutputFunctionsParser
+from oplangchain.prompts import ChatPromptTemplate
+from oplangchain.chat_models import ChatOpenAI
+from oplangchain.schema import HumanMessage, AIMessage, SystemMessage, FunctionMessage
+from oplangchain import LLMChain
 import openai
 from dotenv import load_dotenv
 load_dotenv()
diff --git a/pypi-core/requirements.txt b/pypi-core/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c6dbd052191f661642d7ecf74f881e2fc53a7a16
GIT binary patch
literal 1752
zcmZuxU2oGs5Zq@Z{sVu&7&~o&N|mU11rfv(Psli~ZSWVdLlb^HFtdAWC#n^>iFli7CO)
z-pxx^=VQG;=o(JLz+yKP~bz?;JpR*UX?~;9lzcrLst0>Niv#Y|yIFrY@mjnuY
zV(Z9Ay*XevouPM}hb6Iv#ztqcQ!mWD8)bB#T4Z&=L!KkBoCfnv-z=5nWM9@*2TmsX
zh+TWFnPP1@A+}CN*ORIu+udUqE6{DvPl_Cqfr($%2OuIS;A{%@CQ_YDWv4ymXBE5~
z-*59N!NGILlo#?s-r->u8ob>~te^M#YYRk=WEVhBs2aKGef@Vg6Ds^
zr@V*qVpcw9YFYC-QN>9}7*WiOpIuJUZa7ZURx9l$0$U&Q^?RP&lB+G4zRJCPln=UU
zDJ2VM_Y0}BDt=S)#8&K;fk2UD1o#q3aXjIvE^iEFi
S&Th&RW=#ZF=nSh_PyYcBkP7br

literal 0
HcmV?d00001

diff --git a/pypi-core/setup.py b/pypi-core/setup.py
index a308139..38a32bc 100644
--- a/pypi-core/setup.py
+++ b/pypi-core/setup.py
@@ -4,7 +4,7 @@
 setup(
     # the name must match the folder name 'verysimplemodule'
     name='openplugincore',
-    version="0.6.2",
+    version="0.6.3",
     author="Sebastian Sosa",
     author_email="1sebastian1sosa1@gmail.com",
     description='Seamlessly integrate with OpenAI ChatGPT plugins via API (or client), offering the same powerful functionality as the ChatGPT api + plugins!',
diff --git a/pypi-core/tests/test_e2e.py b/pypi-core/tests/test_e2e.py
index 5fd711e..fb25dbd 100644
--- a/pypi-core/tests/test_e2e.py
+++ b/pypi-core/tests/test_e2e.py
@@ -182,6 +182,7 @@ def test_initiate_and_fetch_scholarai():
     # Replace the line below with a test for the final output in json_content
     assert isinstance(json_content["total_num_results"], int)
 
+@pytest.mark.skip(reason="Not whitelisted")
 def test_initiate_and_fetch_rephrase():
     plugin = OpenPlugin("rephrase")
     assert plugin.manifest is not None
@@ -303,6 +304,28 @@ def test_initiate_and_fetch_Ai_PDF():
     # Replace the line below with a test for the final output in json_content
     assert isinstance(json_content[0], str)
 
+def test_initiate_and_fetch_askyourpdf():
+    plugin = OpenPlugin("askyourpdf", verbose=True)
+    assert plugin.manifest is not None
+
+    chatgpt_prompt = 'summarize this pdf https://eforms.com/download/2018/01/Non-Disclosure-Agreement-Template.pdf'
+    response = plugin.fetch_plugin(
+        messages=[
+            {
+                "role": "user",
+                "content": chatgpt_prompt
+            }
+        ],
+        model="gpt-3.5-turbo-0613",
+        temperature=0,
+    )
+
+    assert response is not None
+    assert response["role"] == "function"
+    json_content = json.loads(response["content"])
+
+    assert len(json_content["summary"]) > 0
+
 """
 TEMPLATE for testing a new plugin
 0. test the plugin with a prompt in ChatGPT

From 8167777fcece5b41e954addbc15f9c5a0ca6799e Mon Sep 17 00:00:00 2001
From: Sebastian Sosa <1sebastian1sosa1@gmail.com>
Date: Wed, 9 Aug 2023 20:50:34 -0400
Subject: [PATCH 2/2] fix CI errors

---
 npm-core/openplugincore/src/index.ts | 2 +-
 pypi-core/setup.py                   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/npm-core/openplugincore/src/index.ts b/npm-core/openplugincore/src/index.ts
index b0879b3..8843475 100644
--- a/npm-core/openplugincore/src/index.ts
+++ b/npm-core/openplugincore/src/index.ts
@@ -4,4 +4,4 @@
 export { OpenPluginMemo } from './openpluginMemo';
 // under utils export openaiModelsInfo
 export { openaiModelsInfo } from './util/constants';
-export { estimateTokens } from './util/prompting';
\ No newline at end of file
+export { estimateTokens } from './util/prompting';
diff --git a/pypi-core/setup.py b/pypi-core/setup.py
index 38a32bc..c18141d 100644
--- a/pypi-core/setup.py
+++ b/pypi-core/setup.py
@@ -17,7 +17,7 @@
     install_requires=[
         'requests',
         'openai',
-        'langchain',
+        'oplangchain',
        'python-dotenv',
     ],
     # add any additional packages