Skip to content

Commit

Permalink
Update Langchain notebook to use new integration with CometLLM (#162)
Browse files Browse the repository at this point in the history
* Update Langchain notebook to use new integration with CometLLM

* Fix agent example

* Update links to project and Colab
  • Loading branch information
Lothiraldan authored Dec 7, 2023
1 parent a507bf2 commit 2c3dd21
Showing 1 changed file with 18 additions and 146 deletions.
164 changes: 18 additions & 146 deletions integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,11 @@
"source": [
"In this guide we will demonstrate how to track your Langchain Experiments, Evaluation Metrics, and LLM Sessions with [Comet](https://www.comet.com/site/?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook). \n",
"\n",
"<a target=\"_blank\" href=\"https://colab.research.google.com/github/hwchase17/langchain/blob/master/docs/ecosystem/comet_tracking.ipynb\">\n",
"<a target=\"_blank\" href=\"https://colab.research.google.com/github/comet-ml/comet-examples/blob/master/integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb\">\n",
" <img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/>\n",
"</a>\n",
"\n",
"**Example Project:** [Comet with LangChain](https://www.comet.com/examples/comet-example-langchain/view/b5ZThK6OFdhKWVSP3fDfRtrNF/panels?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook)"
"**Example Project:** [Comet with LangChain](https://www.comet.com/examples/comet-example-langchain-notebook/prompts?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook)"
]
},
{
Expand All @@ -47,11 +47,7 @@
"metadata": {},
"outputs": [],
"source": [
"%pip install -U comet_ml \"langchain>=0.0.162\" openai google-search-results spacy textstat pandas\n",
"\n",
"import sys\n",
"\n",
"!{sys.executable} -m spacy download en_core_web_sm"
"%pip install -U comet_llm \"langchain>=0.0.346\" openai google-search-results spacy textstat pandas numexpr"
]
},
{
Expand All @@ -74,9 +70,9 @@
"metadata": {},
"outputs": [],
"source": [
"import comet_ml\n",
"import comet_llm\n",
"\n",
"comet_ml.init(project_name=\"comet-example-langchain\")"
"comet_llm.init(project=\"comet-example-langchain-notebook\")"
]
},
{
Expand Down Expand Up @@ -121,21 +117,15 @@
"source": [
"from datetime import datetime\n",
"\n",
"from langchain.callbacks import CometCallbackHandler\n",
"from langchain.callbacks.tracers.comet import CometTracer\n",
"from langchain.llms import OpenAI\n",
"\n",
"comet_callback = CometCallbackHandler(\n",
" project_name=\"comet-example-langchain\",\n",
" complexity_metrics=True,\n",
" stream_logs=True,\n",
" tags=[\"llm\"],\n",
" visualizations=[\"dep\"],\n",
")\n",
"comet_callback = CometTracer()\n",
"llm = OpenAI(temperature=0.9, callbacks=[comet_callback], verbose=True)\n",
"\n",
"llm_result = llm.generate([\"Tell me a joke\", \"Tell me a poem\", \"Tell me a fact\"] * 3)\n",
"print(\"LLM result\", llm_result)\n",
"comet_callback.flush_tracker(llm, finish=True)"
"comet_callback.flush()"
]
},
{
Expand All @@ -151,17 +141,12 @@
"metadata": {},
"outputs": [],
"source": [
"from langchain.callbacks import CometCallbackHandler\n",
"from langchain.callbacks.tracers.comet import CometTracer\n",
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"comet_callback = CometCallbackHandler(\n",
" complexity_metrics=True,\n",
" project_name=\"comet-example-langchain\",\n",
" stream_logs=True,\n",
" tags=[\"synopsis-chain\"],\n",
")\n",
"comet_callback = CometTracer()\n",
"callbacks = [comet_callback]\n",
"\n",
"llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
Expand All @@ -176,7 +161,7 @@
"\n",
"test_prompts = [{\"title\": \"Documentary about Bigfoot in Paris\"}]\n",
"print(synopsis_chain.apply(test_prompts))\n",
"comet_callback.flush_tracker(synopsis_chain, finish=True)"
"comet_callback.flush()"
]
},
{
Expand All @@ -193,15 +178,10 @@
"outputs": [],
"source": [
"from langchain.agents import initialize_agent, load_tools\n",
"from langchain.callbacks import CometCallbackHandler\n",
"from langchain.callbacks.tracers.comet import CometTracer\n",
"from langchain.llms import OpenAI\n",
"\n",
"comet_callback = CometCallbackHandler(\n",
" project_name=\"comet-example-langchain\",\n",
" complexity_metrics=True,\n",
" stream_logs=True,\n",
" tags=[\"agent\"],\n",
")\n",
"comet_callback = CometTracer()\n",
"callbacks = [comet_callback]\n",
"\n",
"llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
Expand All @@ -215,118 +195,10 @@
" verbose=True,\n",
")\n",
"agent.run(\n",
" \"Who is Leo DiCaprio's girlfriend? What is her current age raised to the 0.43 power?\"\n",
")\n",
"comet_callback.flush_tracker(agent, finish=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Scenario 4: Using Custom Evaluation Metrics"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The `CometCallbackManager` also allows you to define and use Custom Evaluation Metrics to assess generated outputs from your model. Let's take a look at how this works. \n",
"\n",
"\n",
"In the snippet below, we will use the [ROUGE](https://huggingface.co/spaces/evaluate-metric/rouge) metric to evaluate the quality of a generated summary of an input prompt. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%pip install rouge-score"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from rouge_score import rouge_scorer\n",
"\n",
"from langchain.callbacks import CometCallbackHandler\n",
"from langchain.chains import LLMChain\n",
"from langchain.llms import OpenAI\n",
"from langchain.prompts import PromptTemplate\n",
"\n",
"\n",
"class Rouge:\n",
" def __init__(self, reference):\n",
" self.reference = reference\n",
" self.scorer = rouge_scorer.RougeScorer([\"rougeLsum\"], use_stemmer=True)\n",
"\n",
" def compute_metric(self, generation, prompt_idx, gen_idx):\n",
" prediction = generation.text\n",
" results = self.scorer.score(target=self.reference, prediction=prediction)\n",
"\n",
" return {\n",
" \"rougeLsum_score\": results[\"rougeLsum\"].fmeasure,\n",
" \"reference\": self.reference,\n",
" }\n",
"\n",
"\n",
"reference = \"\"\"\n",
"The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building.\n",
"It was the first structure to reach a height of 300 metres.\n",
"\n",
"It is now taller than the Chrysler Building in New York City by 5.2 metres (17 ft)\n",
"Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France .\n",
"\"\"\"\n",
"rouge_score = Rouge(reference=reference)\n",
"\n",
"template = \"\"\"Given the following article, it is your job to write a summary.\n",
"Article:\n",
"{article}\n",
"Summary: This is the summary for the above article:\"\"\"\n",
"prompt_template = PromptTemplate(input_variables=[\"article\"], template=template)\n",
"\n",
"comet_callback = CometCallbackHandler(\n",
" project_name=\"comet-example-langchain\",\n",
" complexity_metrics=False,\n",
" stream_logs=True,\n",
" tags=[\"custom_metrics\"],\n",
" custom_metrics=rouge_score.compute_metric,\n",
")\n",
"callbacks = [comet_callback]\n",
"\n",
"llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
"\n",
"synopsis_chain = LLMChain(\n",
" llm=llm, prompt=prompt_template, callbacks=callbacks, verbose=True\n",
" \"Who is Leo DiCaprio's girlfriend? What is her current age raised to the 0.43 power?\",\n",
" callbacks=callbacks,\n",
")\n",
"\n",
"test_prompts = [\n",
" {\n",
" \"article\": \"\"\"\n",
" The tower is 324 metres (1,063 ft) tall, about the same height as\n",
" an 81-storey building, and the tallest structure in Paris. Its base is square,\n",
" measuring 125 metres (410 ft) on each side.\n",
" During its construction, the Eiffel Tower surpassed the\n",
" Washington Monument to become the tallest man-made structure in the world,\n",
" a title it held for 41 years until the Chrysler Building\n",
" in New York City was finished in 1930.\n",
"\n",
" It was the first structure to reach a height of 300 metres.\n",
" Due to the addition of a broadcasting aerial at the top of the tower in 1957,\n",
" it is now taller than the Chrysler Building by 5.2 metres (17 ft).\n",
"\n",
" Excluding transmitters, the Eiffel Tower is the second tallest\n",
" free-standing structure in France after the Millau Viaduct.\n",
" \"\"\"\n",
" }\n",
"]\n",
"print(synopsis_chain.apply(test_prompts))\n",
"comet_callback.flush_tracker(synopsis_chain, finish=True)"
"comet_callback.flush()"
]
}
],
Expand All @@ -346,9 +218,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}

0 comments on commit 2c3dd21

Please sign in to comment.