diff --git a/integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb b/integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb
index bb637913..7ae69d71 100644
--- a/integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb
+++ b/integrations/llm/langchain/notebooks/Comet_with_Langchain.ipynb
@@ -20,11 +20,11 @@
  "source": [
  "In this guide we will demonstrate how to track your Langchain Experiments, Evaluation Metrics, and LLM Sessions with [Comet](https://www.comet.com/site/?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook). \n",
  "\n",
- "\n",
+ "\n",
  " \"Open\n",
  "\n",
  "\n",
- "**Example Project:** [Comet with LangChain](https://www.comet.com/examples/comet-example-langchain/view/b5ZThK6OFdhKWVSP3fDfRtrNF/panels?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook)"
+ "**Example Project:** [Comet with LangChain](https://www.comet.com/examples/comet-example-langchain-notebook/prompts?utm_source=langchain&utm_medium=referral&utm_campaign=comet_notebook)"
  ]
  },
  {
@@ -47,11 +47,7 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "%pip install -U comet_ml \"langchain>=0.0.162\" openai google-search-results spacy textstat pandas\n",
- "\n",
- "import sys\n",
- "\n",
- "!{sys.executable} -m spacy download en_core_web_sm"
+ "%pip install -U comet_llm \"langchain>=0.0.346\" openai google-search-results spacy textstat pandas numexpr"
  ]
  },
  {
@@ -74,9 +70,9 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "import comet_ml\n",
+ "import comet_llm\n",
  "\n",
- "comet_ml.init(project_name=\"comet-example-langchain\")"
+ "comet_llm.init(project=\"comet-example-langchain-notebook\")"
  ]
  },
  {
@@ -121,21 +117,15 @@
  "source": [
  "from datetime import datetime\n",
  "\n",
- "from langchain.callbacks import CometCallbackHandler\n",
+ "from langchain.callbacks.tracers.comet import CometTracer\n",
  "from langchain.llms import OpenAI\n",
  "\n",
- "comet_callback = CometCallbackHandler(\n",
- " project_name=\"comet-example-langchain\",\n",
- " complexity_metrics=True,\n",
- " stream_logs=True,\n",
- " tags=[\"llm\"],\n",
- " visualizations=[\"dep\"],\n",
- ")\n",
+ "comet_callback = CometTracer()\n",
  "llm = OpenAI(temperature=0.9, callbacks=[comet_callback], verbose=True)\n",
  "\n",
  "llm_result = llm.generate([\"Tell me a joke\", \"Tell me a poem\", \"Tell me a fact\"] * 3)\n",
  "print(\"LLM result\", llm_result)\n",
- "comet_callback.flush_tracker(llm, finish=True)"
+ "comet_callback.flush()"
  ]
  },
  {
@@ -151,17 +141,12 @@
  "metadata": {},
  "outputs": [],
  "source": [
- "from langchain.callbacks import CometCallbackHandler\n",
+ "from langchain.callbacks.tracers.comet import CometTracer\n",
  "from langchain.chains import LLMChain\n",
  "from langchain.llms import OpenAI\n",
  "from langchain.prompts import PromptTemplate\n",
  "\n",
- "comet_callback = CometCallbackHandler(\n",
- " complexity_metrics=True,\n",
- " project_name=\"comet-example-langchain\",\n",
- " stream_logs=True,\n",
- " tags=[\"synopsis-chain\"],\n",
- ")\n",
+ "comet_callback = CometTracer()\n",
  "callbacks = [comet_callback]\n",
  "\n",
  "llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
@@ -176,7 +161,7 @@
  "\n",
  "test_prompts = [{\"title\": \"Documentary about Bigfoot in Paris\"}]\n",
  "print(synopsis_chain.apply(test_prompts))\n",
- "comet_callback.flush_tracker(synopsis_chain, finish=True)"
+ "comet_callback.flush()"
  ]
  },
  {
@@ -193,15 +178,10 @@
  "outputs": [],
  "source": [
  "from langchain.agents import initialize_agent, load_tools\n",
- "from langchain.callbacks import CometCallbackHandler\n",
+ "from langchain.callbacks.tracers.comet import CometTracer\n",
  "from langchain.llms import OpenAI\n",
  "\n",
- "comet_callback = CometCallbackHandler(\n",
- " project_name=\"comet-example-langchain\",\n",
- " complexity_metrics=True,\n",
- " stream_logs=True,\n",
- " tags=[\"agent\"],\n",
- ")\n",
+ "comet_callback = CometTracer()\n",
  "callbacks = [comet_callback]\n",
  "\n",
  "llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
@@ -215,118 +195,10 @@
  " verbose=True,\n",
  ")\n",
  "agent.run(\n",
- " \"Who is Leo DiCaprio's girlfriend? What is her current age raised to the 0.43 power?\"\n",
- ")\n",
- "comet_callback.flush_tracker(agent, finish=True)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Scenario 4: Using Custom Evaluation Metrics"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "The `CometCallbackManager` also allows you to define and use Custom Evaluation Metrics to assess generated outputs from your model. Let's take a look at how this works. \n",
- "\n",
- "\n",
- "In the snippet below, we will use the [ROUGE](https://huggingface.co/spaces/evaluate-metric/rouge) metric to evaluate the quality of a generated summary of an input prompt. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "%pip install rouge-score"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from rouge_score import rouge_scorer\n",
- "\n",
- "from langchain.callbacks import CometCallbackHandler\n",
- "from langchain.chains import LLMChain\n",
- "from langchain.llms import OpenAI\n",
- "from langchain.prompts import PromptTemplate\n",
- "\n",
- "\n",
- "class Rouge:\n",
- " def __init__(self, reference):\n",
- " self.reference = reference\n",
- " self.scorer = rouge_scorer.RougeScorer([\"rougeLsum\"], use_stemmer=True)\n",
- "\n",
- " def compute_metric(self, generation, prompt_idx, gen_idx):\n",
- " prediction = generation.text\n",
- " results = self.scorer.score(target=self.reference, prediction=prediction)\n",
- "\n",
- " return {\n",
- " \"rougeLsum_score\": results[\"rougeLsum\"].fmeasure,\n",
- " \"reference\": self.reference,\n",
- " }\n",
- "\n",
- "\n",
- "reference = \"\"\"\n",
- "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building.\n",
- "It was the first structure to reach a height of 300 metres.\n",
- "\n",
- "It is now taller than the Chrysler Building in New York City by 5.2 metres (17 ft)\n",
- "Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France .\n",
- "\"\"\"\n",
- "rouge_score = Rouge(reference=reference)\n",
- "\n",
- "template = \"\"\"Given the following article, it is your job to write a summary.\n",
- "Article:\n",
- "{article}\n",
- "Summary: This is the summary for the above article:\"\"\"\n",
- "prompt_template = PromptTemplate(input_variables=[\"article\"], template=template)\n",
- "\n",
- "comet_callback = CometCallbackHandler(\n",
- " project_name=\"comet-example-langchain\",\n",
- " complexity_metrics=False,\n",
- " stream_logs=True,\n",
- " tags=[\"custom_metrics\"],\n",
- " custom_metrics=rouge_score.compute_metric,\n",
- ")\n",
- "callbacks = [comet_callback]\n",
- "\n",
- "llm = OpenAI(temperature=0.9, callbacks=callbacks, verbose=True)\n",
- "\n",
- "synopsis_chain = LLMChain(\n",
- " llm=llm, prompt=prompt_template, callbacks=callbacks, verbose=True\n",
+ " \"Who is Leo DiCaprio's girlfriend? What is her current age raised to the 0.43 power?\",\n",
+ " callbacks=callbacks,\n",
  ")\n",
- "\n",
- "test_prompts = [\n",
- " {\n",
- " \"article\": \"\"\"\n",
- " The tower is 324 metres (1,063 ft) tall, about the same height as\n",
- " an 81-storey building, and the tallest structure in Paris. Its base is square,\n",
- " measuring 125 metres (410 ft) on each side.\n",
- " During its construction, the Eiffel Tower surpassed the\n",
- " Washington Monument to become the tallest man-made structure in the world,\n",
- " a title it held for 41 years until the Chrysler Building\n",
- " in New York City was finished in 1930.\n",
- "\n",
- " It was the first structure to reach a height of 300 metres.\n",
- " Due to the addition of a broadcasting aerial at the top of the tower in 1957,\n",
- " it is now taller than the Chrysler Building by 5.2 metres (17 ft).\n",
- "\n",
- " Excluding transmitters, the Eiffel Tower is the second tallest\n",
- " free-standing structure in France after the Millau Viaduct.\n",
- " \"\"\"\n",
- " }\n",
- "]\n",
- "print(synopsis_chain.apply(test_prompts))\n",
- "comet_callback.flush_tracker(synopsis_chain, finish=True)"
+ "comet_callback.flush()"
  ]
  }
 ],
@@ -346,9 +218,9 @@
  "name": "python",
  "nbconvert_exporter": "python",
  "pygments_lexer": "ipython3",
- "version": "3.11.3"
+ "version": "3.10.12"
  }
  },
  "nbformat": 4,
- "nbformat_minor": 2
+ "nbformat_minor": 4
 }