Adjusted prompt for snippet analysis and API return
Signed-off-by: Jiri Podivin <[email protected]>
jpodivin committed Dec 3, 2024
1 parent 66b80f1 commit a44f259
Showing 2 changed files with 37 additions and 10 deletions.
logdetective/constants.py (20 additions, 3 deletions)
@@ -32,9 +32,7 @@
 """
 
 SNIPPET_PROMPT_TEMPLATE = """
-Analyse following RPM build log snippet.
-Analysis of the snippets must be in a format of [X] : [Y], where [X] is a log snippet, and [Y] is the explanation.
-Snippets themselves must not be altered in any way whatsoever.
+Analyse the following RPM build log snippet. Describe its contents accurately, without speculation or suggestions for resolution.
 Snippet:
@@ -43,3 +41,22 @@
 Analysis:
 """
 
+PROMPT_TEMPLATE_STAGED = """
+Given the following log snippets, their explanations, and nothing else, explain what failure, if any, occurred during the build of this package.
+Snippets are in the format [X] : [Y], where [X] is a log snippet and [Y] is its explanation.
+Snippets are delimited with '================'.
+Drawing on information from all snippets, provide a complete explanation of the issue and recommend a solution.
+Snippets:
+{}
+Analysis:
+"""
+
+SNIPPET_DELIMITER = '================'
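
To see how the two templates compose, here is a minimal sketch of the staged prompt assembly, assuming the logdetective package is importable; the snippet texts and per-snippet comments are invented for illustration:

from logdetective.constants import (
    SNIPPET_PROMPT_TEMPLATE, PROMPT_TEMPLATE_STAGED, SNIPPET_DELIMITER)

# Hypothetical log snippets and per-snippet explanations, for illustration only.
snippets = ["error: linker 'cc' not found", "No space left on device"]
comments = ["The linker binary is missing.", "The build ran out of disk space."]

# Stage one: each snippet gets its own analysis prompt.
snippet_prompts = [SNIPPET_PROMPT_TEMPLATE.format(s) for s in snippets]

# Stage two: the final prompt joins the [snippet] : [explanation] pairs,
# separated by SNIPPET_DELIMITER, for one overall analysis.
staged_prompt = PROMPT_TEMPLATE_STAGED.format(
    "".join(f"[{s}] : [{c}]\n{SNIPPET_DELIMITER}\n"
            for s, c in zip(snippets, comments)))
print(staged_prompt)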
logdetective/server.py (17 additions, 7 deletions)
@@ -2,15 +2,17 @@
 import json
 import logging
 import os
-from typing import List, Annotated
+from typing import List, Annotated, Dict
 
 from llama_cpp import CreateCompletionResponse
 from fastapi import FastAPI, HTTPException, Depends, Header
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 import requests
 
-from logdetective.constants import PROMPT_TEMPLATE, SNIPPET_PROMPT_TEMPLATE
+from logdetective.constants import (
+    PROMPT_TEMPLATE, SNIPPET_PROMPT_TEMPLATE,
+    PROMPT_TEMPLATE_STAGED, SNIPPET_DELIMITER)
 from logdetective.extractors import DrainExtractor
 from logdetective.utils import validate_url, compute_certainty

@@ -38,10 +40,10 @@ class StagedResponse(Response):
     explanation: CreateCompletionResponse
         https://llama-cpp-python.readthedocs.io/en/latest/api-reference/#llama_cpp.llama_types.CreateCompletionResponse
     response_certainty: float
-    snippets: list of CreateCompletionResponse
+    snippets:
+        list of dictionaries { 'snippet': '<original_text>', 'comment': CreateCompletionResponse }
     """
-    snippets: List[CreateCompletionResponse]
+    snippets: List[Dict[str, str | CreateCompletionResponse]]
 
 LOG = logging.getLogger("logdetective")
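
With that model change, a serialized StagedResponse would look roughly like the sketch below; all field values are invented for illustration, and the CreateCompletionResponse dicts are abbreviated to the fields used here:

# Illustrative shape of the new StagedResponse payload; values are made up.
staged_response = {
    "explanation": {"choices": [{"text": "The build failed because ..."}]},
    "response_certainty": 87.5,
    "snippets": [
        {
            "snippet": "error: linker 'cc' not found",  # original log text
            "comment": {  # CreateCompletionResponse for this snippet
                "choices": [{"text": "The linker binary is missing."}],
            },
        },
    ],
}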

@@ -208,10 +210,18 @@ async def analyze_log_staged(build_log: BuildLog):
     analyzed_snippets = await asyncio.gather(
         *[submit_text(SNIPPET_PROMPT_TEMPLATE.format(s)) for s in log_summary])
 
-    final_analysis = await submit_text(
-        PROMPT_TEMPLATE.format([e["choices"][0]["text"] for e in analyzed_snippets]))
+    # Pair each original snippet with the model's comment on it.
+    analyzed_snippets = [
+        {"snippet": e[0], "comment": e[1]} for e in zip(log_summary, analyzed_snippets)]
+
+    # Join the [snippet] : [comment] pairs with the delimiter; single quotes
+    # inside the f-string keep it valid on Python versions before 3.12.
+    final_prompt = PROMPT_TEMPLATE_STAGED.format(
+        "".join(f"[{e['snippet']}] : [{e['comment']['choices'][0]['text']}]\n{SNIPPET_DELIMITER}\n"
+                for e in analyzed_snippets))
+
+    LOG.debug("Staged analysis prompt:\n%s", final_prompt)
+    final_analysis = await submit_text(final_prompt)
 
     certainty = 0
 
     if "logprobs" in final_analysis["choices"][0]:
         try:
             certainty = compute_certainty(
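
For a quick end-to-end check of the staged analysis, a client call might look like the sketch below; the /analyze/staged path, server address, and BuildLog field name are assumptions, since the route decorator and request model are outside this diff:

import requests

# Hypothetical endpoint and payload; adjust to the actual route and BuildLog model.
resp = requests.post(
    "http://localhost:8080/analyze/staged",
    json={"url": "https://example.org/build.log"},
    timeout=600)
resp.raise_for_status()
data = resp.json()
for item in data["snippets"]:
    print(item["snippet"], "->", item["comment"]["choices"][0]["text"])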
