Commit
Optimize chat caching by repositioning definitions in prompt structure.
pelikhan committed Oct 1, 2024
1 parent 820ca82 commit 42d841b
Showing 1 changed file with 26 additions and 0 deletions.
26 changes: 26 additions & 0 deletions packages/core/src/promptdom.ts
@@ -510,6 +510,29 @@ export interface PromptNodeRender {
    fileOutputs: FileOutput[] // File outputs
}

/**
 * To optimize chat caching with OpenAI, move defs to the back of the prompt
 * so that the stable, non-def content forms a cacheable prefix.
 * @see https://platform.openai.com/docs/guides/prompt-caching
 * @param mode layout mode
 * @param root root prompt node whose children are reordered
 */
async function layoutPromptNode(mode: string, root: PromptNode) {
    let changed = false
    await visitNode(root, {
        node: (n) => {
            // stable-sort children so that "def" nodes move to the back
            // while the relative order of all other nodes is preserved
            const before = n.children?.map((c) => c.preview)?.join("\n")
            n.children?.sort(
                (a, b) =>
                    (a.type === "def" ? 1 : -1) - (b.type === "def" ? 1 : -1)
            )
            const after = n.children?.map((c) => c.preview)?.join("\n")
            // report whether any reordering actually happened
            changed = changed || before !== after
        },
    })
    return changed
}

// Function to resolve a prompt node.
async function resolvePromptNode(
    model: string,
@@ -874,6 +897,9 @@ export async function renderPromptNode(
    await resolvePromptNode(model, node)
    await tracePromptNode(trace, node)

    // reorder defs to the back of the prompt to improve cache hits
    if (await layoutPromptNode(model, node))
        await tracePromptNode(trace, node, { label: "layout" })

    if (flexTokens)
        await flexPromptNode(node, {
            ...options,
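For context, here is a minimal standalone sketch of the reordering that layoutPromptNode performs. The SimpleNode shape, the moveDefsToBack helper, and the example previews are invented for illustration (the real PromptNode in promptdom.ts has more fields); the comparator and reliance on stable sorting are the same as in the commit. The idea is that non-def content such as instruction text stays at the front of the prompt as a stable prefix, which is the part OpenAI's prompt caching can reuse across requests, while file defs that vary between runs move to the back.

// Hypothetical, simplified node shape for illustration only.
interface SimpleNode {
    type: string
    preview?: string
    children?: SimpleNode[]
}

// Stable sort: "def" children move to the back, everything else keeps
// its relative order at the front of the prompt.
function moveDefsToBack(node: SimpleNode): boolean {
    const before = node.children?.map((c) => c.preview)?.join("\n")
    node.children?.sort(
        (a, b) => (a.type === "def" ? 1 : -1) - (b.type === "def" ? 1 : -1)
    )
    const after = node.children?.map((c) => c.preview)?.join("\n")
    return before !== after
}

// Example: the instruction text ends up first, the two defs move after it.
const root: SimpleNode = {
    type: "root",
    children: [
        { type: "def", preview: "FILE src/index.ts ..." },
        { type: "text", preview: "You are a code reviewer." },
        { type: "def", preview: "FILE src/util.ts ..." },
    ],
}
moveDefsToBack(root) // true; order is now text, def, def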
