Skip to content

Commit

Permalink
feat: anthropic invoke with function call
Browse files Browse the repository at this point in the history
  • Loading branch information
monuelo committed Sep 19, 2024
1 parent f4b313b commit cadea62
Show file tree
Hide file tree
Showing 7 changed files with 202 additions and 117 deletions.
22 changes: 11 additions & 11 deletions anthropic/actions/code.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { shortcircuit } from "@deco/deco";
import { AppContext } from "../mod.ts";
import { Anthropic } from "../deps.ts";
import { shortcircuit } from "@deco/deco";
export interface Props {
/**
* @description The system prompt to be used for the AI Assistant.
Expand All @@ -13,14 +13,7 @@ export interface Props {
/**
* @description The model that will complete your prompt.
*/
model?:
| "claude-3-5-sonnet-20240620"
| "claude-3-opus-20240229"
| "claude-3-sonnet-20240229"
| "claude-3-haiku-20240307"
| "claude-2.1"
| "claude-2.0"
| "claude-instant-1.2";
model?: Anthropic.Model;
/**
* @description The maximum number of tokens to generate.
*
Expand All @@ -29,20 +22,27 @@ export interface Props {
*/
max_tokens?: number;
}

/**
 * @description Sends messages to the Anthropic Messages API and returns the
 * completed (non-streaming) response.
 *
 * Note: this span contained both the pre-change and post-change parameter
 * destructuring interleaved by the diff view; this is the post-commit form.
 */
export default async function chat(
  {
    system,
    messages,
    model = "claude-3-opus-20240229",
    max_tokens = 4096,
  }: Props,
  _req: Request,
  ctx: AppContext,
) {
  // Guard against a missing payload even though Props marks `messages` required.
  if (!messages) {
    return shortcircuit(new Response("No messages provided", { status: 400 }));
  }

  const msg = await ctx.anthropic.messages.create({
    system,
    model,
    max_tokens,
    messages,
  });

  return msg;
}
69 changes: 69 additions & 0 deletions anthropic/actions/invoke.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import { shortcircuit } from "@deco/deco";
import { AppContext } from "../mod.ts";
import { Anthropic } from "../deps.ts";
import { getAppTools } from "../utils.ts";

export interface Props {
  /**
   * @description The system prompt to be used for the AI Assistant.
   */
  system?: string;
  /**
   * @description The messages to be processed by the AI Assistant.
   */
  messages: Anthropic.MessageParam[];
  /**
   * @description The model that will complete your prompt.
   * Defaults to "claude-3-5-sonnet-20240620" when omitted.
   */
  model?: Anthropic.Model;
  /**
   * @description The maximum number of tokens to generate.
   * Defaults to 4096 when omitted.
   */
  max_tokens?: number;
  /**
   * @description Optional list of available functions (actions or loaders) that the AI Assistant can perform.
   * Resolved to Anthropic tool definitions via getAppTools before the request.
   */
  availableFunctions?: string[];
  /**
   * @description The tool choice to be used for the AI Assistant.
   * Defaults to { type: "auto" } when omitted.
   */
  tool_choice?: Anthropic.MessageCreateParams["tool_choice"];
}

/**
 * @description Calls the Anthropic Messages API with the app's actions and
 * loaders exposed as tools the model is allowed to invoke. Returns the raw
 * message response, or short-circuits with a 400 (no messages) / 500 (API
 * failure) response.
 */
export default async function invoke(
  {
    system,
    messages,
    model = "claude-3-5-sonnet-20240620",
    max_tokens = 4096,
    availableFunctions = [],
    tool_choice = { type: "auto" },
  }: Props,
  _req: Request,
  ctx: AppContext,
) {
  if (!messages) {
    // Nothing to send — bail out early with a client error.
    return shortcircuit(new Response("No messages provided", { status: 400 }));
  }

  // Map the requested function keys to Anthropic tool definitions.
  // `?? []` also guards against an explicit null bypassing the default.
  const tools = await getAppTools(availableFunctions ?? []);

  try {
    // Payload inlined into the call; shape is checked by the SDK signature.
    return await ctx.anthropic.messages.create({
      system,
      model,
      max_tokens,
      messages,
      tools,
      tool_choice,
    });
  } catch (error) {
    // Log the failure server-side; callers only see an opaque 500.
    console.error("Error calling Anthropic API:", error);
    return shortcircuit(
      new Response("Error processing request", { status: 500 }),
    );
  }
}
120 changes: 19 additions & 101 deletions anthropic/actions/stream.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,9 @@
import { dereferenceJsonSchema } from "../../ai-assistants/schema.ts";
import { shortcircuit } from "@deco/deco";
import { readFromStream } from "@deco/deco/utils";
import { Anthropic } from "../deps.ts";
import { AppContext } from "../mod.ts";
import {
Context,
type JSONSchema7,
lazySchemaFor,
shortcircuit,
} from "@deco/deco";
import { readFromStream } from "@deco/deco/utils";
import { getAppTools } from "../utils.ts";

export interface Props {
/**
* @description The system prompt to be used for the AI Assistant.
Expand All @@ -20,99 +16,24 @@ export interface Props {
/**
* @description The messages to be processed by the AI Assistant.
*/
messages: Anthropic.Beta.Tools.ToolsBetaMessageParam[];
messages: Anthropic.MessageParam[];
/**
* Optional list of available functions (actions or loaders) that the AI Assistant can perform.
*/
availableFunctions?: string[];
/**
* @description The model that will complete your prompt.
*/
model?:
| "claude-3-5-sonnet-20240620"
| "claude-3-opus-20240229"
| "claude-3-sonnet-20240229"
| "claude-3-haiku-20240307"
| "claude-2.1"
| "claude-2.0"
| "claude-instant-1.2";
model?: Anthropic.Model;
/**
* @description The maximum number of tokens to generate.
*
* Different models have different maximum values for this parameter. See
* [models](https://docs.anthropic.com/en/docs/models-overview) for details.
*/
max_tokens?: number;
enableTools?: boolean;
temperature?: number;
}
const notUndefined = <T>(v: T | undefined): v is T => v !== undefined;
const pathFormatter = {
encode: (path: string): string =>
path.replace(/\.ts/g, "").replace(/\//g, "__"),
decode: (encodedPath: string): string =>
encodedPath.replace(/__/g, "/") + ".ts",
};
/**
* Retrieves the available tools for the AI Assistant.
* @param availableFunctions List of functions available for the AI Assistant.
* @returns Promise resolving to a list of tools.
*/
const getAppTools = async (
availableFunctions: string[],
): Promise<Anthropic.Beta.Tools.Tool[] | undefined> => {
const ctx = Context.active();
const runtime = await ctx.runtime!;
const schemas = await lazySchemaFor(ctx).value;
const functionKeys = availableFunctions ??
Object.keys({
...runtime.manifest.loaders,
...runtime.manifest.actions,
});
const tools = functionKeys
.map((functionKey) => {
const functionDefinition = btoa(functionKey);
const schema = schemas.definitions[functionDefinition];
if (
(schema as {
ignoreAI?: boolean;
})?.ignoreAI
) {
return undefined;
}
const propsRef = (schema?.allOf?.[0] as JSONSchema7)?.$ref;
if (!propsRef) {
return undefined;
}
const dereferenced = dereferenceJsonSchema({
$ref: propsRef,
...schemas,
});
if (
dereferenced.type !== "object" ||
dereferenced.oneOf ||
dereferenced.anyOf ||
dereferenced.allOf ||
dereferenced.enum ||
dereferenced.not
) {
return undefined;
}
return {
name: pathFormatter.encode(functionKey),
description:
`Usage for: ${schema?.description}. Example: ${schema?.examples}`,
input_schema: {
...dereferenced,
definitions: undefined,
root: undefined,
title: undefined,
},
};
})
.filter(notUndefined);
return tools as Anthropic.Beta.Tools.Tool[] | undefined;
};

/**
* @title Anthropic chat streaming
* @description Sends messages to the Anthropic API for processing.
Expand All @@ -124,48 +45,45 @@ export default async function chat(
availableFunctions,
model = "claude-3-5-sonnet-20240620",
max_tokens = 1024,
temperature = 1.0,
enableTools,
}: Props,
_req: Request,
ctx: AppContext,
) {
if (!messages) {
return shortcircuit(new Response("No messages provided", { status: 400 }));
}

const tools = await getAppTools(availableFunctions ?? []);

const headers = {
"anthropic-version": "2023-06-01",
"content-type": "application/json",
"anthropic-beta": "max-tokens-3-5-sonnet-2024-07-15",
"x-api-key": ctx.token ?? "",
};
let payload: Anthropic.Beta.Tools.MessageCreateParamsStreaming = {

const payload: Anthropic.MessageCreateParamsStreaming = {
system,
messages,
model,
max_tokens,
temperature,
temperature: 0.5,
stream: true,
tools,
tool_choice: { type: "auto" },
};
if (enableTools) {
payload = {
...payload,
tools,
tool_choice: { type: "auto" },
};
}

const response = await fetch("https://api.anthropic.com/v1/messages", {
method: "POST",
headers,
body: JSON.stringify(payload),
});

if (!response.ok) {
console.error("Failed to send messages to Anthropic API:", response.text);
return shortcircuit(
new Response(await response.text(), { status: response.status }),
);
}
return readFromStream(response, {
shouldDecodeChunk: false,
});

return readFromStream(response);
}
2 changes: 1 addition & 1 deletion anthropic/deps.ts
Original file line number Diff line number Diff line change
@@ -1 +1 @@
export { default as Anthropic } from "https://esm.sh/@anthropic-ai/sdk@0.21.1";
export { default as Anthropic } from "https://esm.sh/@anthropic-ai/sdk@0.27.3";
6 changes: 4 additions & 2 deletions anthropic/manifest.gen.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
// This file is automatically updated during development when running `dev.ts`.

import * as $$$$$$$$$0 from "./actions/code.ts";
import * as $$$$$$$$$1 from "./actions/stream.ts";
import * as $$$$$$$$$1 from "./actions/invoke.ts";
import * as $$$$$$$$$2 from "./actions/stream.ts";

const manifest = {
"actions": {
"anthropic/actions/code.ts": $$$$$$$$$0,
"anthropic/actions/stream.ts": $$$$$$$$$1,
"anthropic/actions/invoke.ts": $$$$$$$$$1,
"anthropic/actions/stream.ts": $$$$$$$$$2,
},
"name": "anthropic",
"baseUrl": import.meta.url,
Expand Down
Loading

0 comments on commit cadea62

Please sign in to comment.