Skip to content

Commit

Permalink
Merge pull request #119 from samchon/feat/separate
Browse files Browse the repository at this point in the history
Fix `IHttpLlmFunction.separated` composing bug.
  • Loading branch information
samchon authored Jan 5, 2025
2 parents 3d92b1d + 150ce80 commit ba85b68
Show file tree
Hide file tree
Showing 11 changed files with 195 additions and 2 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@samchon/openapi",
"version": "2.3.1",
"version": "2.3.2",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",
"main": "./lib/index.js",
"module": "./lib/index.mjs",
Expand Down
2 changes: 1 addition & 1 deletion src/composers/HttpLlmApplicationComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ export namespace HttpLlmComposer {
additionalProperties: false,
required: properties.map(([k]) => k),
} as any as ILlmSchema.ModelParameters[Model];
if (Object.keys($defs).length)
if (LlmSchemaComposer.isDefs(props.model))
(parameters as any as IChatGptSchema.IParameters).$defs = $defs;
const operation: OpenApi.IOperation = props.route.operation();

Expand Down
19 changes: 19 additions & 0 deletions src/composers/LlmSchemaComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,13 @@ export namespace LlmSchemaComposer {
export const separateParameters = <Model extends ILlmSchema.Model>(
model: Model,
) => SEPARATE_PARAMETERS[model];

/**
 * Whether the target model's schema language supports the JSON schema
 * `$defs` dictionary of reusable named definitions (delegates to the
 * per-model `IS_DEFS` table below).
 *
 * @internal
 */
export const isDefs = <Model extends ILlmSchema.Model>(
  model: Model,
): boolean => IS_DEFS[model]();
}

const PARAMETERS_CASTERS = {
Expand Down Expand Up @@ -95,3 +102,15 @@ const TYPE_CHECKERS = {
"3.0": LlmTypeCheckerV3,
"3.1": LlmTypeCheckerV3_1,
};

/**
 * Per-model capability table: whether the schema model supports the
 * JSON schema `$defs` dictionary. Gemini and OpenAPI v3.0 do not.
 * Wrapped in thunks so each composer constant is read lazily.
 *
 * @internal
 */
const IS_DEFS = {
  chatgpt: () => ChatGptSchemaComposer.IS_DEFS,
  claude: () => ClaudeSchemaComposer.IS_DEFS,
  gemini: () => GeminiSchemaComposer.IS_DEFS,
  llama: () => LlamaSchemaComposer.IS_DEFS,
  "3.0": () => LlmSchemaV3Composer.IS_DEFS,
  "3.1": () => LlmSchemaV3_1Composer.IS_DEFS,
};
5 changes: 5 additions & 0 deletions src/composers/llm/ChatGptSchemaComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker";
import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer";

export namespace ChatGptSchemaComposer {
/**
 * The ChatGPT schema model supports the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = true;

export const parameters = (props: {
config: IChatGptSchema.IConfig;
components: OpenApi.IComponents;
Expand Down
5 changes: 5 additions & 0 deletions src/composers/llm/ClaudeSchemaComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,11 @@ import { IResult } from "../../typings/IResult";
import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer";

export namespace ClaudeSchemaComposer {
/**
 * The Claude schema model supports the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = true;

export const parameters = (props: {
config: IClaudeSchema.IConfig;
components: OpenApi.IComponents;
Expand Down
5 changes: 5 additions & 0 deletions src/composers/llm/GeminiSchemaComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,11 @@ import { LlmParametersFinder } from "./LlmParametersComposer";
import { LlmSchemaV3Composer } from "./LlmSchemaV3Composer";

export namespace GeminiSchemaComposer {
/**
 * The Gemini schema model does NOT support the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = false;

export const parameters = (props: {
config: IGeminiSchema.IConfig;
components: OpenApi.IComponents;
Expand Down
5 changes: 5 additions & 0 deletions src/composers/llm/LlamaSchemaComposer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,11 @@ import { IResult } from "../../typings/IResult";
import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer";

export namespace LlamaSchemaComposer {
/**
 * The Llama schema model supports the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = true;

export const parameters = (props: {
config: ILlamaSchema.IConfig;
components: OpenApi.IComponents;
Expand Down
5 changes: 5 additions & 0 deletions src/composers/llm/LlmSchemaV3Composer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker";
import { LlmParametersFinder } from "./LlmParametersComposer";

export namespace LlmSchemaV3Composer {
/**
 * The OpenAPI v3.0 schema model does NOT support the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = false;

export const parameters = (props: {
config: ILlmSchemaV3.IConfig;
components: OpenApi.IComponents;
Expand Down
5 changes: 5 additions & 0 deletions src/composers/llm/LlmSchemaV3_1Composer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ import { JsonDescriptionUtil } from "../../utils/internal/JsonDescriptionUtil";
import { LlmParametersFinder } from "./LlmParametersComposer";

export namespace LlmSchemaV3_1Composer {
/**
 * The OpenAPI v3.1 schema model supports the `$defs` dictionary.
 *
 * @internal
 */
export const IS_DEFS = true;

export const parameters = (props: {
config: ILlmSchemaV3_1.IConfig;
components: OpenApi.IComponents;
Expand Down
72 changes: 72 additions & 0 deletions test/examples/chatgpt-structured-output.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import OpenAI from "openai";
import typia, { IValidation, tags } from "typia";

/**
 * A community member — the structured output the LLM must produce.
 *
 * `typia` tags attach JSON-schema format constraints to plain types.
 */
interface IMember {
  // E-mail address; validated with JSON schema `format: "email"`.
  email: string & tags.Format<"email">;
  // Display name.
  name: string;
  // Age in years — NOTE(review): no range constraint; confirm intended.
  age: number;
  // Free-form hobby list.
  hobbies: string[];
  // Join date; validated with JSON schema `format: "date"`.
  joined_at: string & tags.Format<"date">;
}

/**
 * Ask the LLM for a structured {@link IMember} payload, optionally feeding
 * back the validation errors of a previous failed attempt so the model can
 * correct itself on the next round.
 *
 * @param failure Validation failure of the previous round, if any
 * @returns Runtime-validated result of the newly generated member data
 */
const step = async (
  failure?: IValidation.IFailure | undefined,
): Promise<IValidation<IMember>> => {
  const client: OpenAI = new OpenAI({
    apiKey: "<YOUR_OPENAI_API_KEY>",
  });
  const completion: OpenAI.ChatCompletion =
    await client.chat.completions.create({
      model: "gpt-4o",
      messages: [
        {
          role: "user",
          content: [
            "I am a new member of the community.",
            "",
            "My name is John Doe, and I am 25 years old.",
            "I like playing basketball and reading books,",
            "and joined to this community at 2022-01-01.",
          ].join("\n"),
        },
        // On retry, show the model the exact type errors it must repair.
        // (typo fix: "mistak" -> "mistake", "returing" -> "returning")
        ...(failure
          ? [
              {
                role: "system",
                content: [
                  "You A.I. agent had taken a mistake that",
                  "returning wrong typed structured data.",
                  "",
                  "Here is the detailed list of type errors.",
                  "Review and correct them at the next step.",
                  "",
                  "```json",
                  JSON.stringify(failure.errors, null, 2),
                  "```",
                ].join("\n"),
              } satisfies OpenAI.ChatCompletionSystemMessageParam,
            ]
          : []),
      ],
      // Structured output: the JSON schema is generated at compile time
      // by typia from the IMember type, in ChatGPT's schema dialect.
      response_format: {
        type: "json_schema",
        json_schema: {
          name: "member",
          schema: typia.llm.parameters<IMember, "chatgpt">() as any,
        },
      },
    });
  // `content` is non-null for a successful structured-output completion;
  // the parsed value is still runtime-validated below rather than trusted.
  const member: IMember = JSON.parse(completion.choices[0].message.content!);
  return typia.validate(member);
};

/**
 * Retry loop: invoke {@link step} up to three times, feeding each failed
 * validation back in, and stop as soon as a round validates successfully.
 */
const main = async (): Promise<void> => {
  let outcome: IValidation<IMember> | undefined = undefined;
  let attempt: number = 0;
  while (attempt < 3 && !(outcome && outcome.success === true)) {
    outcome = await step(outcome);
    ++attempt;
  }
  console.log(outcome);
};

main().catch(console.error);
72 changes: 72 additions & 0 deletions test/features/llm/validate_llm_application_separate.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import { TestValidator } from "@nestia/e2e";
import {
HttpLlm,
IHttpLlmApplication,
ILlmSchema,
OpenApi,
OpenApiV3,
OpenApiV3_1,
SwaggerV2,
} from "@samchon/openapi";
import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer";
import { Singleton } from "tstl";
import typia from "typia";

// Exercise both constraint modes for the ChatGPT schema model.
export const test_chatgpt_application_separate = async (): Promise<void> => {
  for (const constraint of [false, true])
    await validate_llm_application_separate("chatgpt", constraint);
};

// Exercise both constraint modes for the Claude schema model.
export const test_claude_application_separate = async (): Promise<void> => {
  for (const constraint of [false, true])
    await validate_llm_application_separate("claude", constraint);
};

// Gemini is exercised without the constraint mode only.
export const test_gemini_application_separate = async (): Promise<void> => {
  await validate_llm_application_separate("gemini", false);
};

// Exercise both constraint modes for the Llama schema model.
export const test_llama_application_separate = async (): Promise<void> => {
  for (const constraint of [false, true])
    await validate_llm_application_separate("llama", constraint);
};

// Exercise both constraint modes for the OpenAPI v3.0 schema model.
export const test_llm_v30_application_separate = async (): Promise<void> => {
  for (const constraint of [false, true])
    await validate_llm_application_separate("3.0", constraint);
};

// Exercise both constraint modes for the OpenAPI v3.1 schema model.
export const test_llm_v31_application_separate = async (): Promise<void> => {
  for (const constraint of [false, true])
    await validate_llm_application_separate("3.1", constraint);
};

/**
 * Compose an LLM application for the given model with a `separate`
 * predicate, then assert every composed function got a `separated` plan.
 *
 * The `as any` casts are required because the `options` shape is generic
 * over the model type.
 */
const validate_llm_application_separate = async <
  Model extends ILlmSchema.Model,
>(
  model: Model,
  constraint: boolean,
): Promise<void> => {
  // Route secret-key string schemas to the "human" side of the separation.
  const isSecretString = (schema: any): boolean =>
    LlmSchemaComposer.typeChecker(model).isString(schema as any) &&
    (schema as any)["x-wrtn-secret-key"] !== undefined;
  const app: IHttpLlmApplication<Model> = HttpLlm.application({
    model,
    document: await document.get(),
    options: {
      separate: isSecretString,
      constraint: constraint as any,
    } as any,
  });
  for (const fn of app.functions)
    TestValidator.equals("separated")(!!fn.separated)(true);
};

// Lazily fetched, cached OpenAPI document shared by all test cases above.
const document = new Singleton(async (): Promise<OpenApi.IDocument> => {
  const response = await fetch(
    "https://wrtnio.github.io/connectors/swagger/swagger.json",
  );
  const swagger:
    | SwaggerV2.IDocument
    | OpenApiV3.IDocument
    | OpenApiV3_1.IDocument = await response.json();
  // Runtime-assert the payload shape, then normalize to the emended format.
  return OpenApi.convert(typia.assert(swagger));
});

0 comments on commit ba85b68

Please sign in to comment.