Skip to content

Commit

Permalink
Prepare break changes on dependencies
Browse files Browse the repository at this point in the history
  • Loading branch information
samchon committed Nov 27, 2024
1 parent 136a51b commit 771c357
Show file tree
Hide file tree
Showing 19 changed files with 314 additions and 870 deletions.
8 changes: 3 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,9 @@ const application: IHttpOpenAiApplication = HttpLlm.application(document);

Kind | `@wrtnio/schema` | `@samchon/openapi`
-------------------------------------------------------------------------------------------|------------------------------|---------------------------
[**HTTP LLM Application**](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#application) | **`IHttpOpenAiApplication`** | **`IHttpLlmApplication`**
[HTTP LLM Function Schema](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#function) | `IHttpOpenAiFunction` | `IHttpLlmFunction`
[LLM Type Schema](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#schema) | `IOpenAiSchema` | `ILlmSchema`
[LLM Function Schema](https://typia.io/docs/llm/application/) | `IOpenAiFunction` | `ILlmFunction`
[LLM Application](https://typia.io/docs/llm/application/) | `IOpenAiApplication` | `ILlmApplication`
[**HTTP LLM Application**](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#application) | **`IHttpOpenAiApplication`** | **`IHttpLlmApplication<"3.0">`**
[HTTP LLM Function Schema](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#function)    | `IHttpOpenAiFunction`        | `IHttpLlmFunction<"3.0">`
[LLM Type Schema](https://wrtn.ai/studio-pro/tech-specs/meta/schema/#schema) | `IOpenAiSchema` | `ILlmSchema<"3.0">`
[**OpenAPI Document**](https://wrtn.ai/studio-pro/tech-specs/openapi/document/#document) | **`ISwagger`** | **`OpenApi.IDocument`**
[Server URL Address](https://wrtn.ai/studio-pro/tech-specs/openapi/document/#server) | `ISwaggerServer` | `OpenApi.IServer`
[API Operation](https://wrtn.ai/studio-pro/tech-specs/openapi/document/#operation) | `ISwaggerOperation` | `OpenApi.IOperation`
Expand Down
15 changes: 5 additions & 10 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,17 +1,15 @@
{
"name": "@wrtnio/schema",
"version": "1.3.0",
"version": "2.0.0-dev.20241127",
"description": "JSON and LLM function calling schemas extended for Wrtn Studio Pro",
"main": "lib/index.js",
"module": "./lib/index.mjs",
"typings": "./lib/index.d.ts",
"scripts": {
"prepare": "ts-patch install",
"build": "npm run build:main && npm run build:test",
"build": "npm run build:main",
"build:main": "rimraf lib && tsc && rollup -c",
"build:test": "rimraf bin && tsc -p test/tsconfig.json",
"dev": "npm run build:test -- --watch",
"test": "node bin/test"
"dev": "rimraf lib && tsc --watch"
},
"repository": {
"type": "git",
Expand All @@ -31,10 +29,7 @@
},
"homepage": "https://github.com/wrtnio/schema#readme",
"dependencies": {
"@samchon/openapi": "^1.2.2"
},
"peerDependencies": {
"@samchon/openapi": ">=1.2.2"
"@samchon/openapi": "^2.0.0-dev.20241127-2"
},
"devDependencies": {
"@rollup/plugin-terser": "^0.4.4",
Expand All @@ -46,7 +41,7 @@
"ts-patch": "^3.2.1",
"typescript": "5.5.4",
"typescript-transform-paths": "^3.5.1",
"typia": "^6.10.2"
"typia": "^7.0.0-dev.20241127-2"
},
"files": [
"lib",
Expand Down
138 changes: 138 additions & 0 deletions src/HttpOpenAi.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
import {
HttpLlm,
IHttpConnection,
IHttpLlmApplication,
IHttpLlmFunction,
} from "@samchon/openapi";

import { IHttpOpenAiApplication } from "./IHttpOpenAiApplication";
import { IHttpOpenAiFunction } from "./IHttpOpenAiFunction";
import { ISwagger } from "./ISwagger";

export namespace HttpOpenAi {
  /* -----------------------------------------------------------
    APPLICATION
  ----------------------------------------------------------- */
  /**
   * Compose an OpenAI function-calling application from a Swagger document.
   *
   * Delegates to {@link HttpLlm.application} with the legacy `"3.0"` schema
   * model, then converts each {@link IHttpLlmFunction} (keyworded, single
   * object parameter) into the positional-parameter shape of
   * {@link IHttpOpenAiFunction}.
   *
   * @param props.document Swagger (OpenAPI) document to convert
   * @param props.options Optional composition options
   * @returns Application whose functions use positional parameters
   */
  export const application = (props: {
    document: ISwagger;
    options?: Partial<IHttpOpenAiApplication.IOptions>;
  }): IHttpOpenAiApplication => {
    const app: IHttpLlmApplication<"3.0"> = HttpLlm.application({
      model: "3.0",
      document: props.document,
      options: props.options,
    });
    return {
      ...app,
      functions: app.functions.map(functional),
    };
  };

  /**
   * Convert a keyworded LLM function schema to the positional form.
   *
   * The keyworded object parameter's properties become the positional
   * `parameters` array (in `Object.keys` order), and the separated
   * (LLM/human) property schemas are re-indexed against that order.
   */
  const functional = (
    keyword: IHttpLlmFunction<"3.0">,
  ): IHttpOpenAiFunction => {
    // Map each property name to its positional index.
    const properties = new Map(
      Object.keys(keyword.parameters.properties).map((name, i) => [name, i]),
    );
    return {
      ...keyword,
      keyword: keyword.parameters,
      parameters: Object.values(keyword.parameters.properties),
      separated: keyword.separated
        ? {
            llm: Object.entries(keyword.separated.llm?.properties ?? {}).map(
              ([key, value]) => ({
                schema: value,
                // NOTE(review): falls back to index 0 for an unknown key —
                // presumably unreachable, since separated properties come
                // from the same parameter object; confirm upstream.
                index: properties.get(key) ?? 0,
              }),
            ),
            human: Object.entries(
              keyword.separated.human?.properties ?? {},
            ).map(([key, value]) => ({
              schema: value,
              index: properties.get(key) ?? 0,
            })),
            keyword: keyword.separated,
          }
        : undefined,
    };
  };

  /* -----------------------------------------------------------
    FETCHERS
  ----------------------------------------------------------- */
  /**
   * Properties for executing a function with positional arguments.
   */
  export interface IFetchProps {
    /** Connection info to the HTTP server. */
    connection: IHttpConnection;
    /** Application composed by {@link HttpOpenAi.application}. */
    application: IHttpOpenAiApplication;
    /** Target function to execute. */
    function: IHttpOpenAiFunction;
    /** Positional arguments, in {@link IHttpOpenAiFunction.parameters} order. */
    arguments: unknown[];
  }

  /**
   * Execute the function; non-2xx responses are thrown as errors.
   */
  export const execute = async (props: IFetchProps): Promise<unknown> =>
    HttpLlm.execute(getProps(props));

  /**
   * Execute the function; every response status is returned, not thrown.
   */
  export const propagate = async (props: IFetchProps): Promise<unknown> =>
    HttpLlm.propagate(getProps(props));

  /**
   * Re-assemble positional arguments into the keyworded single-object
   * input that {@link HttpLlm} expects.
   */
  const getProps = (props: IFetchProps): HttpLlm.IFetchProps<"3.0"> => {
    const keys: string[] = Object.keys(props.function.keyword.properties);
    // Zip positional arguments back onto their property names.
    const input: Record<string, unknown> = Object.fromEntries(
      props.arguments.map((arg, i) => [keys[i], arg]),
    );
    return {
      connection: props.connection,
      application: {
        ...props.application,
        // Functions list is not needed for execution; drop the converted ones.
        functions: [],
      },
      function: {
        ...props.function,
        // Restore the original keyworded parameter schemas.
        parameters: props.function.keyword,
        separated: props.function.separated?.keyword,
      },
      input,
    };
  };

  /* -----------------------------------------------------------
    MERGERS
  ----------------------------------------------------------- */
  /**
   * Properties for merging separated (LLM/human) arguments.
   */
  export interface IMergeProps {
    /** Target function whose arguments are being merged. */
    function: IHttpOpenAiFunction;
    /** Arguments composed by the LLM, in separated-llm order. */
    llm: unknown[];
    /** Arguments composed by the human, in separated-human order. */
    human: unknown[];
  }

  /**
   * Merge LLM-composed and human-composed arguments into one positional
   * argument list.
   *
   * @throws Error when the function was not composed with separation, or
   *         when a parameter index appears on neither side
   */
  export const mergeParameters = (props: IMergeProps): unknown[] => {
    const separated: IHttpOpenAiFunction.ISeparated | undefined =
      props.function.separated;
    if (separated === undefined)
      throw new Error(
        "Error on HttpOpenAi.mergeParameters(): the function parameters are not separated.",
      );
    return new Array(props.function.parameters.length).fill(0).map((_, i) => {
      const llm: number = separated.llm.findIndex((p) => p.index === i);
      const human: number = separated.human.findIndex((p) => p.index === i);
      if (llm === -1 && human === -1)
        throw new Error(
          "Error on HttpOpenAi.mergeParameters(): failed to gather separated arguments, because both LLM and human sides are all empty.",
        );
      // A missing side yields undefined, which mergeValue resolves via `y ?? x`.
      return mergeValue(props.llm[llm], props.human[human]);
    });
  };

  /**
   * Deeply merge two values, preferring the human-side (`y`) value.
   *
   * Arrays are merged element-wise, objects key-wise; otherwise `y`
   * wins unless it is nullish.
   *
   * FIX: the array check must precede the `typeof === "object"` check —
   * arrays are `typeof "object"`, so the previous ordering spread two
   * arrays through `combineObject` and returned a plain object with
   * numeric keys instead of an array.
   */
  export const mergeValue = (x: unknown, y: unknown): unknown =>
    Array.isArray(x) && Array.isArray(y)
      ? new Array(Math.max(x.length, y.length))
          .fill(0)
          .map((_, i) => mergeValue(x[i], y[i]))
      : typeof x === "object" &&
          typeof y === "object" &&
          x !== null &&
          y !== null
        ? combineObject(x, y)
        : (y ?? x);

  /**
   * Key-wise merge of two plain objects; keys of `y` are merged over `x`.
   */
  const combineObject = (x: any, y: any): any => {
    const output: any = { ...x };
    for (const [k, v] of Object.entries(y)) output[k] = mergeValue(x[k], v);
    return output;
  };
}
38 changes: 21 additions & 17 deletions src/IHttpOpenAiApplication.ts
Original file line number Diff line number Diff line change
@@ -1,29 +1,28 @@
import { IHttpLlmApplication } from "@samchon/openapi";

import { IOpenAiSchema } from "./IOpenAiSchema";
import { ISwaggerMigrateRoute } from "./ISwaggerMigrateRoute";
import { IHttpOpenAiFunction } from "./IHttpOpenAiFunction";
import { ISwaggerOperation } from "./ISwaggerOperation";

/**
* Application of OpenAI (LLM) function call from OpenAPI document.
*
* `IHttpOpenAiApplication` is a data structure representing a collection of
* {@link IOpenAiFunction LLM function calling schemas} composed from the
* {@link IHttpOpenAiFunction LLM function calling schemas} composed from the
* {@link ISwagger OpenAPI document} and its {@link ISwaggerOperation operation}
* metadata. It also contains {@link IHttpOpenAiApplication.errors failed operations}, and
* adjusted {@link IHttpOpenAiApplication.options options} during the
* `IHttpOpenAiApplication` construction.
*
* About the {@link ISwaggerOperation API operations}, they are converted to
* {@link IOpenAiFunction} type which represents LLM function calling schema.
* {@link IHttpOpenAiFunction} type which represents LLM function calling schema.
 * By the way, if there are some recursive types which can't escape the
* {@link ISwaggerSchema.IReference} type, the operation would be failed and
* pushed into the {@link IHttpOpenAiApplication.errors}. Otherwise not, the operation
* would be successfully converted to {@link IOpenAiFunction} and its type schemas
* would be successfully converted to {@link IHttpOpenAiFunction} and its type schemas
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
*
* About the options, if you've configured {@link IHttpOpenAiApplication.options.keyword}
* (as `true`), number of {@link IOpenAiFunction.parameters} are always 1 and the first
* (as `true`), number of {@link IHttpOpenAiFunction.parameters} are always 1 and the first
* parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
* be multiple, and the sequence of the parameters are following below rules.
*
Expand Down Expand Up @@ -52,7 +51,7 @@ import { ISwaggerOperation } from "./ISwaggerOperation";
 * like secret key (password) are the examples. In that case, you can separate the
* function parameters to both LLM and Human sides by configuring the
* {@link IHttpOpenAiApplication.IOptions.separate} property. The separated parameters are
* assigned to the {@link IOpenAiFunction.separated} property.
* assigned to the {@link IHttpOpenAiFunction.separated} property.
*
* For reference, the actual function call execution is not by LLM, but by you.
* When the LLM selects the proper function and fills the arguments, you just call
Expand All @@ -66,24 +65,29 @@ import { ISwaggerOperation } from "./ISwaggerOperation";
* before the actual LLM function call execution.
*
* @reference https://platform.openai.com/docs/guides/function-calling
* @deprecated OpenAI's JSON schema specification has been changed
* @author Samchon
*/
export type IHttpOpenAiApplication = IHttpLlmApplication<
IOpenAiSchema,
ISwaggerOperation,
ISwaggerMigrateRoute
>;
export interface IHttpOpenAiApplication
extends Omit<IHttpLlmApplication<"3.0">, "functions"> {
/**
* List of function metadata.
*
* List of function metadata that can be used for the LLM function call.
*
* When you want to execute the function with LLM constructed arguments,
* you can do it through {@link HttpOpenAi.execute} function.
*/
functions: IHttpOpenAiFunction[];
}
export namespace IHttpOpenAiApplication {
/**
* Error occurred in the composition.
*/
export type IError = IHttpLlmApplication.IError<
ISwaggerOperation,
ISwaggerMigrateRoute
>;
export type IError = IHttpLlmApplication.IError;

/**
* Options for composing the LLM application.
*/
export type IOptions = IHttpLlmApplication.IOptions<IOpenAiSchema>;
export type IOptions = IHttpLlmApplication.IOptions<"3.0">;
}
90 changes: 81 additions & 9 deletions src/IHttpOpenAiFunction.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { IHttpLlmFunction } from "@samchon/openapi";

import { IOpenAiSchema } from "./IOpenAiSchema";
import { ISwaggerMigrateRoute } from "./ISwaggerMigrateRoute";
import { ISwaggerOperation } from "./ISwaggerOperation";

/**
Expand Down Expand Up @@ -54,22 +53,95 @@ import { ISwaggerOperation } from "./ISwaggerOperation";
* ```
*
* @reference https://platform.openai.com/docs/guides/function-calling
* @deprecated OpenAI's JSON schema specification has been changed
* @author Samchon
*/
export type IHttpOpenAiFunction = IHttpLlmFunction<
IOpenAiSchema,
ISwaggerOperation,
ISwaggerMigrateRoute
>;
export interface IHttpOpenAiFunction
extends Omit<IHttpLlmFunction<"3.0">, "parameters" | "separated"> {
/**
* List of parameter types.
*
* If you've configured {@link IHttpLlmApplication.IOptions.keyword} as `true`,
* number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter's type is always {@link ILlmSchema.IObject}. The
* properties' rule is:
*
* - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
* - `query`: Query parameter of {@link IHttpMigrateRoute.query}
* - `body`: Body parameter of {@link IHttpMigrateRoute.body}
*
* ```typescript
* {
* ...pathParameters,
* query,
* body,
* }
* ```
*
* Otherwise, the parameters would be multiple, and the sequence of the
* parameters are following below rules:
*
* ```typescript
* [
* ...pathParameters,
* ...(query ? [query] : []),
* ...(body ? [body] : []),
* ]
* ```
*/
parameters: IOpenAiSchema[];

/**
* The keyworded parameters.
*/
keyword: IOpenAiSchema.IParameters;

/**
* Collection of separated parameters.
*
* Filled only when {@link IHttpLlmApplication.IOptions.separate} is configured.
*/
separated?: IHttpOpenAiFunction.ISeparated;
}
export namespace IHttpOpenAiFunction {
export interface IOptions extends IOpenAiSchema.IConfig {
separate: null | ((schema: IOpenAiSchema) => boolean);
}

/**
* Collection of separated parameters.
*/
export type ISeparated = IHttpLlmFunction.ISeparated<IOpenAiSchema>;
export interface ISeparated {
/**
* Parameters that would be composed by the LLM.
*/
llm: ISeparatedParameter[];

/**
* Parameters that would be composed by the human.
*/
human: ISeparatedParameter[];

/**
* The keyworded parameters' separation.
*/
keyword: IHttpLlmFunction.ISeparated<IOpenAiSchema.IParameters>;
}

/**
* Separated parameter.
*/
export type ISeparatedParameter =
IHttpLlmFunction.ISeparatedParameter<IOpenAiSchema>;
export interface ISeparatedParameter {
/**
* Index of the parameter.
*
* @type uint
*/
index: number;

/**
* Type schema info of the parameter.
*/
schema: IOpenAiSchema;
}
}
Loading

0 comments on commit 771c357

Please sign in to comment.