diff --git a/ask-llm.js b/ask-llm.js
index ad99b14..966d1fe 100755
--- a/ask-llm.js
+++ b/ask-llm.js
@@ -9,6 +9,29 @@
 const LLM_STREAMING = process.env.LLM_STREAMING !== 'no';
 const LLM_DEBUG = process.env.LLM_DEBUG;
 
+/**
+ * Represents a chat message.
+ *
+ * @typedef {Object} Message
+ * @property {'system'|'user'|'assistant'} role
+ * @property {string} content
+ */
+
+/**
+ * A callback function to stream the completion.
+ *
+ * @callback CompletionHandler
+ * @param {string} text
+ * @returns {void}
+ */
+
+/**
+ * Generates a chat completion using a RESTful LLM API service.
+ *
+ * @param {Array<Message>} messages - List of chat messages.
+ * @param {CompletionHandler=} handler - An optional callback to stream the completion.
+ * @returns {Promise<string>} The completion generated by the LLM.
+ */
 const chat = async (messages, handler) => {
     const url = `${LLM_API_BASE_URL}/chat/completions`;
     const auth = LLM_API_KEY ? { 'Authorization': `Bearer ${LLM_API_KEY}` } : {};
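
The hunk documents `chat()` but its body is cut off after the `url` and `auth` setup. As a rough illustration of the documented contract, here is a minimal, non-streaming sketch of a call against an OpenAI-compatible `/chat/completions` endpoint. It is not the actual implementation from ask-llm.js; the model name, the demo messages, and the `demo` wrapper are placeholders for illustration only.

```js
// Minimal sketch of an OpenAI-compatible chat completion request (Node 18+, global fetch).
// Environment variables mirror those shown in the diff; the model name is a placeholder.
const LLM_API_BASE_URL = process.env.LLM_API_BASE_URL;
const LLM_API_KEY = process.env.LLM_API_KEY;
const LLM_CHAT_MODEL = process.env.LLM_CHAT_MODEL || 'placeholder-model';

/** @type {Array<{role: 'system'|'user'|'assistant', content: string}>} */
const messages = [
    { role: 'system', content: 'Answer concisely.' },
    { role: 'user', content: 'What is the capital of France?' }
];

const demo = async () => {
    // Same URL and auth construction as in the diff above.
    const url = `${LLM_API_BASE_URL}/chat/completions`;
    const auth = LLM_API_KEY ? { 'Authorization': `Bearer ${LLM_API_KEY}` } : {};

    const response = await fetch(url, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', ...auth },
        body: JSON.stringify({ model: LLM_CHAT_MODEL, messages, stream: false })
    });
    if (!response.ok) {
        throw new Error(`HTTP error ${response.status}`);
    }

    // An OpenAI-compatible endpoint returns the text in choices[0].message.content.
    const data = await response.json();
    return data?.choices?.[0]?.message?.content;
};

demo().then(console.log).catch(console.error);
```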