From 7312c65456bb6a3e2b5a7ecc1a72af8e8f7a69d9 Mon Sep 17 00:00:00 2001 From: efugier Date: Fri, 10 Nov 2023 12:12:28 +0100 Subject: [PATCH] feat(flow): the --command option now appends to the --prompt --- README.md | 28 ++++++++++++------- src/cutsom_prompt.rs | 65 ++++++++++++++++++++++++++------------------ src/main.rs | 18 ++++++------ 3 files changed, 64 insertions(+), 47 deletions(-) diff --git a/README.md b/README.md index 0c8982f..6227cff 100644 --- a/README.md +++ b/README.md @@ -5,18 +5,26 @@ WIP cli interface to language models to bring them in the Unix ecosystem Usage: pipelm [OPTIONS] [PROMPT] Arguments: - [PROMPT] prompt in the config to fetch + [PROMPT] which prompt in the config to fetch. + The config must have at least one named "default" containing which model and api to hit by default [default: default] Options: - -c, --command custom prompt, incompatible with [PROMTP] - -b, --before prefix to add before custom prompt - -a, --after suffix to add after the imput and the custom prompt - -s, --system-message a system "config" message to send before the prompt - --api which api to hit [default: openai] - -m, --model which model (of the api) to use [default: gpt-3.5-turbo] - -f, --file file to read input from - -h, --help Print help - -V, --version Print version + -c, --command + custom prompt to append before the input + -a, --after-input + suffix to add after the input and the custom prompt + -s, --system-message + a system "config" message to send before the first user message + --api + which api to hit + -m, --model + which model (of the api) to use + -f, --file + file to read input from + -h, --help + Print help + -V, --version + Print version ``` ## A few examples to get started diff --git a/src/cutsom_prompt.rs b/src/cutsom_prompt.rs index fc31f0c..726ac77 100644 --- a/src/cutsom_prompt.rs +++ b/src/cutsom_prompt.rs @@ -4,12 +4,22 @@ use crate::config::{Message, Prompt, PLACEHOLDER_TOKEN}; pub fn customize_prompt( mut prompt: Prompt, + api: 
&Option, + model: &Option, command: &Option, - after: &Option, + after_input: &Option, system_message: &Option, ) -> Prompt { - debug!("test"); - let empty_prompt = prompt.messages.is_empty(); + debug!("pre-customization prompt {:?}", prompt); + // Override parameters + if let Some(api) = api { + prompt.api = api.to_owned(); + } + if let Some(model) = model { + prompt.model = model.to_owned(); + } + + let first_user_message_index = prompt.messages.iter().position(|m| m.role == "system"); // if there's a system message to add, add it before the first user message if let Some(message_content) = system_message { @@ -17,8 +27,6 @@ role: "system".to_string(), content: message_content.to_owned(), }; - - let first_user_message_index = prompt.messages.iter().position(|m| m.role == "system"); if let Some(index) = first_user_message_index { prompt.messages.insert(index, system_message); } else { @@ -26,7 +34,7 @@ } } - // add stuff if there's some custom things to do + // if prompt customization was provided, add it in a new message let mut prompt_message = String::new(); if let Some(command_text) = command { prompt_message.push_str(command_text); @@ -34,34 +42,37 @@ prompt_message.push_str(PLACEHOLDER_TOKEN); } } - if let Some(after_input) = after { - prompt_message.push_str(after_input); - } - - let last_message_contains_input = prompt - .messages - .last() - .is_some_and(|m| m.content.contains(PLACEHOLDER_TOKEN)); - if !prompt_message.is_empty() { prompt.messages.push(Message { role: "user".to_string(), content: prompt_message, }); - } else if last_message_contains_input { - // no command and an empty prompt -> use input as prompt - prompt.messages.push(Message { - role: "user".to_string(), - content: PLACEHOLDER_TOKEN.to_string(), - }); } - if empty_prompt { - // no command and an empty prompt -> use input as prompt - prompt.messages.push(Message { - role: "user".to_string(), - content: 
PLACEHOLDER_TOKEN.to_string(), - }); + // get the last message for check and make sure it's a user one + let mut last_message = + if prompt.messages.is_empty() || prompt.messages.last().is_some_and(|m| m.role != "user") { + Message { + role: "user".to_string(), + content: PLACEHOLDER_TOKEN.to_string(), + } + } else { + prompt.messages.pop().unwrap() + }; + + // verify that the last message contains a placeholder + if !last_message.content.contains(PLACEHOLDER_TOKEN) { + last_message.content.push_str(PLACEHOLDER_TOKEN); + } + + // add the after input text + if let Some(after_input_text) = after_input { + // append to the popped last_message, which is pushed back below + last_message.content.push_str(after_input_text); } + + prompt.messages.push(last_message); + + debug!("post-customization prompt {:?}", prompt); prompt } diff --git a/src/main.rs b/src/main.rs index b1a42ce..faaa1d2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -19,7 +19,8 @@ mod config; long_about = None )] struct Cli { - /// prompt in the config to fetch + /// which prompt in the config to fetch. The config must have at least one named "default" + /// containing which model and api to hit by default. 
#[arg(default_value_t = String::from("default"))] prompt: String, #[command(flatten)] @@ -41,12 +42,12 @@ struct Cli { #[derive(Debug, Args)] #[group(id = "custom_prompt")] struct CustomPrompt { - /// custom prompt, incompatible with [PROMTP] + /// custom prompt to append before the input #[arg(short, long)] command: Option, /// suffix to add after the input and the custom prompt #[arg(short, long)] - after: Option, + after_input: Option, } fn main() { @@ -85,16 +86,13 @@ fn main() { &args.prompt, &available_prompts ); let prompt = prompts.remove(&args.prompt).expect(&prompt_not_found_error); - // None => config::Prompt { - // api: args.api, - // model: args.model, - // messages: Vec::new(), - // }, - // }; + let prompt = cutsom_prompt::customize_prompt( prompt, + &args.api, + &args.model, &args.custom_prompt_args.command, - &args.custom_prompt_args.after, + &args.custom_prompt_args.after_input, &args.system_message, );