feat(flow): the --command option now appends to the --prompt
efugier committed Nov 10, 2023
1 parent 9954692 commit 7312c65
Showing 3 changed files with 64 additions and 47 deletions.
28 changes: 18 additions & 10 deletions README.md
@@ -5,18 +5,26 @@ WIP cli interface to language models to bring them in the Unix ecosystem
Usage: pipelm [OPTIONS] [PROMPT]
Arguments:
[PROMPT] prompt in the config to fetch
[PROMPT] which prompt in the config to fetch.
The config must have at least one named "default" containing which model and api to hit by default [default: default]
Options:
-c, --command <COMMAND> custom prompt, incompatible with [PROMTP]
-b, --before <BEFORE> prefix to add before custom prompt
-a, --after <AFTER> suffix to add after the imput and the custom prompt
-s, --system-message <SYSTEM_MESSAGE> a system "config" message to send before the prompt
--api <API> which api to hit [default: openai]
-m, --model <MODEL> which model (of the api) to use [default: gpt-3.5-turbo]
-f, --file <FILE> file to read input from
-h, --help Print help
-V, --version Print version
-c, --command <COMMAND>
custom prompt to append before the input
-a, --after-input <AFTER_INPUT>
suffix to add after the input and the custom prompt
-s, --system-message <SYSTEM_MESSAGE>
a system "config" message to send before the first user message
--api <API>
which api to hit
-m, --model <MODEL>
which model (of the api) to use
-f, --file <FILE>
file to read input from
-h, --help
Print help
-V, --version
Print version
```

## A few examples to get started
65 changes: 38 additions & 27 deletions src/cutsom_prompt.rs
@@ -4,64 +4,75 @@ use crate::config::{Message, Prompt, PLACEHOLDER_TOKEN};

pub fn customize_prompt(
mut prompt: Prompt,
api: &Option<String>,
model: &Option<String>,
command: &Option<String>,
after: &Option<String>,
after_input: &Option<String>,
system_message: &Option<String>,
) -> Prompt {
debug!("test");
let empty_prompt = prompt.messages.is_empty();
debug!("pre-customization promot {:?}", prompt);
// Override parameters
if let Some(api) = api {
prompt.api = api.to_owned();
}
if let Some(model) = model {
prompt.model = model.to_owned();
}

let first_user_message_index = prompt.messages.iter().position(|m| m.role == "system");

// if there's a system message to add, add it before the first user message
if let Some(message_content) = system_message {
let system_message = Message {
role: "system".to_string(),
content: message_content.to_owned(),
};

let first_user_message_index = prompt.messages.iter().position(|m| m.role == "system");
if let Some(index) = first_user_message_index {
prompt.messages.insert(index, system_message);
} else {
prompt.messages.push(system_message);
}
}

// add stuff if there's some custom things to do
// if prompt customization was provided, add it in a new message
let mut prompt_message = String::new();
if let Some(command_text) = command {
prompt_message.push_str(command_text);
if !prompt_message.contains(PLACEHOLDER_TOKEN) {
prompt_message.push_str(PLACEHOLDER_TOKEN);
}
}
if let Some(after_input) = after {
prompt_message.push_str(after_input);
}

let last_message_contains_input = prompt
.messages
.last()
.is_some_and(|m| m.content.contains(PLACEHOLDER_TOKEN));

if !prompt_message.is_empty() {
prompt.messages.push(Message {
role: "user".to_string(),
content: prompt_message,
});
} else if last_message_contains_input {
// no command and an empty prompt -> use input as prompt
prompt.messages.push(Message {
role: "user".to_string(),
content: PLACEHOLDER_TOKEN.to_string(),
});
}

if empty_prompt {
// no command and an empty prompt -> use input as prompt
prompt.messages.push(Message {
role: "user".to_string(),
content: PLACEHOLDER_TOKEN.to_string(),
});
// get the last message for check and make sure it's a user one
let mut last_message =
if prompt.messages.is_empty() | prompt.messages.last().is_some_and(|m| m.role != "user") {
Message {
role: "user".to_string(),
content: PLACEHOLDER_TOKEN.to_string(),
}
} else {
prompt.messages.pop().unwrap()
};

// verify that the last message contrains a placeholder
if !last_message.content.contains(PLACEHOLDER_TOKEN) {
last_message.content.push_str(PLACEHOLDER_TOKEN);
}

// add the after input text
if let Some(after_input_text) = after_input {
let last_message = prompt.messages.last_mut().unwrap();
last_message.content.push_str(after_input_text);
}

prompt.messages.push(last_message);

debug!("pre-customization promot {:?}", prompt);
prompt
}
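
Taken together, the intent of the new `customize_prompt` is: turn the `--command` text into a user message (appending the input placeholder if the command does not place it itself), make sure the conversation ends on a user message that contains the placeholder, and tack the `--after-input` suffix on at the end. Below is a condensed, self-contained sketch of that behaviour rather than a transcription of the diff: the stand-in `Message` type and the placeholder value `#[<input>]` are assumptions, the real definitions live in `crate::config`.

```rust
// Condensed sketch of the appending behaviour introduced by this commit.
// `Message` and PLACEHOLDER_TOKEN are stand-ins; the real definitions live in
// crate::config, and the placeholder value below is assumed for illustration.

const PLACEHOLDER_TOKEN: &str = "#[<input>]";

#[derive(Debug)]
struct Message {
    role: String,
    content: String,
}

fn customize(messages: &mut Vec<Message>, command: Option<&str>, after_input: Option<&str>) {
    // the --command text becomes a new user message; the input placeholder is
    // appended unless the command already places it explicitly
    if let Some(command_text) = command {
        let mut content = command_text.to_string();
        if !content.contains(PLACEHOLDER_TOKEN) {
            content.push_str(PLACEHOLDER_TOKEN);
        }
        messages.push(Message { role: "user".into(), content });
    }

    // make sure the conversation ends on a user message containing the
    // placeholder, so there is always a slot for the piped input
    let ends_on_user = messages.last().is_some_and(|m| m.role == "user");
    let mut last = if ends_on_user {
        messages.pop().unwrap()
    } else {
        Message { role: "user".into(), content: String::new() }
    };
    if !last.content.contains(PLACEHOLDER_TOKEN) {
        last.content.push_str(PLACEHOLDER_TOKEN);
    }

    // the --after-input suffix goes after the input and the custom prompt
    if let Some(suffix) = after_input {
        last.content.push_str(suffix);
    }
    messages.push(last);
}

fn main() {
    let mut messages = Vec::new();
    customize(
        &mut messages,
        Some("explain this code: "),
        Some("\nanswer in one line"),
    );
    // -> one user message: "explain this code: #[<input>]\nanswer in one line"
    println!("{messages:?}");
}
```

In other words, piping input in with only `-c "explain this code: "` now yields the configured prompt's messages plus a final user message of the form `<command><input><after-input>`, which is what the commit title means by the command appending to the prompt.
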
18 changes: 8 additions & 10 deletions src/main.rs
@@ -19,7 +19,8 @@ mod config
long_about = None
)]
struct Cli {
/// prompt in the config to fetch
/// which prompt in the config to fetch. The config must have at least one named "default"
/// containing which model and api to hit by default.
#[arg(default_value_t = String::from("default"))]
prompt: String,
#[command(flatten)]
@@ -41,12 +41,12 @@
#[derive(Debug, Args)]
#[group(id = "custom_prompt")]
struct CustomPrompt {
/// custom prompt, incompatible with [PROMTP]
/// custom prompt to append before the input
#[arg(short, long)]
command: Option<String>,
/// suffix to add after the input and the custom prompt
#[arg(short, long)]
after: Option<String>,
after_input: Option<String>,
}

fn main() {
@@ -85,16 +86,13 @@ fn main() {
&args.prompt, &available_prompts
);
let prompt = prompts.remove(&args.prompt).expect(&prompt_not_found_error);
// None => config::Prompt {
// api: args.api,
// model: args.model,
// messages: Vec::new(),
// },
// };

let prompt = cutsom_prompt::customize_prompt(
prompt,
&args.api,
&args.model,
&args.custom_prompt_args.command,
&args.custom_prompt_args.after,
&args.custom_prompt_args.after_input,
&args.system_message,
);

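
For readers who want to poke at the flag parsing in isolation, here is a stripped-down, buildable sketch of the argument surface this commit leaves behind. Field names and help strings are copied from the diff; the clap version and the `main` body are assumptions. Clap's derive API is what turns the `after_input` field into the `--after-input` flag shown in the regenerated README.

```rust
// Assumed Cargo.toml entry: clap = { version = "4", features = ["derive"] }
use clap::{Args, Parser};

#[derive(Debug, Parser)]
#[command(name = "pipelm", long_about = None)]
struct Cli {
    /// which prompt in the config to fetch. The config must have at least one named "default"
    /// containing which model and api to hit by default.
    #[arg(default_value_t = String::from("default"))]
    prompt: String,
    #[command(flatten)]
    custom_prompt_args: CustomPrompt,
    /// a system "config" message to send before the first user message
    #[arg(short, long)]
    system_message: Option<String>,
}

#[derive(Debug, Args)]
#[group(id = "custom_prompt")]
struct CustomPrompt {
    /// custom prompt to append before the input
    #[arg(short, long)]
    command: Option<String>,
    /// suffix to add after the input and the custom prompt
    #[arg(short, long)]
    after_input: Option<String>,
}

fn main() {
    // e.g. `pipelm -c "explain this code: " -a "answer in one line"`
    let cli = Cli::parse();
    println!("{cli:?}");
}
```

The `#[command(flatten)]` plus `#[group(id = "custom_prompt")]` pair keeps the custom-prompt options in their own struct while still exposing them as ordinary top-level flags, which is why `customize_prompt` receives them through `args.custom_prompt_args`.
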
