Merge pull request #3 from meta-llama/llamastackclient_cli
[CLI] llama-stack-client CLI for querying server distro
Showing 33 changed files with 634 additions and 80 deletions.
The first changed file, a shell script that runs lints and verifies the package imports, has its commands commented out:

```diff
@@ -1,12 +1,11 @@
 #!/usr/bin/env bash
 
-set -e
+# set -e
 
-cd "$(dirname "$0")/.."
+# cd "$(dirname "$0")/.."
 
-echo "==> Running lints"
-rye run lint
+# echo "==> Running lints"
+# rye run lint
 
-echo "==> Making sure it imports"
-rye run python -c 'import llama_stack_client'
+# echo "==> Making sure it imports"
+# rye run python -c 'import llama_stack_client'
```
The test-runner script gets the same treatment; everything except the shebang is commented out:

```diff
@@ -1,59 +1,59 @@
 #!/usr/bin/env bash
 
-set -e
+# set -e
 
-cd "$(dirname "$0")/.."
+# cd "$(dirname "$0")/.."
 
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-YELLOW='\033[0;33m'
-NC='\033[0m' # No Color
+# RED='\033[0;31m'
+# GREEN='\033[0;32m'
+# YELLOW='\033[0;33m'
+# NC='\033[0m' # No Color
 
-function prism_is_running() {
-  curl --silent "http://localhost:4010" >/dev/null 2>&1
-}
+# function prism_is_running() {
+#   curl --silent "http://localhost:4010" >/dev/null 2>&1
+# }
 
-kill_server_on_port() {
-  pids=$(lsof -t -i tcp:"$1" || echo "")
-  if [ "$pids" != "" ]; then
-    kill "$pids"
-    echo "Stopped $pids."
-  fi
-}
+# kill_server_on_port() {
+#   pids=$(lsof -t -i tcp:"$1" || echo "")
+#   if [ "$pids" != "" ]; then
+#     kill "$pids"
+#     echo "Stopped $pids."
+#   fi
+# }
 
-function is_overriding_api_base_url() {
-  [ -n "$TEST_API_BASE_URL" ]
-}
+# function is_overriding_api_base_url() {
+#   [ -n "$TEST_API_BASE_URL" ]
+# }
 
-if ! is_overriding_api_base_url && ! prism_is_running ; then
-  # When we exit this script, make sure to kill the background mock server process
-  trap 'kill_server_on_port 4010' EXIT
+# if ! is_overriding_api_base_url && ! prism_is_running ; then
+#   # When we exit this script, make sure to kill the background mock server process
+#   trap 'kill_server_on_port 4010' EXIT
 
-  # Start the dev server
-  ./scripts/mock --daemon
-fi
+#   # Start the dev server
+#   ./scripts/mock --daemon
+# fi
 
-if is_overriding_api_base_url ; then
-  echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
-  echo
-elif ! prism_is_running ; then
-  echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
-  echo -e "running against your OpenAPI spec."
-  echo
-  echo -e "To run the server, pass in the path or url of your OpenAPI"
-  echo -e "spec to the prism command:"
-  echo
-  echo -e "  \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
-  echo
+# if is_overriding_api_base_url ; then
+#   echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
+#   echo
+# elif ! prism_is_running ; then
+#   echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
+#   echo -e "running against your OpenAPI spec."
+#   echo
+#   echo -e "To run the server, pass in the path or url of your OpenAPI"
+#   echo -e "spec to the prism command:"
+#   echo
+#   echo -e "  \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
+#   echo
 
-  exit 1
-else
-  echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
-  echo
-fi
+#   exit 1
+# else
+#   echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
+#   echo
+# fi
 
-echo "==> Running tests"
-rye run pytest "$@"
+# echo "==> Running tests"
+# rye run pytest "$@"
 
-echo "==> Running Pydantic v1 tests"
-rye run nox -s test-pydantic-v1 -- "$@"
+# echo "==> Running Pydantic v1 tests"
+# rye run nox -s test-pydantic-v1 -- "$@"
```
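For context, the (now disabled) script refused to run the test suite unless a Prism mock server was listening on port 4010; the command it printed for starting one manually is reproduced below, verbatim from the script's error message:

```bash
# Serve a mock of your OpenAPI spec with Prism on the default port (4010).
npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml
```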
A new file is added containing only the standard Meta license header:

```diff
@@ -0,0 +1,5 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
```
The centerpiece of the PR is a new `configure` subcommand (100 added lines) that records which Llama Stack distribution the CLI should talk to:

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import argparse
import os

import yaml

from llama_stack_client.lib.cli.constants import LLAMA_STACK_CLIENT_CONFIG_DIR
from llama_stack_client.lib.cli.subcommand import Subcommand


def get_config_file_path():
    return LLAMA_STACK_CLIENT_CONFIG_DIR / "config.yaml"


def get_config():
    config_file = get_config_file_path()
    if config_file.exists():
        with open(config_file, "r") as f:
            return yaml.safe_load(f)
    return None


class ConfigureParser(Subcommand):
    """Configure Llama Stack Client CLI"""

    def __init__(self, subparsers: argparse._SubParsersAction):
        super().__init__()
        self.parser = subparsers.add_parser(
            "configure",
            prog="llama-stack-client configure",
            description="Configure Llama Stack Client CLI",
            formatter_class=argparse.RawTextHelpFormatter,
        )
        self._add_arguments()
        self.parser.set_defaults(func=self._run_configure_cmd)

    def _add_arguments(self):
        self.parser.add_argument(
            "--host",
            type=str,
            help="Llama Stack distribution host",
        )
        self.parser.add_argument(
            "--port",
            type=str,
            help="Llama Stack distribution port number",
        )
        self.parser.add_argument(
            "--endpoint",
            type=str,
            help="Llama Stack distribution endpoint",
        )

    def _run_configure_cmd(self, args: argparse.Namespace):
        from prompt_toolkit import prompt
        from prompt_toolkit.validation import Validator

        os.makedirs(LLAMA_STACK_CLIENT_CONFIG_DIR, exist_ok=True)
        config_path = get_config_file_path()

        if args.endpoint:
            endpoint = args.endpoint
        else:
            if args.host and args.port:
                endpoint = f"http://{args.host}:{args.port}"
            else:
                host = prompt(
                    "> Enter the host name of the Llama Stack distribution server: ",
                    validator=Validator.from_callable(
                        lambda x: len(x) > 0,
                        error_message="Host cannot be empty, please enter a valid host",
                    ),
                )
                port = prompt(
                    "> Enter the port number of the Llama Stack distribution server: ",
                    validator=Validator.from_callable(
                        lambda x: x.isdigit(),
                        error_message="Please enter a valid port number",
                    ),
                )
                endpoint = f"http://{host}:{port}"

        with open(config_path, "w") as f:
            f.write(
                yaml.dump(
                    {
                        "endpoint": endpoint,
                    },
                    sort_keys=True,
                )
            )

        print(
            f"Done! You can now use the Llama Stack Client CLI with endpoint {endpoint}"
        )
```
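Based on the argument definitions above, here is a sketch of how the subcommand can be invoked; the host, port, and endpoint values are placeholders, not part of the diff:

```bash
# One-shot: pass the full endpoint directly (placeholder URL).
llama-stack-client configure --endpoint http://localhost:5000

# Or give host and port separately; the code assembles http://<host>:<port>.
llama-stack-client configure --host localhost --port 5000

# With no flags, the command prompts interactively for host and port.
llama-stack-client configure
```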
A companion constants module pins down where the CLI keeps its configuration:

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import os
from pathlib import Path

LLAMA_STACK_CLIENT_CONFIG_DIR = Path(os.path.expanduser("~/.llama/client"))
```
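Tying the two modules together: `configure` writes a single-key YAML file under the directory defined above, and `get_config()` reads it back on later invocations. A sketch of what ends up on disk, assuming the placeholder endpoint from the example above:

```bash
cat ~/.llama/client/config.yaml
# endpoint: http://localhost:5000
```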