support generate config
hitsmaxft committed Jan 21, 2024
1 parent d39d5b9 commit 285c1d1
Showing 2 changed files with 23 additions and 10 deletions.
6 changes: 6 additions & 0 deletions gemini-cli-example.toml
@@ -0,0 +1,6 @@
token = "PUT_YOUR_TOKEN_HERE"

[generation_config]
top_k=1
top_p=0.9
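
For reference, toml.load parses this example file into a plain dictionary: a top-level token key plus a generation_config table whose contents are handed to the API call unchanged. A minimal sketch of that round trip, assuming the file has been copied to the default ~/.gemini-cli.toml location used by the CLI:

# Sketch: load the example config the same way read_config() below does.
import os
import toml

config = toml.load(os.path.expanduser("~/.gemini-cli.toml"))
print(config.get("token"))              # "PUT_YOUR_TOKEN_HERE"
print(config.get("generation_config"))  # {'top_k': 1, 'top_p': 0.9}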

27 changes: 17 additions & 10 deletions gemini_cli/cli.py
@@ -4,7 +4,8 @@
import sys
import toml
import json
from google.generativeai.types.generation_types import GenerateContentResponse
from typing import Dict, Any
from google.generativeai.types.generation_types import (GenerateContentResponse)

safety_settings = [
    {
@@ -29,7 +30,7 @@
    },
]

def stream_generate_content(prompt, token):
def stream_generate_content(prompt: str, token: str, config: Dict[str, Any]):
    # Configure the API token
    genai.configure(api_key=token)

@@ -39,41 +40,47 @@ def stream_generate_content(prompt, token):
        prompt,
        stream = True,
        safety_settings = safety_settings,
        generation_config = config,
    )

    for part in response:
        print(part.text, end='', flush = True)

def read_token_from_config():
    config_path = os.path.expanduser('~/.gemini.toml')
def read_config(custom_path):
    config_path = os.path.expanduser(custom_path)
    try:
        config_data = toml.load(config_path)
        return config_data.get('token')
        return config_data
    except Exception as e:
        print(f"Error reading config from {config_path}: {e}")
        return None
        return {}


def main():

    parser = argparse.ArgumentParser(description="Stream responses from Google Generative AI.")
    parser.add_argument('prompt', type=str, help="Prompt to send to the model", nargs='?', default=None)
    parser.add_argument('--token', type=str, help="API token for authentication", default=None)
    parser.add_argument('-t', '--token', type=str, help="API token for authentication", default=None)
    parser.add_argument('-f', '--config-file', type=str, help="Path to the config file", default='~/.gemini-cli.toml')
    args = parser.parse_args()



    # Read the prompt from the command-line argument, or from stdin when none is given
    if args.prompt is not None:
        prompt = args.prompt
    elif not sys.stdin.isatty():
        prompt = sys.stdin.read().strip()
    else:
        parser.print_help()
        return

    config = read_config(args.config_file)

    # Read the token from the command-line argument or the config file
    token = args.token if args.token is not None else read_token_from_config()
    token = args.token if args.token is not None else config.get("token", None)
    if token:
        stream_generate_content(prompt, token)
        stream_generate_content(prompt, token, config.get("generation_config", None))
    else:
        print("Token not found. Please provide a token via --token argument or ensure your token is correctly set in ~/.gemini.toml.")
        print("Token not found. Please provide a token via --token argument or ensure your token is correctly set in ~/.gemini-cli.toml.")

if __name__ == "__main__":
    main()
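
Putting the pieces together: the hunk above hides the model construction, so the sketch below is an assumption about how the new config argument reaches the SDK, not a copy of the repository code. It assumes the legacy google-generativeai package and a gemini-pro model; safety_settings is omitted for brevity.

# Hedged sketch of the full streaming call; model name and hidden lines are assumptions.
import google.generativeai as genai

def stream_generate_content(prompt: str, token: str, config):
    genai.configure(api_key=token)               # configure the API token
    model = genai.GenerativeModel("gemini-pro")  # assumed model name
    response = model.generate_content(
        prompt,
        stream=True,
        generation_config=config,                # e.g. {"top_k": 1, "top_p": 0.9} from the TOML
    )
    for part in response:
        print(part.text, end="", flush=True)

With the example file saved as ~/.gemini-cli.toml (or passed via -f), an invocation along the lines of python -m gemini_cli.cli "hello" should pick up both the token and the sampling settings; the exact entry point depends on how the package is installed.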
