From c89af882c7d33fbc1796b4c77b54b3d844ad1549 Mon Sep 17 00:00:00 2001
From: Yangqing Jia
Date: Thu, 14 Sep 2023 08:58:23 -0700
Subject: [PATCH] Update README.md for open-flamingo

---
 advanced/flamingo/README.md | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/advanced/flamingo/README.md b/advanced/flamingo/README.md
index 0d11a72..6c2c77b 100644
--- a/advanced/flamingo/README.md
+++ b/advanced/flamingo/README.md
@@ -42,9 +42,13 @@ lep deployment update -n flamingo --public
 Once the inference service is up (either locally or in the cloud), you can use the client to access it in a programmatical way:
 
 ```python
-from leptonai.client import Client
+from leptonai.client import Client, local, current
 
-client = Client(...)
+# Use this if you are running locally
+client = Client(local())
+# Or, if you are logged in to your workspace via `lep login` already
+# and have launched it:
+# client = Client(current(), "flamingo")
 
 inputs = {
     "demo_images": [
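
For reference, a minimal end-to-end sketch of how the updated client snippet might be used once the deployment is running. This is an illustration, not part of the patch: the `run` endpoint name and the placeholder input values are assumptions; the real input keys and demo data come from the full flamingo README.

```python
from leptonai.client import Client, local, current

# Connect to a locally running photon, or to the "flamingo" deployment
# in the current workspace (assumes `lep login` has already been done).
client = Client(local())
# client = Client(current(), "flamingo")

# Hypothetical inputs for illustration only: the real README builds a
# few-shot prompt from demo images/captions plus a query image.
inputs = {
    "demo_images": ["https://example.com/cat.jpg"],   # placeholder URL
    "demo_texts": ["An image of a cat."],             # placeholder caption
    "query_image": "https://example.com/dog.jpg",     # placeholder URL
    "query_text": "An image of",
}

# Assumes the photon exposes a `run` handler; adjust to the endpoint
# actually defined by the flamingo photon.
print(client.run(**inputs))
```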