Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Switch to use importlib instead of deprecated pkg_resources #678

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 14 additions & 13 deletions llama_stack/cli/model/prompt_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def _add_arguments(self):
)

def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
import pkg_resources
import importlib.resources

# Only Llama 3.1 and 3.2 are supported
supported_model_ids = [
Expand All @@ -64,25 +64,26 @@ def _run_model_template_cmd(self, args: argparse.Namespace) -> None:
f"{model_id} is not a valid Model. Choose one from --\n {model_str}"
)

llama_3_1_file = pkg_resources.resource_filename(
"llama_models", "llama3_1/prompt_format.md"
llama_3_1_file = (
importlib.resources.files("llama_models") / "llama3_1/prompt_format.md"
)
llama_3_2_text_file = pkg_resources.resource_filename(
"llama_models", "llama3_2/text_prompt_format.md"
llama_3_2_text_file = (
importlib.resources.files("llama_models") / "llama3_2/text_prompt_format.md"
)
llama_3_2_vision_file = pkg_resources.resource_filename(
"llama_models", "llama3_2/vision_prompt_format.md"
llama_3_2_vision_file = (
importlib.resources.files("llama_models")
/ "llama3_2/vision_prompt_format.md"
)
if model_family(model_id) == ModelFamily.llama3_1:
with open(llama_3_1_file, "r") as f:
content = f.read()
with importlib.resources.as_file(llama_3_1_file) as f:
content = f.open("r").read()
elif model_family(model_id) == ModelFamily.llama3_2:
if is_multimodal(model_id):
with open(llama_3_2_vision_file, "r") as f:
content = f.read()
with importlib.resources.as_file(llama_3_2_vision_file) as f:
content = f.open("r").read()
else:
with open(llama_3_2_text_file, "r") as f:
content = f.read()
with importlib.resources.as_file(llama_3_2_text_file) as f:
content = f.open("r").read()

render_markdown_to_pager(content)

Expand Down
16 changes: 8 additions & 8 deletions llama_stack/cli/stack/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,15 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import argparse

import importlib.resources

import os
import shutil
from functools import lru_cache
from pathlib import Path
from typing import List, Optional

import pkg_resources

from llama_stack.cli.subcommand import Subcommand

from llama_stack.distribution.datatypes import (
Expand Down Expand Up @@ -290,13 +291,12 @@ def _run_stack_build_command_from_build_config(

if template_name:
# copy run.yaml from template to build_dir instead of generating it again
template_path = pkg_resources.resource_filename(
"llama_stack", f"templates/{template_name}/run.yaml"
template_path = (
importlib.resources.files("llama_stack")
/ f"templates/{template_name}/run.yaml"
)
os.makedirs(build_dir, exist_ok=True)
run_config_file = build_dir / f"{build_config.name}-run.yaml"
shutil.copy(template_path, run_config_file)

with importlib.resources.as_file(template_path) as path:
shutil.copy(path, run_config_file)
# Find all ${env.VARIABLE} patterns
cprint("Build Successful!", color="green")
else:
Expand Down
15 changes: 8 additions & 7 deletions llama_stack/cli/stack/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@ def _add_arguments(self):
)

def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
import pkg_resources
import importlib.resources

import yaml

from llama_stack.distribution.build import ImageType
Expand Down Expand Up @@ -106,15 +107,15 @@ def _run_stack_run_cmd(self, args: argparse.Namespace) -> None:
config = parse_and_maybe_upgrade_config(config_dict)

if config.docker_image:
script = pkg_resources.resource_filename(
"llama_stack",
"distribution/start_container.sh",
script = (
importlib.resources.files("llama_stack")
/ "distribution/start_container.sh"
)
run_args = [script, config.docker_image]
else:
script = pkg_resources.resource_filename(
"llama_stack",
"distribution/start_conda_env.sh",
script = (
importlib.resources.files("llama_stack")
/ "distribution/start_conda_env.sh"
)
run_args = [
script,
Expand Down
14 changes: 6 additions & 8 deletions llama_stack/distribution/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import importlib.resources
import logging
from enum import Enum

from pathlib import Path
from typing import Dict, List

import pkg_resources
from pydantic import BaseModel
from termcolor import cprint

Expand Down Expand Up @@ -111,8 +111,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
normal_deps += SERVER_DEPENDENCIES

if build_config.image_type == ImageType.docker.value:
script = pkg_resources.resource_filename(
"llama_stack", "distribution/build_container.sh"
script = (
importlib.resources.files("llama_stack") / "distribution/build_container.sh"
)
args = [
script,
Expand All @@ -123,8 +123,8 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
" ".join(normal_deps),
]
elif build_config.image_type == ImageType.conda.value:
script = pkg_resources.resource_filename(
"llama_stack", "distribution/build_conda_env.sh"
script = (
importlib.resources.files("llama_stack") / "distribution/build_conda_env.sh"
)
args = [
script,
Expand All @@ -133,9 +133,7 @@ def build_image(build_config: BuildConfig, build_file_path: Path):
" ".join(normal_deps),
]
elif build_config.image_type == ImageType.venv.value:
script = pkg_resources.resource_filename(
"llama_stack", "distribution/build_venv.sh"
)
script = importlib.resources.files("llama_stack") / "distribution/build_venv.sh"
args = [
script,
build_config.name,
Expand Down
16 changes: 7 additions & 9 deletions llama_stack/distribution/stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,12 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import importlib.resources
import logging
import os
import re
from pathlib import Path
from typing import Any, Dict, Optional

import pkg_resources
import yaml

from termcolor import colored
Expand Down Expand Up @@ -190,14 +189,13 @@ async def construct_stack(


def get_stack_run_config_from_template(template: str) -> StackRunConfig:
    """Load the packaged ``run.yaml`` for *template* and build a StackRunConfig.

    Locates the resource with ``importlib.resources`` (so it also works when
    ``llama_stack`` is installed as a zip/wheel without an unpacked tree),
    parses the YAML, and passes the mapping through ``replace_env_vars``
    before validating it as a ``StackRunConfig``.

    Args:
        template: Name of the template directory under ``llama_stack/templates``.

    Returns:
        The validated StackRunConfig for the template.

    Raises:
        ValueError: If the template has no ``run.yaml`` resource.
    """
    template_path = (
        importlib.resources.files("llama_stack") / f"templates/{template}/run.yaml"
    )

    # as_file() materializes the resource to a real filesystem path for the
    # duration of the with-block (a no-op for normal on-disk installs).
    with importlib.resources.as_file(template_path) as path:
        if not path.exists():
            raise ValueError(f"Template '{template}' not found at {template_path}")
        # read_text() instead of yaml.safe_load(path.open()): the latter
        # leaks an open file handle since nothing ever closes it.
        run_config = yaml.safe_load(path.read_text())

    return StackRunConfig(**replace_env_vars(run_config))
Loading