Add furb ruff rule (#1014)
collindutter authored Jul 24, 2024
1 parent bd486f8 commit dc3d135
Showing 28 changed files with 59 additions and 57 deletions.
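Most of the edits below are mechanical rewrites of the kind Ruff's refurb (FURB) rules suggest: open()/read()/write() boilerplate becomes a single pathlib.Path call, and "x if x else y" fallbacks collapse to "x or y". A minimal standalone sketch of both idioms (the save_report helpers are made up for illustration, not code from this repository):

from pathlib import Path

# Before: manual file handle plus a redundant conditional expression.
def save_report_old(path: str, text: str, name: str | None) -> str:
    name = name if name else "untitled"
    with open(path, "w") as f:
        f.write(text)
    return name

# After: Path.write_text() replaces the open/write block, and `or` supplies the default.
def save_report_new(path: str, text: str, name: str | None) -> str:
    name = name or "untitled"
    Path(path).write_text(text)
    return name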
2 changes: 1 addition & 1 deletion griptape/artifacts/boolean_artifact.py
@@ -12,7 +12,7 @@ class BooleanArtifact(BaseArtifact):
value: bool = field(converter=bool, metadata={"serializable": True})

@classmethod
-def parse_bool(cls, value: Union[str, bool]) -> BooleanArtifact:
+def parse_bool(cls, value: Union[str, bool]) -> BooleanArtifact: # noqa: FBT001
"""Convert a string literal or bool to a BooleanArtifact. The string must be either "true" or "false" with any casing."""
if value is not None:
if isinstance(value, str):
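The # noqa: FBT001 comment added above suppresses Ruff's flake8-boolean-trap check (FBT001, boolean-typed positional argument) on that line, keeping the existing parse_bool(value) signature intact. A small standalone sketch of what the rule flags and how the suppression reads (set_verbose is hypothetical):

def set_verbose(flag: bool) -> None:  # noqa: FBT001
    # FBT001 warns about boolean-typed positional parameters like `flag`;
    # the trailing noqa keeps the signature while silencing that one check.
    print("verbose" if flag else "quiet")

set_verbose(True)  # positional boolean call sites are what the rule guards against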
6 changes: 2 additions & 4 deletions griptape/drivers/file_manager/local_file_manager_driver.py
@@ -31,16 +31,14 @@ def try_load_file(self, path: str) -> bytes:
full_path = self._full_path(path)
if self._is_dir(full_path):
raise IsADirectoryError
-with open(full_path, "rb") as file:
-return file.read()
+return Path(full_path).read_bytes()

def try_save_file(self, path: str, value: bytes) -> None:
full_path = self._full_path(path)
if self._is_dir(full_path):
raise IsADirectoryError
os.makedirs(os.path.dirname(full_path), exist_ok=True)
-with open(full_path, "wb") as file:
-file.write(value)
+Path(full_path).write_bytes(value)

def _full_path(self, path: str) -> str:
path = path.lstrip("/")
@@ -1,6 +1,7 @@
from __future__ import annotations

import os
+from pathlib import Path
from typing import Optional

from attrs import define, field
@@ -14,15 +15,13 @@ class LocalConversationMemoryDriver(BaseConversationMemoryDriver):
file_path: str = field(default="griptape_memory.json", kw_only=True, metadata={"serializable": True})

def store(self, memory: BaseConversationMemory) -> None:
-with open(self.file_path, "w") as file:
-file.write(memory.to_json())
+Path(self.file_path).write_text(memory.to_json())

def load(self) -> Optional[BaseConversationMemory]:
if not os.path.exists(self.file_path):
return None
-with open(self.file_path) as file:
-memory = BaseConversationMemory.from_json(file.read())
+memory = BaseConversationMemory.from_json(Path(self.file_path).read_text())

memory.driver = self

return memory
@@ -63,7 +63,7 @@ def upsert_vector(
If a vector with the given vector ID already exists, it is updated; otherwise, a new vector is inserted.
Metadata associated with the vector can also be provided.
"""
-vector_id = vector_id if vector_id else str_to_hash(str(vector))
+vector_id = vector_id or str_to_hash(str(vector))
doc = {"vector": vector, "namespace": namespace, "metadata": meta}
doc.update(kwargs)
if self.service == "aoss":
4 changes: 2 additions & 2 deletions griptape/drivers/vector/azure_mongodb_vector_store_driver.py
@@ -30,8 +30,8 @@ def query(
# Using the embedding driver to convert the query string into a vector
vector = self.embedding_driver.embed_string(query)

-count = count if count else BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
-offset = offset if offset else 0
+count = count or BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
+offset = offset or 0

pipeline = []

4 changes: 2 additions & 2 deletions griptape/drivers/vector/base_vector_store_driver.py
@@ -87,7 +87,7 @@ def upsert_text_artifact(
else:
meta["artifact"] = artifact.to_json()

-vector = artifact.embedding if artifact.embedding else artifact.generate_embedding(self.embedding_driver)
+vector = artifact.embedding or artifact.generate_embedding(self.embedding_driver)

if isinstance(vector, list):
return self.upsert_vector(vector, vector_id=vector_id, namespace=namespace, meta=meta, **kwargs)
@@ -112,7 +112,7 @@ def upsert_text(
self.embedding_driver.embed_string(string),
vector_id=vector_id,
namespace=namespace,
-meta=meta if meta else {},
+meta=meta or {},
**kwargs,
)

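A behavioral note on the `or` rewrites used throughout this commit: `x or default` falls back whenever x is falsy, which matches the old `x if x else default` form exactly, but it is not the same as an explicit None check. A standalone sketch of the distinction:

DEFAULT_QUERY_COUNT = 10

def pick_count(count):
    # Same semantics as `count if count else DEFAULT_QUERY_COUNT`:
    # None, 0, and other falsy values all fall back to the default.
    return count or DEFAULT_QUERY_COUNT

def pick_count_none_only(count):
    # Stricter variant: only None falls back, an explicit 0 is kept.
    return count if count is not None else DEFAULT_QUERY_COUNT

assert pick_count(None) == 10
assert pick_count(0) == 10
assert pick_count_none_only(0) == 0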
5 changes: 3 additions & 2 deletions griptape/drivers/vector/local_vector_store_driver.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import json
+import operator
import os
import threading
from dataclasses import asdict
@@ -58,7 +59,7 @@ def upsert_vector(
meta: Optional[dict] = None,
**kwargs,
) -> str:
-vector_id = vector_id if vector_id else utils.str_to_hash(str(vector))
+vector_id = vector_id or utils.str_to_hash(str(vector))

with self.thread_lock:
self.entries[self._namespaced_vector_id(vector_id, namespace=namespace)] = self.Entry(
@@ -101,7 +102,7 @@ def query(
entries_and_relatednesses = [
(entry, self.relatedness_fn(query_embedding, entry.vector)) for entry in entries.values()
]
-entries_and_relatednesses.sort(key=lambda x: x[1], reverse=True)
+entries_and_relatednesses.sort(key=operator.itemgetter(1), reverse=True)

result = [
BaseVectorStoreDriver.Entry(id=er[0].id, vector=er[0].vector, score=er[1], meta=er[0].meta)
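The operator.itemgetter(1) sort key above is the standard-library equivalent of lambda x: x[1]. A quick standalone illustration:

import operator

pairs = [("a", 0.2), ("b", 0.9), ("c", 0.5)]

by_lambda = sorted(pairs, key=lambda x: x[1], reverse=True)
by_itemgetter = sorted(pairs, key=operator.itemgetter(1), reverse=True)

# Both orderings are identical; itemgetter just avoids a throwaway lambda.
assert by_lambda == by_itemgetter == [("b", 0.9), ("c", 0.5), ("a", 0.2)]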
4 changes: 2 additions & 2 deletions griptape/drivers/vector/marqo_vector_store_driver.py
@@ -123,7 +123,7 @@ def load_entry(self, vector_id: str, *, namespace: Optional[str] = None) -> Opti
if result and "_tensor_facets" in result and len(result["_tensor_facets"]) > 0:
return BaseVectorStoreDriver.Entry(
id=result["_id"],
-meta={k: v for k, v in result.items() if k not in ["_id"]},
+meta={k: v for k, v in result.items() if k != "_id"},
vector=result["_tensor_facets"][0]["_embedding"],
)
else:
@@ -190,7 +190,7 @@ def query(
The list of query results.
"""
params = {
"limit": count if count else BaseVectorStoreDriver.DEFAULT_QUERY_COUNT,
"limit": count or BaseVectorStoreDriver.DEFAULT_QUERY_COUNT,
"attributes_to_retrieve": ["*"] if include_metadata else ["_id"],
"filter_string": f"namespace:{namespace}" if namespace else None,
} | kwargs
4 changes: 2 additions & 2 deletions griptape/drivers/vector/mongodb_atlas_vector_store_driver.py
@@ -133,8 +133,8 @@ def query(
# Using the embedding driver to convert the query string into a vector
vector = self.embedding_driver.embed_string(query)

-count = count if count else BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
-offset = offset if offset else 0
+count = count or BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
+offset = offset or 0

pipeline = [
{
4 changes: 2 additions & 2 deletions griptape/drivers/vector/opensearch_vector_store_driver.py
@@ -60,7 +60,7 @@ def upsert_vector(
If a vector with the given vector ID already exists, it is updated; otherwise, a new vector is inserted.
Metadata associated with the vector can also be provided.
"""
-vector_id = vector_id if vector_id else utils.str_to_hash(str(vector))
+vector_id = vector_id or utils.str_to_hash(str(vector))
doc = {"vector": vector, "namespace": namespace, "metadata": meta}
doc.update(kwargs)
response = self.client.index(index=self.index_name, id=vector_id, body=doc)
@@ -138,7 +138,7 @@ def query(
Returns:
A list of BaseVectorStoreDriver.Entry objects, each encapsulating the retrieved vector, its similarity score, metadata, and namespace.
"""
-count = count if count else BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
+count = count or BaseVectorStoreDriver.DEFAULT_QUERY_COUNT
vector = self.embedding_driver.embed_string(query)
# Base k-NN query
query_body = {"size": count, "query": {"knn": {field_name: {"vector": vector, "k": count}}}}
4 changes: 2 additions & 2 deletions griptape/drivers/vector/pinecone_vector_store_driver.py
@@ -36,7 +36,7 @@ def upsert_vector(
meta: Optional[dict] = None,
**kwargs,
) -> str:
-vector_id = vector_id if vector_id else str_to_hash(str(vector))
+vector_id = vector_id or str_to_hash(str(vector))

params: dict[str, Any] = {"namespace": namespace} | kwargs

@@ -95,7 +95,7 @@ def query(
vector = self.embedding_driver.embed_string(query)

params = {
"top_k": count if count else BaseVectorStoreDriver.DEFAULT_QUERY_COUNT,
"top_k": count or BaseVectorStoreDriver.DEFAULT_QUERY_COUNT,
"namespace": namespace,
"include_values": include_vectors,
"include_metadata": include_metadata,
2 changes: 1 addition & 1 deletion griptape/drivers/vector/redis_vector_store_driver.py
@@ -60,7 +60,7 @@ def upsert_vector(
If a vector with the given vector ID already exists, it is updated; otherwise, a new vector is inserted.
Metadata associated with the vector can also be provided.
"""
-vector_id = vector_id if vector_id else str_to_hash(str(vector))
+vector_id = vector_id or str_to_hash(str(vector))
key = self._generate_key(vector_id, namespace)
bytes_vector = json.dumps(vector).encode("utf-8")

2 changes: 1 addition & 1 deletion griptape/loaders/email_loader.py
@@ -56,7 +56,7 @@ def load(self, source: EmailQuery, *args, **kwargs) -> ListArtifact | ErrorArtif

top_n = max(0, messages_count - max_count) if max_count else 0
for i in range(messages_count, top_n, -1):
-result, data = client.fetch(str(i), "(RFC822)")
+_result, data = client.fetch(str(i), "(RFC822)")

if data is None or not data or data[0] is None:
continue
4 changes: 2 additions & 2 deletions griptape/mixins/media_artifact_file_output_mixin.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import os
+from pathlib import Path
from typing import TYPE_CHECKING, Optional

from attrs import Attribute, define, field
@@ -41,5 +42,4 @@ def _write_to_file(self, artifact: BlobArtifact) -> None:
if os.path.dirname(outfile):
os.makedirs(os.path.dirname(outfile), exist_ok=True)

-with open(outfile, "wb") as f:
-f.write(artifact.value)
+Path(outfile).write_bytes(artifact.value)
4 changes: 2 additions & 2 deletions griptape/tasks/base_image_generation_task.py
@@ -2,6 +2,7 @@

import os
from abc import ABC
+from pathlib import Path
from typing import TYPE_CHECKING

from attrs import Attribute, define, field
@@ -60,5 +61,4 @@ def all_negative_rulesets(self) -> list[Ruleset]:

def _read_from_file(self, path: str) -> MediaArtifact:
self.structure.logger.info("Reading image from %s", os.path.abspath(path))
-with open(path, "rb") as file:
-return ImageLoader().load(file.read())
+return ImageLoader().load(Path(path).read_bytes())
4 changes: 2 additions & 2 deletions griptape/tokenizers/openai_tokenizer.py
@@ -64,7 +64,7 @@ def _default_max_input_tokens(self) -> int:
tokens = next((v for k, v in self.MODEL_PREFIXES_TO_MAX_INPUT_TOKENS.items() if self.model.startswith(k)), None)
offset = 0 if self.model in self.EMBEDDING_MODELS else self.TOKEN_OFFSET

-return (tokens if tokens else self.DEFAULT_MAX_TOKENS) - offset
+return (tokens or self.DEFAULT_MAX_TOKENS) - offset

def _default_max_output_tokens(self) -> int:
tokens = next(
@@ -84,7 +84,7 @@ def count_tokens(self, text: str | list[dict], model: Optional[str] = None) -> i
https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb.
"""
if isinstance(text, list):
-model = model if model else self.model
+model = model or self.model

try:
encoding = tiktoken.encoding_for_model(model)
4 changes: 2 additions & 2 deletions griptape/tools/audio_transcription_client/tool.py
@@ -1,5 +1,6 @@
from __future__ import annotations

+from pathlib import Path
from typing import TYPE_CHECKING, Any, cast

from attrs import Factory, define, field
@@ -31,8 +32,7 @@ class AudioTranscriptionClient(BaseTool):
def transcribe_audio_from_disk(self, params: dict) -> TextArtifact | ErrorArtifact:
audio_path = params["values"]["path"]

-with open(audio_path, "rb") as f:
-audio_artifact = self.audio_loader.load(f.read())
+audio_artifact = self.audio_loader.load(Path(audio_path).read_bytes())

return self.engine.run(audio_artifact)

2 changes: 1 addition & 1 deletion griptape/tools/base_tool.py
@@ -183,7 +183,7 @@ def tool_dir(self) -> str:
return os.path.dirname(os.path.abspath(class_file))

def install_dependencies(self, env: Optional[dict[str, str]] = None) -> None:
-env = env if env else {}
+env = env or {}

command = [sys.executable, "-m", "pip", "install", "-r", "requirements.txt"]

5 changes: 2 additions & 3 deletions griptape/tools/computer/tool.py
@@ -142,8 +142,7 @@ def execute_code_in_container(self, filename: str, code: str) -> BaseArtifact:
local_file_path = os.path.join(local_workdir, filename)

try:
-with open(local_file_path, "w") as f:
-f.write(code)
+Path(local_file_path).write_text(code)

return self.execute_command_in_container(f"python {container_file_path}")
except Exception as e:
@@ -188,7 +187,7 @@ def build_image(self, tool: BaseTool) -> None:

def dependencies(self) -> list[str]:
with open(self.requirements_txt_path) as file:
-return [line.strip() for line in file.readlines()]
+return [line.strip() for line in file]

def __del__(self) -> None:
if self._tempdir:
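The `for line in file` change in dependencies() relies on file objects being iterable line by line: it yields the same lines as file.readlines() without first materializing a list. A small standalone sketch (requirements.txt is a hypothetical path):

with open("requirements.txt") as file:
    # Iterating the file object yields one line at a time, newline included,
    # so strip() is still needed, exactly as with readlines().
    dependencies = [line.strip() for line in file]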
4 changes: 2 additions & 2 deletions griptape/tools/image_query_client/tool.py
@@ -1,5 +1,6 @@
from __future__ import annotations

+from pathlib import Path
from typing import TYPE_CHECKING, Any, cast

from attrs import Factory, define, field
@@ -40,8 +41,7 @@ def query_image_from_disk(self, params: dict) -> TextArtifact | ErrorArtifact:

image_artifacts = []
for image_path in image_paths:
-with open(image_path, "rb") as f:
-image_artifacts.append(self.image_loader.load(f.read()))
+image_artifacts.append(self.image_loader.load(Path(image_path).read_bytes()))

return self.image_query_engine.run(query, image_artifacts)

3 changes: 1 addition & 2 deletions griptape/utils/conversation.py
@@ -16,8 +16,7 @@ def lines(self) -> list[str]:
lines = []

for run in self.memory.runs:
-lines.append(f"Q: {run.input}")
-lines.append(f"A: {run.output}")
+lines.extend((f"Q: {run.input}", f"A: {run.output}"))

return lines

4 changes: 2 additions & 2 deletions griptape/utils/file_utils.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from concurrent import futures
+from pathlib import Path
from typing import Optional

import griptape.utils as utils
@@ -15,8 +16,7 @@ def load_file(path: str) -> bytes:
Returns:
The content of the file.
"""
-with open(path, "rb") as f:
-return f.read()
+return Path(path).read_bytes()


def load_files(paths: list[str], futures_executor: Optional[futures.ThreadPoolExecutor] = None) -> dict[str, bytes]:
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -255,6 +255,7 @@ select = [
"TCH", # flake8-type-checking
"ERA", # eradicate
"PGH", # pygrep-hooks
"FURB", # refurb
]
ignore = [
"UP007", # non-pep604-annotation
@@ -278,6 +279,7 @@ ignore = [
"ANN401", # any-type
"PT011", # pytest-raises-too-broad
]
+preview = true
[tool.ruff.lint.pydocstyle]
convention = "google"

@@ -17,8 +17,7 @@ def temp_dir(self):
def write_file(path: str, content: bytes) -> None:
full_path = os.path.join(temp_dir, path)
os.makedirs(os.path.dirname(full_path), exist_ok=True)
-with open(full_path, "wb") as f:
-f.write(content)
+Path(full_path).write_bytes(content)

def mkdir(path: str) -> None:
full_path = os.path.join(temp_dir, path)
@@ -28,8 +27,7 @@ def copy_test_resources(resource_path: str) -> None:
file_dir = os.path.dirname(__file__)
full_path = os.path.join(file_dir, "../../../resources", resource_path)
full_path = os.path.normpath(full_path)
-with open(full_path, "rb") as source:
-content = source.read()
+content = Path(full_path).read_bytes()
dest_path = os.path.join(temp_dir, "resources", resource_path)
write_file(dest_path, content)

@@ -11,7 +11,7 @@ def _mock_fetch_url(self, mocker):
# characters to the body.
mocker.patch(
"trafilatura.fetch_url"
-).return_value = f'<!DOCTYPE html><html>{"x"*243}<a href="foobar.com">foobar</a></html>'
+).return_value = f'<!DOCTYPE html><html>{"x" * 243}<a href="foobar.com">foobar</a></html>'

@pytest.fixture()
def web_scraper(self):