Used default logger
Personal authored and Personal committed Sep 28, 2023
1 parent cad13db commit f56c2e8
Showing 2 changed files with 20 additions and 20 deletions.
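Both files build on dbt's AdapterLogger, the named logger dbt-core exposes to adapter plugins with the usual level methods (debug, info, warning, error). As a rough illustration of that pattern only, with example names and messages that are not taken from this commit:

from dbt.events import AdapterLogger

# A named adapter logger; messages are routed through dbt's logging system.
logger = AdapterLogger("Athena")
logger.debug("opening a new Athena connection")  # example message
logger.warning("query will be retried")          # example message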
2 changes: 2 additions & 0 deletions dbt/adapters/athena/connections.py
@@ -35,6 +35,8 @@
from dbt.adapters.sql import SQLConnectionManager
from dbt.contracts.connection import AdapterResponse, Connection, ConnectionState
from dbt.exceptions import ConnectionError, DbtRuntimeError
from dbt.events import AdapterLogger


logger = AdapterLogger("Athena")

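In the lakeformation.py diff below, the module stops constructing its own AdapterLogger and instead imports a shared LOGGER from dbt.adapters.athena.constants. That constants module is not part of this commit, so the following is only a minimal sketch of what it presumably provides:

# dbt/adapters/athena/constants.py -- not included in this commit; sketched
# here on the assumption that it centralizes a single AdapterLogger instance.
from dbt.events import AdapterLogger

LOGGER = AdapterLogger("Athena")

Centralizing the instance means every adapter module logs under one adapter name rather than per-module names such as "AthenaLakeFormation".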
38 changes: 18 additions & 20 deletions dbt/adapters/athena/lakeformation.py
@@ -14,11 +14,9 @@
from pydantic import BaseModel

from dbt.adapters.athena.relation import AthenaRelation
-from dbt.events import AdapterLogger
+from dbt.adapters.athena.constants import LOGGER
from dbt.exceptions import DbtRuntimeError

-logger = AdapterLogger("AthenaLakeFormation")


class LfTagsConfig(BaseModel):
enabled: bool = False
@@ -51,7 +49,7 @@ def process_lf_tags(self) -> None:

def _remove_lf_tags_columns(self, existing_lf_tags: GetResourceLFTagsResponseTypeDef) -> None:
lf_tags_columns = existing_lf_tags.get("LFTagsOnColumns", [])
-logger.debug(f"COLUMNS: {lf_tags_columns}")
+LOGGER.debug(f"COLUMNS: {lf_tags_columns}")
if lf_tags_columns:
to_remove = {}
for column in lf_tags_columns:
@@ -64,7 +62,7 @@ def _remove_lf_tags_columns(self, existing_lf_tags: GetResourceLFTagsResponseTypeDef) -> None:
to_remove[tag_key][tag_value] = [column["Name"]]
else:
to_remove[tag_key][tag_value].append(column["Name"])
-logger.debug(f"TO REMOVE: {to_remove}")
+LOGGER.debug(f"TO REMOVE: {to_remove}")
for tag_key, tag_config in to_remove.items():
for tag_value, columns in tag_config.items():
resource = {
@@ -79,15 +77,15 @@ def _apply_lf_tags_table(
self, table_resource: ResourceTypeDef, existing_lf_tags: GetResourceLFTagsResponseTypeDef
) -> None:
lf_tags_table = existing_lf_tags.get("LFTagsOnTable", [])
-logger.debug(f"EXISTING TABLE TAGS: {lf_tags_table}")
-logger.debug(f"CONFIG TAGS: {self.lf_tags}")
+LOGGER.debug(f"EXISTING TABLE TAGS: {lf_tags_table}")
+LOGGER.debug(f"CONFIG TAGS: {self.lf_tags}")

to_remove = {
tag["TagKey"]: tag["TagValues"]
for tag in lf_tags_table
if tag["TagKey"] not in self.lf_tags # type: ignore
}
-logger.debug(f"TAGS TO REMOVE: {to_remove}")
+LOGGER.debug(f"TAGS TO REMOVE: {to_remove}")
if to_remove:
response = self.lf_client.remove_lf_tags_from_resource(
Resource=table_resource, LFTags=[{"TagKey": k, "TagValues": v} for k, v in to_remove.items()]
@@ -128,9 +126,9 @@ def _parse_and_log_lf_response(
for failure in failures:
tag = failure.get("LFTag", {}).get("TagKey")
error = failure.get("Error", {}).get("ErrorMessage")
-logger.error(f"Failed to {verb} {tag} for " + resource_msg + f" - {error}")
+LOGGER.error(f"Failed to {verb} {tag} for " + resource_msg + f" - {error}")
raise DbtRuntimeError(base_msg)
-logger.debug(f"Success: {verb} LF tags {lf_tags} to " + resource_msg)
+LOGGER.debug(f"Success: {verb} LF tags {lf_tags} to " + resource_msg)


class FilterConfig(BaseModel):
@@ -178,10 +176,10 @@ def get_filters(self) -> Dict[str, DataCellsFilterTypeDef]:

def process_filters(self, config: LfGrantsConfig) -> None:
current_filters = self.get_filters()
-logger.debug(f"CURRENT FILTERS: {current_filters}")
+LOGGER.debug(f"CURRENT FILTERS: {current_filters}")

to_drop = [f for name, f in current_filters.items() if name not in config.data_cell_filters.filters]
-logger.debug(f"FILTERS TO DROP: {to_drop}")
+LOGGER.debug(f"FILTERS TO DROP: {to_drop}")
for f in to_drop:
self.lf_client.delete_data_cells_filter(
TableCatalogId=f["TableCatalogId"],
@@ -195,7 +193,7 @@ def process_filters(self, config: LfGrantsConfig) -> None:
for name, f in config.data_cell_filters.filters.items()
if name not in current_filters
]
-logger.debug(f"FILTERS TO ADD: {to_add}")
+LOGGER.debug(f"FILTERS TO ADD: {to_add}")
for f in to_add:
self.lf_client.create_data_cells_filter(TableData=f)

@@ -204,13 +202,13 @@ def process_filters(self, config: LfGrantsConfig) -> None:
for name, f in config.data_cell_filters.filters.items()
if name in current_filters and f.to_update(current_filters[name])
]
-logger.debug(f"FILTERS TO UPDATE: {to_update}")
+LOGGER.debug(f"FILTERS TO UPDATE: {to_update}")
for f in to_update:
self.lf_client.update_data_cells_filter(TableData=f)

def process_permissions(self, config: LfGrantsConfig) -> None:
for name, f in config.data_cell_filters.filters.items():
-logger.debug(f"Start processing permissions for filter: {name}")
+LOGGER.debug(f"Start processing permissions for filter: {name}")
current_permissions = self.lf_client.list_permissions(
Resource={
"DataCellsFilter": {
@@ -231,9 +229,9 @@ def process_permissions(self, config: LfGrantsConfig) -> None:
Entries=[self._permission_entry(name, principal, idx) for idx, principal in enumerate(to_revoke)],
)
revoke_principals_msg = "\n".join(to_revoke)
-logger.debug(f"Revoked permissions for filter {name} from principals:\n{revoke_principals_msg}")
+LOGGER.debug(f"Revoked permissions for filter {name} from principals:\n{revoke_principals_msg}")
else:
-logger.debug(f"No redundant permissions found for filter: {name}")
+LOGGER.debug(f"No redundant permissions found for filter: {name}")

to_add = {p for p in f.principals if p not in current_principals}
if to_add:
@@ -242,11 +240,11 @@ def process_permissions(self, config: LfGrantsConfig) -> None:
Entries=[self._permission_entry(name, principal, idx) for idx, principal in enumerate(to_add)],
)
add_principals_msg = "\n".join(to_add)
-logger.debug(f"Granted permissions for filter {name} to principals:\n{add_principals_msg}")
+LOGGER.debug(f"Granted permissions for filter {name} to principals:\n{add_principals_msg}")
else:
-logger.debug(f"No new permissions added for filter {name}")
+LOGGER.debug(f"No new permissions added for filter {name}")

-logger.debug(f"Permissions are set to be consistent with config for filter: {name}")
+LOGGER.debug(f"Permissions are set to be consistent with config for filter: {name}")

def _permission_entry(self, filter_name: str, principal: str, idx: int) -> BatchPermissionsRequestEntryTypeDef:
return {
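The diff is truncated inside _permission_entry, so its return value is not shown here. For orientation only, the sketch below follows the boto3 Lake Formation shape of a batch permissions entry for a data cells filter (BatchPermissionsRequestEntryTypeDef, as used by batch_grant_permissions and batch_revoke_permissions); the literal values and the "SELECT" permission are placeholders, not taken from this commit:

# Illustrative shape of a batch permissions entry for a data cells filter.
# All literal values below are placeholders.
def example_permission_entry(filter_name: str, principal: str, idx: int) -> dict:
    return {
        "Id": str(idx),  # unique id within the batch request
        "Principal": {"DataLakePrincipalIdentifier": principal},
        "Resource": {
            "DataCellsFilter": {
                "TableCatalogId": "123456789012",    # placeholder AWS account id
                "DatabaseName": "example_database",  # placeholder
                "TableName": "example_table",        # placeholder
                "Name": filter_name,
            }
        },
        "Permissions": ["SELECT"],
        "PermissionsWithGrantOption": [],
    }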
