style(flynt): convert .format and % strings to f-strings
dshemetov committed Jun 23, 2023
1 parent a888f54 commit 9194796
Showing 24 changed files with 133 additions and 138 deletions.
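
The commit message credits flynt, a codemod tool that mechanically rewrites printf-style % formatting and str.format() calls into f-strings. As a minimal sketch of the pattern (the variables below are hypothetical, not taken from this diff):

    name, count = "flu1", 3

    percent_style = "%s has %d entries" % (name, count)      # old: % formatting
    format_style = "{} has {} entries".format(name, count)   # old: str.format
    f_style = f"{name} has {count} entries"                  # new: f-string

    # All three render the same string; the f-string names its inputs inline.
    assert percent_style == format_style == f_style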
22 changes: 11 additions & 11 deletions src/acquisition/afhsb/afhsb_csv.py
@@ -51,15 +51,15 @@ def get_flu_cat(dx):
         if dx.startswith(prefix):
             return 1
     for i in range(12, 19):
-        prefix = "J{}".format(i)
+        prefix = f"J{i}"
         if dx.startswith(prefix):
             return 2
     for i in range(0, 7):
-        prefix = "J0{}".format(i)
+        prefix = f"J0{i}"
         if dx.startswith(prefix):
             return 3
     for i in range(20, 23):
-        prefix = "J{}".format(i)
+        prefix = f"J{i}"
         if dx.startswith(prefix):
             return 3
     for prefix in ["J40", "R05", "H669", "R509", "B9789"]:
@@ -79,7 +79,7 @@ def get_field(row, column):
 
 def row2flu(row):
     for i in range(1, 9):
-        dx = get_field(row, "dx{}".format(i))
+        dx = get_field(row, f"dx{i}")
         flu_cat = get_flu_cat(dx)
         if flu_cat is not None:
             return flu_cat
@@ -136,7 +136,7 @@ def get_country_mapping():
 
 
 def format_dmisid_csv(filename, target_name):
-    src_path = os.path.join(TARGET_DIR, "{}.csv".format(filename))
+    src_path = os.path.join(TARGET_DIR, f"{filename}.csv")
     dst_path = os.path.join(TARGET_DIR, target_name)
 
     src_csv = open(src_path, encoding="utf-8-sig")
@@ -231,10 +231,10 @@ def state2region_csv():
 
 def write_afhsb_csv(period):
     flu_mapping = {0: "ili-flu3", 1: "flu1", 2: "flu2-flu1", 3: "flu3-flu2"}
-    results_dict = pickle.load(open(os.path.join(TARGET_DIR, "{}.pickle".format(period)), "rb"))
+    results_dict = pickle.load(open(os.path.join(TARGET_DIR, f"{period}.pickle"), "rb"))
 
     fieldnames = ["id", "epiweek", "dmisid", "flu_type", "visit_sum"]
-    with open(os.path.join(TARGET_DIR, "{}.csv".format(period)), "w") as csvfile:
+    with open(os.path.join(TARGET_DIR, f"{period}.csv"), "w") as csvfile:
         writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
         writer.writeheader()
 
@@ -248,7 +248,7 @@ def write_afhsb_csv(period):
             for flu in sorted(dmisid_dict.keys()):
                 visit_sum = dmisid_dict[flu]
                 i += 1
-                epiweek = int("{}{:02d}".format(year, week))
+                epiweek = int(f"{year}{week:02d}")
                 flu_type = flu_mapping[flu]
 
                 row = {
@@ -290,8 +290,8 @@ def dmisid_start_time():
 
 
 def fillin_zero_to_csv(period, dmisid_start_record):
-    src_path = os.path.join(TARGET_DIR, "{}.csv".format(period))
-    dst_path = os.path.join(TARGET_DIR, "filled_{}.csv".format(period))
+    src_path = os.path.join(TARGET_DIR, f"{period}.csv")
+    dst_path = os.path.join(TARGET_DIR, f"filled_{period}.csv")
 
     # Load data into a dictionary
     src_csv = open(src_path)
@@ -352,7 +352,7 @@ def fillin_zero_to_csv(period, dmisid_start_record):
         if i % 100000 == 0:
             print(row)
         i += 1
-    print("Wrote {} rows".format(i))
+    print(f"Wrote {i} rows")
 
 
 ######################### Functions for AFHSB data ##########################
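One detail from the write_afhsb_csv hunks above: format specs such as {:02d} carry over verbatim into the f-string braces, so zero-padding behavior is unchanged. A runnable sketch with stand-in values:

    year, week = 2015, 3

    # before: int("{}{:02d}".format(year, week))
    # after:  int(f"{year}{week:02d}")
    epiweek = int(f"{year}{week:02d}")
    assert epiweek == 201503  # week is still zero-padded to two digits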
2 changes: 1 addition & 1 deletion src/acquisition/afhsb/afhsb_sql.py
@@ -73,7 +73,7 @@ def init_region_table(sourcefile):
 
 
 def init_raw_data(table_name, sourcefile):
-    print("Initialize {}".format(table_name))
+    print(f"Initialize {table_name}")
     (u, p) = secrets.db.epi
     cnx = connector.connect(user=u, passwd=p, database="epidata")
     create_table_cmd = f"""
2 changes: 1 addition & 1 deletion src/acquisition/cdcp/cdc_dropbox_receiver.py
@@ -101,7 +101,7 @@ def fetch_data():
     if resp.status_code != 200:
         raise Exception(["resp.status_code", resp.status_code])
     dropbox_len = meta.size
-    print(" need %d bytes..." % dropbox_len)
+    print(f" need {int(dropbox_len)} bytes...")
     content_len = int(resp.headers.get("Content-Length", -1))
     if dropbox_len != content_len:
         info = ["dropbox_len", dropbox_len, "content_len", content_len]
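The int() wrapper on the converted line above is how flynt preserves %d semantics: %d coerces its argument to an integer before formatting, whereas a bare f-string placeholder formats the value as-is. A small sketch (the byte count is made up; Dropbox's meta.size should already be an int in practice):

    dropbox_len = 1024.9  # hypothetical non-integer value

    assert " need %d bytes..." % dropbox_len == " need 1024 bytes..."
    assert f" need {int(dropbox_len)} bytes..." == " need 1024 bytes..."
    assert f" need {dropbox_len} bytes..." == " need 1024.9 bytes..."  # dropping int() changes output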
6 changes: 3 additions & 3 deletions src/acquisition/cdcp/cdc_extract.py
@@ -110,7 +110,7 @@ def get_total_hits(cur, epiweek, state):
     for (total,) in cur:
         pass
     if total is None:
-        raise Exception("missing data for %d-%s" % (epiweek, state))
+        raise Exception(f"missing data for {int(epiweek)}-{state}")
     return total
 
 
@@ -166,7 +166,7 @@ def extract(first_week=None, last_week=None, test_mode=False):
         cur.execute("SELECT max(`epiweek`) FROM `cdc_meta`")
         for (last_week,) in cur:
             pass
-    print("extracting %d--%d" % (first_week, last_week))
+    print(f"extracting {int(first_week)}--{int(last_week)}")
 
     # update each epiweek
     for epiweek in flu.range_epiweeks(first_week, last_week, inclusive=True):
@@ -180,7 +180,7 @@ def extract(first_week=None, last_week=None, test_mode=False):
                 store_result(cur, epiweek, state, *nums, total)
                 print(f"  {epiweek}-{state}: {' '.join(str(n) for n in nums)} ({total})")
             except Exception as ex:
-                print(" %d-%s: failed" % (epiweek, state), ex)
+                print(f" {int(epiweek)}-{state}: failed", ex)
                 # raise ex
         sys.stdout.flush()
 
2 changes: 1 addition & 1 deletion src/acquisition/cdcp/cdc_upload.py
@@ -232,7 +232,7 @@ def parse_zip(zf, level=1):
         if handler is not None:
             with zf.open(name) as temp:
                 count = handler(csv.reader(io.StringIO(str(temp.read(), "utf-8"))))
-            print(prefix, " %d rows" % count)
+            print(prefix, f" {int(count)} rows")
         else:
             print(prefix, " (ignored)")
 
12 changes: 6 additions & 6 deletions src/acquisition/ecdc/ecdc_db_update.py
@@ -87,7 +87,7 @@ def safe_int(i):
 def get_rows(cnx, table="ecdc_ili"):
     # Count and return the number of rows in the `ecdc_ili` table.
     select = cnx.cursor()
-    select.execute("SELECT count(1) num FROM %s" % table)
+    select.execute(f"SELECT count(1) num FROM {table}")
     for (num,) in select:
         pass
     select.close()
@@ -100,7 +100,7 @@ def update_from_file(issue, date, dir, test_mode=False):
     u, p = secrets.db.epi
     cnx = mysql.connector.connect(user=u, password=p, database="epidata")
     rows1 = get_rows(cnx, "ecdc_ili")
-    print("rows before: %d" % (rows1))
+    print(f"rows before: {int(rows1)}")
     insert = cnx.cursor()
 
     # load the data, ignoring empty rows
@@ -115,9 +115,9 @@ def update_from_file(issue, date, dir, test_mode=False):
             row["region"] = data[4]
             row["incidence_rate"] = data[3]
             rows.append(row)
-    print(" loaded %d rows" % len(rows))
+    print(f" loaded {len(rows)} rows")
     entries = [obj for obj in rows if obj]
-    print(" found %d entries" % len(entries))
+    print(f" found {len(entries)} entries")
 
     sql = """
         INSERT INTO
@@ -149,7 +149,7 @@ def update_from_file(issue, date, dir, test_mode=False):
     else:
         cnx.commit()
     rows2 = get_rows(cnx)
-    print("rows after: %d (added %d)" % (rows2, rows2 - rows1))
+    print(f"rows after: {int(rows2)} (added {int(rows2 - rows1)})")
     cnx.close()
 
 
@@ -171,7 +171,7 @@ def main():
         raise Exception("--file and --issue must both be present or absent")
 
     date = datetime.datetime.now().strftime("%Y-%m-%d")
-    print("assuming release date is today, %s" % date)
+    print(f"assuming release date is today, {date}")
 
     ensure_tables_exist()
     if args.file:
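Note that the get_rows conversion above is purely syntactic: the table name is still interpolated directly into the SQL text rather than passed as a bound query parameter, exactly as the old % version did. A quick check with a stand-in table name:

    table = "ecdc_ili"  # stand-in value

    old_sql = "SELECT count(1) num FROM %s" % table
    new_sql = f"SELECT count(1) num FROM {table}"
    assert old_sql == new_sql == "SELECT count(1) num FROM ecdc_ili"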
6 changes: 3 additions & 3 deletions src/acquisition/flusurv/flusurv.py
@@ -80,7 +80,7 @@ def fetch_json(path, payload, call_count=1, requests_impl=requests):
 
     # it's polite to self-identify this "bot"
     delphi_url = "https://delphi.cmu.edu/index.html"
-    user_agent = "Mozilla/5.0 (compatible; delphibot/1.0; +%s)" % delphi_url
+    user_agent = f"Mozilla/5.0 (compatible; delphibot/1.0; +{delphi_url})"
 
     # the FluSurv AMF server
     flusurv_url = "https://gis.cdc.gov/GRASP/Flu3/" + path
@@ -106,7 +106,7 @@ def fetch_json(path, payload, call_count=1, requests_impl=requests):
     if resp.status_code == 500 and call_count <= 2:
         # the server often fails with this status, so wait and retry
         delay = 10 * call_count
-        print("got status %d, will retry in %d sec..." % (resp.status_code, delay))
+        print(f"got status {int(resp.status_code)}, will retry in {int(delay)} sec...")
         time.sleep(delay)
         return fetch_json(path, payload, call_count=call_count + 1)
     elif resp.status_code != 200:
@@ -180,7 +180,7 @@ def extract_from_object(data_in):
         raise Exception("no data found")
 
     # print the result and return flu data
-    print("found data for %d weeks" % len(data_out))
+    print(f"found data for {len(data_out)} weeks")
     return data_out
 
 
6 changes: 3 additions & 3 deletions src/acquisition/flusurv/flusurv_update.py
@@ -108,7 +108,7 @@ def update(issue, location_name, test_mode=False):
     cnx = mysql.connector.connect(host=secrets.db.host, user=u, password=p, database="epidata")
     cur = cnx.cursor()
     rows1 = get_rows(cur)
-    print("rows before: %d" % rows1)
+    print(f"rows before: {int(rows1)}")
 
     # SQL for insert/update
     sql = """
@@ -148,7 +148,7 @@ def update(issue, location_name, test_mode=False):
 
     # commit and disconnect
     rows2 = get_rows(cur)
-    print("rows after: %d (+%d)" % (rows2, rows2 - rows1))
+    print(f"rows after: {int(rows2)} (+{int(rows2 - rows1)})")
     cur.close()
     if test_mode:
         print("test mode: not committing database changes")
@@ -170,7 +170,7 @@ def main():
 
     # scrape current issue from the main page
     issue = flusurv.get_current_issue()
-    print("current issue: %d" % issue)
+    print(f"current issue: {int(issue)}")
 
     # fetch flusurv data
     if args.location == "all":
14 changes: 7 additions & 7 deletions src/acquisition/fluview/fluview.py
@@ -108,23 +108,23 @@ def get_tier_ids(name):
     location_ids[Key.TierType.hhs] = sorted(set(location_ids[Key.TierType.hhs]))
     num = len(location_ids[Key.TierType.hhs])
     if num != 10:
-        raise Exception("expected 10 hhs regions, found %d" % num)
+        raise Exception(f"expected 10 hhs regions, found {int(num)}")
 
     # add location ids for census divisions
     for row in data[Key.TierListEntry.cen]:
         location_ids[Key.TierType.cen].append(row[Key.TierIdEntry.cen])
     location_ids[Key.TierType.cen] = sorted(set(location_ids[Key.TierType.cen]))
     num = len(location_ids[Key.TierType.cen])
     if num != 9:
-        raise Exception("expected 9 census divisions, found %d" % num)
+        raise Exception(f"expected 9 census divisions, found {int(num)}")
 
     # add location ids for states
     for row in data[Key.TierListEntry.sta]:
         location_ids[Key.TierType.sta].append(row[Key.TierIdEntry.sta])
     location_ids[Key.TierType.sta] = sorted(set(location_ids[Key.TierType.sta]))
     num = len(location_ids[Key.TierType.sta])
     if num != 57:
-        raise Exception("expected 57 states/territories/cities, found %d" % num)
+        raise Exception(f"expected 57 states/territories/cities, found {int(num)}")
 
     # return a useful subset of the metadata
     # (latest epiweek, latest season, tier ids, location ids)
@@ -181,7 +181,7 @@ def save_latest(path=None):
     data = fetch_metadata(sess)
     info = get_issue_and_locations(data)
     issue = info["epiweek"]
-    print("current issue: %d" % issue)
+    print(f"current issue: {int(issue)}")
 
     # establish timing
     dt = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
@@ -200,7 +200,7 @@ def save_latest(path=None):
         ("cen", Key.TierType.cen),
         ("sta", Key.TierType.sta),
     ):
-        name = "ilinet_%s_%d_%s.zip" % (delphi_name, issue, dt)
+        name = f"ilinet_{delphi_name}_{int(issue)}_{dt}.zip"
         if path is None:
             filename = name
         else:
@@ -209,12 +209,12 @@ def save_latest(path=None):
         locations = info["location_ids"][cdc_name]
 
         # download and show timing information
-        print("downloading %s" % delphi_name)
+        print(f"downloading {delphi_name}")
         t0 = time.time()
         size = download_data(tier_id, locations, seasons, filename)
         t1 = time.time()
 
-        print("  saved %s (%d bytes in %.1f seconds)" % (filename, size, t1 - t0))
+        print(f"  saved {filename} ({int(size)} bytes in {t1 - t0:.1f} seconds)")
         files.append(filename)
 
     # return the current issue and the list of downloaded files
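As the save_latest hunk above shows, width and precision conversions such as %.1f map directly onto f-string format specs. A runnable sketch with made-up values:

    filename, size = "ilinet_nat_202323_20230623_120000.zip", 48213  # made-up values
    t0, t1 = 100.0, 102.34

    old = "  saved %s (%d bytes in %.1f seconds)" % (filename, size, t1 - t0)
    new = f"  saved {filename} ({int(size)} bytes in {t1 - t0:.1f} seconds)"
    assert old == new  # both round the elapsed time to one decimal place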
(The remaining 15 changed files are not shown in this view.)