Skip to content

Commit

Permalink
drop Python 3.9
Browse files Browse the repository at this point in the history
  • Loading branch information
e3rd committed Nov 20, 2023
1 parent 35adc0f commit 86539d3
Show file tree
Hide file tree
Showing 9 changed files with 82 additions and 97 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/run-unittest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.8, 3.9, "3.10", 3.11, 3.12]
python-version: ["3.10", 3.11, 3.12]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
Expand Down
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,6 @@
/tests/**
!/tests/*
/tests/statistics.txt
convey.log
convey.log
output_*
*@example*
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# CHANGELOG

## 1.4.5
* drop Python 3.9 support
* fix: Setting field type by `--type` suppresses the auto-detection.

## 1.4.4 (2023-05-26)
Expand Down
4 changes: 2 additions & 2 deletions convey/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
__author__ = "Edvard Rejthar, CSIRT.CZ"
__date__ = "$Feb 26, 2015 8:13:25 PM$"

if sys.version_info[0:2] < (3, 7):
print("We need at least Python 3.7, your version is " + sys.version + ". Try an older Convey release or rather upgrade Python.")
if sys.version_info[0:2] < (3, 10):
print("We need at least Python 3.10, your version is " + sys.version + ". Try an older Convey release or rather upgrade Python.")
exit()


Expand Down
47 changes: 16 additions & 31 deletions convey/action_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import re
from difflib import SequenceMatcher
import logging
from typing import Any, Callable, List, Optional
from typing import List, Optional
from pathlib import Path
from sys import exit

Expand Down Expand Up @@ -159,36 +159,21 @@ def add_uniquing(self, col_i=None):
self.parser.is_processable = True

def assure_aggregation_group_by(self, fn, field, group, grouping_probably_wanted=True, exit_on_fail=False) -> Optional[Field]:
a, b, c = (fn == Aggregate.count, group is None, grouping_probably_wanted)
if (a, b) == (True, True):
group = field
elif (a, b, c) == (False, True, True):
# here, self.select col might return None
group = self.select_col("group by", prepended_field=("no grouping", "aggregate whole column"))
elif (a, b) == (True, False):
if field != group:
logger.error(f"Count column '{field.name}' must be the same"
f" as the grouping column '{group.name}'.")
if exit_on_fail:
exit()
else:
raise Cancelled
return group # XX as of Python 3.10 replace with the following
# match (fn == Aggregate.count, group is None, grouping_probably_wanted):
# case True, True, _:
# group = field
# case False, True, True:
# # here, self.select col might return None
# group = self.select_col("group by", prepended_field=("no grouping", "aggregate whole column"))
# case True, False, _:
# if field != group:
# logger.error(f"Count column '{field.name}' must be the same"
# f" as the grouping column '{group.name}'.")
# if exit_on_fail:
# exit()
# else:
# raise Cancelled
# return group
match (fn == Aggregate.count, group is None, grouping_probably_wanted):
case True, True, _:
group = field
case False, True, True:
# here, self.select col might return None
group = self.select_col("group by", prepended_field=("no grouping", "aggregate whole column"))
case True, False, _:
if field != group:
logger.error(f"Count column '{field.name}' must be the same"
f" as the grouping column '{group.name}'.")
if exit_on_fail:
exit()
else:
raise Cancelled
return group

def add_filtering(self, include=True, col_i=None, val=None):
if col_i is None:
Expand Down
61 changes: 26 additions & 35 deletions convey/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@

logger = logging.getLogger(__name__)


def send_ipc(pipe, msg, refresh_stdout):
sys.stdout = sys.stderr = StringIO() # creating a new one is faster than truncating the old
console_handler.setStream(sys.stdout)
Expand Down Expand Up @@ -139,9 +140,7 @@ def check_daemon(self, args):
server.listen()
sys.stdout_real = stdout = sys.stdout
sys.stdout = sys.stderr = StringIO()
if sys.version_info >= (3, 7): # XX remove when dropped Python 3.6 support.
# In 3.6, logging message from the daemon will not work.
console_handler.setStream(sys.stdout)
console_handler.setStream(sys.stdout)
PromptSession.__init__ = lambda _, *ar, **kw: (_ for _ in ()).throw(
ConnectionAbortedError('Prompt raised.'))
return True, stdout, server
Expand Down Expand Up @@ -246,11 +245,11 @@ def process_args(self, is_daemon, args, argparser) -> Optional[ActionController]
# --output=FILE → an output file generated (Config.get("output") -> parser.target_file set)
args.output = None
for flag in ["output", "web", "whois", "nmap", "dig", "delimiter", "quote_char", "compute_preview",
"user_agent",
"multiple_hostname_ip", "multiple_cidr_ip", "web_timeout", "whois_ttl", "disable_external",
"debug", "crash_post_mortem",
"testing", "attach_files", "attach_paths_from_path_column", "jinja", "subject", "body", "references",
"whois_delete_unknown", "whois_reprocessable_unknown", "whois_cache"]:
"user_agent",
"multiple_hostname_ip", "multiple_cidr_ip", "web_timeout", "whois_ttl", "disable_external",
"debug", "crash_post_mortem",
"testing", "attach_files", "attach_paths_from_path_column", "jinja", "subject", "body", "references",
"whois_delete_unknown", "whois_reprocessable_unknown", "whois_cache"]:
if getattr(args, flag) is not None:
Config.set(flag, getattr(args, flag))
if args.headless or args.send_test:
Expand Down Expand Up @@ -285,8 +284,8 @@ def process_args(self, is_daemon, args, argparser) -> Optional[ActionController]
Config.set("single_detect", True)
Config.set("adding-new-fields", bool(new_fields))
self.wrapper = Wrapper(args.file_or_input, args.file, args.input,
args.type, args.fresh, args.reprocess,
args.whois_delete)
args.type, args.fresh, args.reprocess,
args.whois_delete)
self.parser: Parser = self.wrapper.parser
ac = ActionController(self.parser, args.reprocess)

Expand Down Expand Up @@ -325,7 +324,7 @@ def get_column_i(col, check):
if res:
print(res)
exit()
if is_daemon and self.see_menu: # if we will need menu, daemon must stop here
if is_daemon and self.see_menu: # if we will need menu, daemon must stop here
raise ConnectionAbortedError("displaying a menu is too complex")

if args.aggregate:
Expand Down Expand Up @@ -492,7 +491,8 @@ def _(_):
break
else:
# I cannot use a mere `input()` here, it would interfere with prompt_toolkit and freeze
Dialog(autowidgetsize=True).msgbox("No column selected to aggregate with.\nUse arrows to select a column first.")
Dialog(autowidgetsize=True).msgbox(
"No column selected to aggregate with.\nUse arrows to select a column first.")
refresh()

# @bindings.add('escape', 'n') # alt-n to rename header
Expand Down Expand Up @@ -799,7 +799,7 @@ def add_list(labels):
if ret == "ok":
# these processing settings should be removed
for v in values[
::-1]: # we reverse the list, we need to pop bigger indices first without shifting lower indices
::-1]: # we reverse the list, we need to pop bigger indices first without shifting lower indices
fn, v = discard[int(v) - 1]
fn(v)
if st["aggregate"] and not st["aggregate"][1]:
Expand All @@ -819,36 +819,27 @@ def redo_menu(self):
menu.add("Rework whole file again", self.wrapper.clear)
menu.sout()



def close(self):
self.wrapper.save(last_chance=True) # re-save cache file
if not Config.get("yes"):
if not Config.is_quiet():
# Build processing settings list
l = []
o = []
st = self.parser.settings
fields = self.parser.fields

for type_, items in st.items():
# XX code does not return its custom part
if not items and items != 0:
continue
if type_ == "split":
l.append(f"--split {fields[items]}")
elif type_ == "add":
l.extend(f"--field {f},{str(f.source_field)}" for f in items)
elif type_ == "filter":
l.extend(f"--{'include' if include else 'exclude'}-filter {fields[f].name},{val}"
for include, f, val in items)
elif type_ == "unique":
l.extend(f"--unique {fields[f].name}" for f in items)
elif type_ == "aggregate":
# XX does not work well - at least, they are printed out opposite way
l.append(f"--aggregate {items.group_by}," + ",".join(
f"{fn.__name__},{col.name}" for fn, col in items.actions))
if l:
print(f" Settings cached:\n convey {self.parser.source_file} " + " ".join(l) + "\n")
# XX code does not return its custom part
if col := st["split"]:
o.append(f"--split {fields[col]}")
o.extend(f"--field {f},{str(f.source_field)}" for f in st["add"])
o.extend(f"--{'include' if include else 'exclude'}-filter {fields[f].name},{val}"
for include, f, val in st["filter"])
o.extend(f"--unique {fields[f].name}" for f in st["unique"])
if col := st["aggregate"]:
o.append(f"--aggregate " + ",".join(f"{col.name},{fn.__name__}" for fn, col in col.actions)
+ (f",{col.group_by}" if col.group_by else ""))
if o:
print(f" Settings cached:\n convey {self.parser.source_file} " + " ".join(o) + "\n")

print("Finished.")
exit(0)
Expand Down
8 changes: 1 addition & 7 deletions convey/definition.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,10 @@
from csv import Dialect
from sys import version_info
from typing import Callable, Dict, TYPE_CHECKING, List, Optional, Tuple, Union
from typing import Callable, Dict, TYPE_CHECKING, List, Optional, Tuple, Union, TypedDict

from .action import AggregateAction, MergeAction
from .field import Field

try:
from typing import TypedDict
except ImportError: # remove as of Python 3.7
if not TYPE_CHECKING: # that way, IDE displays correct values and Python 3.7 still works
TypedDict = Dict

Unique = Optional[List[int]]
Filter = Optional[List[Tuple[bool, int, str]]]

Expand Down
2 changes: 1 addition & 1 deletion convey/identifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ def identify_fields(self, quiet=False):
logger.debug(f"Possible type of the field '{field}': {possible_types}")
return True

def get_fitting_type(self, source_field_i, target_field, try_plaintext=False):
def get_fitting_type(self, source_field_i: int, target_field, try_plaintext=False):
""" Loops all types the field could be and return the type best suited method for compute new field. """
_min = 999
fitting_type = None
Expand Down
50 changes: 31 additions & 19 deletions tests/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,25 +18,35 @@
from convey.dialogue import Cancelled

logging.basicConfig(stream=sys.stderr, level=logging.WARNING)
os.chdir("tests") # all mentioned resources files are in that folder
os.chmod("red-permission.gif", S_IRUSR | S_IRGRP) # make file unreadable to others

# to evade project folder pollution, chdir to a temp folder
PROJECT_DIR = Path.cwd()
# temp = TemporaryDirectory() XX As the output folder appears in the file folder, this has diminished effect.
# os.chdir(temp.name)
os.chdir("tests")

def p(s):
""" all mentioned resources files are in the tests folder """
return PROJECT_DIR / "tests" / Path(s)

HELLO_B64 = 'aGVsbG8='
SHEET_CSV = Path("sheet.csv")
GIF_CSV = Path("gif.csv")
PERSON_CSV = Path("person.csv")
PERSON_XLS = Path("person.xls")
PERSON_XLSX = Path("person.xlsx")
PERSON_ODS = Path("person.ods")
COMBINED_SHEET_PERSON = Path("combined_sheet_person.csv")
PERSON_HEADER_CSV = Path("person_header.csv")
COMBINED_LIST_METHOD = Path("combined_list_method.csv")
SHEET_DUPLICATED_CSV = Path("sheet_duplicated.csv")
SHEET_HEADER_CSV = Path("sheet_header.csv")
SHEET_HEADER_ITSELF_CSV = Path("sheet_header_itself.csv")
SHEET_HEADER_PERSON_CSV = Path("sheet_header_person.csv")
SHEET_PERSON_CSV = Path("sheet_person.csv")
PERSON_GIF_CSV = Path("person_gif.csv")
CONSUMPTION = Path("consumption.csv")
SHEET_CSV = p("sheet.csv")
GIF_CSV = p("gif.csv")
PERSON_CSV = p("person.csv")
PERSON_XLS = p("person.xls")
PERSON_XLSX = p("person.xlsx")
PERSON_ODS = p("person.ods")
COMBINED_SHEET_PERSON = p("combined_sheet_person.csv")
PERSON_HEADER_CSV = p("person_header.csv")
COMBINED_LIST_METHOD = p("combined_list_method.csv")
SHEET_DUPLICATED_CSV = p("sheet_duplicated.csv")
SHEET_HEADER_CSV = p("sheet_header.csv")
SHEET_HEADER_ITSELF_CSV = p("sheet_header_itself.csv")
SHEET_HEADER_PERSON_CSV = p("sheet_header_person.csv")
SHEET_PERSON_CSV = p("sheet_person.csv")
PERSON_GIF_CSV = p("person_gif.csv")
CONSUMPTION = p("consumption.csv")
p("red-permission.gif").chmod(S_IRUSR | S_IRGRP) # make file unreadable to others


class Convey:
Expand All @@ -53,7 +63,7 @@ def __init__(self, *args, filename: Union[str, Path] = None, text=None, whois=Fa
self.debug = debug

# XX travis will not work will daemon=true (which imposes slow testing)
self.cmd = ["../convey.py", "--output", "--reprocess", "--headless",
self.cmd = [str(PROJECT_DIR / "convey.py"), "--output", "--reprocess", "--headless",
"--daemon", "false", "--debug", "false", "--crash-post-mortem", "false"]
if filename is None and not text and len(args) == 1 and not str(args[0]).startswith("-"):
filename = args[0]
Expand Down Expand Up @@ -294,6 +304,8 @@ def test_aggregate(self):
check1 = True
if f.name == "bulb" and f.read_text() == "sum(price)\n370.0\n":
check2 = True
print("ZDEEEEEEEEE", list(Path().glob("consumption.csv_convey*/*")))
print(check1)
self.assertTrue(check1)
self.assertTrue(check2)

Expand Down

0 comments on commit 86539d3

Please sign in to comment.