Skip to content

Commit

Permalink
chore(ruff): variable renames, formatting, sort imports, use with open
Browse files Browse the repository at this point in the history
  • Loading branch information
rouilj committed Jan 5, 2025
1 parent db65d40 commit 283608f
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 43 deletions.
71 changes: 30 additions & 41 deletions roundup/test/memorydb.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,14 @@
'''Implement an in-memory hyperdb for testing purposes.
'''

import shutil
import os
import shutil
import time

from roundup import date
from roundup import hyperdb
from roundup import roundupdb
from roundup import security
from roundup import password
from roundup import configuration
from roundup.backends import back_anydbm
from roundup.backends import indexer_dbm
from roundup.backends import sessions_dbm
from roundup.backends import indexer_common
from roundup.support import ensureParentsExist
from roundup import configuration, date, hyperdb, password, roundupdb, security
from roundup.anypy.strings import s2b
from roundup.backends import back_anydbm, indexer_common, indexer_dbm, sessions_dbm
from roundup.support import ensureParentsExist
from roundup.test.tx_Source_detector import init as tx_Source_init

default_prefix = '../../share/roundup/templates/classic'
Expand Down Expand Up @@ -51,31 +43,30 @@ def create(journaltag, create=True, debug=False, prefix=default_prefix):
prefix = os.path.join(os.path.dirname(__file__), prefix)

schema = os.path.join(prefix, 'schema.py')
vars = hyperdb.__dict__
vars['Class'] = Class
vars['FileClass'] = FileClass
vars['IssueClass'] = IssueClass
vars['db'] = db
fd = open(schema)
exec(compile(fd.read(), schema, 'exec'), vars)
fd.close()
hyperdb_vars = hyperdb.__dict__
hyperdb_vars['Class'] = Class
hyperdb_vars['FileClass'] = FileClass
hyperdb_vars['IssueClass'] = IssueClass
hyperdb_vars['db'] = db

with open(schema) as fd:
exec(compile(fd.read(), schema, 'exec'), hyperdb_vars)

initial_data = os.path.join(prefix, 'initial_data.py')
vars = dict(db=db, admin_email='[email protected]',
adminpw=password.Password('sekrit', config=db.config))
fd = open(initial_data)
exec(compile(fd.read(), initial_data, 'exec'), vars)
fd.close()
admin_vars = {"db": db, "admin_email": "[email protected]",
"adminpw": password.Password('sekrit', config=db.config)}
with open(initial_data) as fd:
exec(compile(fd.read(), initial_data, 'exec'), admin_vars)

# load standard detectors
dirname = os.path.join(prefix, 'detectors')
for fn in os.listdir(dirname):
if not fn.endswith('.py'): continue # noqa: E701
vars = {}
exec_vars = {}
with open(os.path.join(dirname, fn)) as fd:
exec(compile(fd.read(),
os.path.join(dirname, fn), 'exec'), vars)
vars['init'](db)
os.path.join(dirname, fn), 'exec'), exec_vars)
exec_vars['init'](db)

tx_Source_init(db)

Expand Down Expand Up @@ -200,7 +191,7 @@ def set(self, infoid, **newvalues):
float(newvalues['__timestamp'])
except ValueError:
if infoid in self:
del(newvalues['__timestamp'])
del (newvalues['__timestamp'])
else:
newvalues['__timestamp'] = time.time()
self[infoid].update(newvalues)
Expand Down Expand Up @@ -301,8 +292,8 @@ def __init__(self, config, journaltag=None):
self.journals = self.__class__.memdb.get('journals', {})

def filename(self, classname, nodeid, property=None, create=0):
    """Return a filesystem path holding the node's file content.

    The in-memory backend has no real file storage, so it fakes one:
    it copies this module's own source file to a '.dummy' sibling and
    returns that path.  The classname/nodeid/property arguments are
    accepted only for interface compatibility with the real backends.
    """
    # NOTE: the scraped diff contained both the pre- and post-change
    # bodies; only one copy/return pair is kept (the duplicate was
    # unreachable after the first return).
    shutil.copyfile(__file__, __file__ + '.dummy')
    return __file__ + '.dummy'

def filesize(self, classname, nodeid, property=None, create=0):
    """Return the length in bytes of the node's stored file content."""
    content = self.getfile(classname, nodeid, property)
    return len(content)
Expand Down Expand Up @@ -423,8 +414,8 @@ def newid(self, classname):
self.ids[classname] += 1
return str(self.ids[classname])

def setid(self, classname, nodeid):
    """Set the id counter for *classname* to the integer value of *nodeid*.

    Subsequent newid() calls continue from this value.  Raises
    ValueError if *nodeid* is not parseable as an integer.
    """
    # The scraped diff carried both the old signature (param 'id',
    # which shadowed the builtin) and the renamed one; only the
    # post-change definition is kept.
    self.ids[classname] = int(nodeid)

#
# Journal
Expand Down Expand Up @@ -477,8 +468,8 @@ def pack(self, pack_before):
kept_journals = []
for entry in db[key]:
# unpack the entry
(nodeid, date_stamp, self.journaltag, action,
params) = entry
(_nodeid, date_stamp, self.journaltag, action,
_params) = entry
date_stamp = date_stamp.serialise()
# if the entry is after the pack date, _or_ the initial
# create entry, then it stays
Expand All @@ -502,15 +493,13 @@ def __init__(self, db, classname, **properties):
def export_files(self, dirname, nodeid):
    """Write this node's stored file content to its export location.

    The destination path is derived from *dirname*/*nodeid* via
    exportFilename(); missing parent directories are created first.
    """
    dest = self.exportFilename(dirname, nodeid)
    ensureParentsExist(dest)
    # 'with' guarantees the handle is closed even if write() raises;
    # the scraped diff contained both the old open/close body and this
    # one — only the context-manager form is kept.
    with open(dest, 'wb') as f:
        f.write(self.db.files[self.classname, nodeid, None])

def import_files(self, dirname, nodeid):
source = self.exportFilename(dirname, nodeid)
f = open(source, 'rb')
self.db.files[self.classname, nodeid, None] = f.read()
f.close()
with open(source, 'rb') as f:
self.db.files[self.classname, nodeid, None] = f.read()
mime_type = None
props = self.getprops()
if 'type' in props:
Expand Down
14 changes: 12 additions & 2 deletions roundup/test/mocknull.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ def __init__(self, **kwargs):
self.__dict__[key] = value

def __call__(self, *args, **kwargs): return MockNull()

def __getattr__(self, name):
# This allows assignments which assume all intermediate steps are Null
# objects if they don't exist yet.
Expand All @@ -16,16 +17,25 @@ def __getattr__(self, name):
return getattr(self, name)

def __getitem__(self, key): return self

def __bool__(self): return False
# Python 2 compatibility:
__nonzero__ = __bool__

def __contains__(self, key): return False

def __eq__(self, rhs): return False

def __ne__(self, rhs): return False

def __str__(self): return ''

def __repr__(self): return '<MockNull 0x%x>' % id(self)

# Identity stub so i18n-aware code works unchanged against MockNull.
# The scraped diff carried both the old parameter name ('str', which
# shadowed the builtin) and the renamed one; only the post-change
# definitions of __repr__ and gettext are kept.
def gettext(self, string): return string

_ = gettext

def get(self, name, default=None):
try:
return self.__dict__[name.lower()]
Expand Down

0 comments on commit 283608f

Please sign in to comment.