diff --git a/lieer/__init__.py b/lieer/__init__.py index 6e934be..1169ade 100644 --- a/lieer/__init__.py +++ b/lieer/__init__.py @@ -1,2 +1 @@ from .gmailieer import * - diff --git a/lieer/gmailieer.py b/lieer/gmailieer.py index 1802752..d267f02 100755 --- a/lieer/gmailieer.py +++ b/lieer/gmailieer.py @@ -18,933 +18,1177 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import os -import sys -import argparse -import googleapiclient -import googleapiclient.errors -import notmuch2 +import os +import sys +import argparse +import googleapiclient +import googleapiclient.errors +import notmuch2 from .remote import Remote -from .local import Local +from .local import Local -class Gmailieer: - cwd = None - - def main (self): - parser = argparse.ArgumentParser ('gmi') - self.parser = parser - - common = argparse.ArgumentParser (add_help = False) - common.add_argument ('-C', '--path', type = str, default = None, help = 'path') - - common.add_argument ('-c', '--credentials', type = str, default = None, - help = 'optional credentials file for google api') - common.add_argument ('-s', '--no-progress', action = 'store_true', - default = False, help = 'Disable progressbar (always off when output is not TTY)') - - common.add_argument ('-q', '--quiet', action = 'store_true', - default = False, help = 'Produce less output (implies -s)') - - common.add_argument ('-v', '--verbose', action='store_true', - default = False, help = 'print list of changes') +class Gmailieer: + cwd = None + def main(self): + parser = argparse.ArgumentParser("gmi") + self.parser = parser - subparsers = parser.add_subparsers (help = 'actions', dest = 'action') - subparsers.required = True + common = argparse.ArgumentParser(add_help=False) + common.add_argument("-C", "--path", type=str, default=None, help="path") - # pull - parser_pull = subparsers.add_parser ('pull', - help = 'pull new e-mail and remote tag-changes', - description = 'pull', - 
parents = [common] + common.add_argument( + "-c", + "--credentials", + type=str, + default=None, + help="optional credentials file for google api", ) - parser_pull.add_argument ('-t', '--list-labels', action='store_true', default = False, - help = 'list all remote labels (pull)') - - parser_pull.add_argument ('--limit', type = int, default = None, - help = 'Maximum number of messages to pull (soft limit, GMail may return more), note that this may upset the tally of synchronized messages.') - - - parser_pull.add_argument ('-d', '--dry-run', action='store_true', - default = False, help = 'do not make any changes') - - parser_pull.add_argument ('-f', '--force', action = 'store_true', - default = False, help = 'Force a full synchronization to be performed') - - parser_pull.add_argument ('-r', '--resume', action = 'store_true', - default = False, help = 'Resume previous incomplete synchronization if possible (this might cause local changes made in the interim to be ignored when pushing)') - - parser_pull.set_defaults (func = self.pull) - - # push - parser_push = subparsers.add_parser ('push', parents = [common], - description = 'push', - help = 'push local tag-changes') - - parser_push.add_argument ('--limit', type = int, default = None, - help = 'Maximum number of messages to push, note that this may upset the tally of synchronized messages.') - - parser_push.add_argument ('-d', '--dry-run', action='store_true', - default = False, help = 'do not make any changes') - - parser_push.add_argument ('-f', '--force', action = 'store_true', - default = False, help = 'Push even when there has been remote changes (might overwrite remote tag-changes)') + common.add_argument( + "-s", + "--no-progress", + action="store_true", + default=False, + help="Disable progressbar (always off when output is not TTY)", + ) - parser_push.set_defaults (func = self.push) + common.add_argument( + "-q", + "--quiet", + action="store_true", + default=False, + help="Produce less output (implies -s)", 
+ ) - # send - parser_send = subparsers.add_parser ('send', parents = [common], - description = 'Read a MIME message from STDIN and send.', - help = 'send a MIME message read from STDIN.') + common.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + help="print list of changes", + ) - parser_send.add_argument ('-d', '--dry-run', action='store_true', - default = False, help = 'do not actually send message') + subparsers = parser.add_subparsers(help="actions", dest="action") + subparsers.required = True - # Ignored arguments for sendmail compatibility - if '-oi' in sys.argv: - sys.argv.remove('-oi') + # pull + parser_pull = subparsers.add_parser( + "pull", + help="pull new e-mail and remote tag-changes", + description="pull", + parents=[common], + ) - if '-i' in sys.argv: - sys.argv.remove('-i') + parser_pull.add_argument( + "-t", + "--list-labels", + action="store_true", + default=False, + help="list all remote labels (pull)", + ) - parser_send.add_argument('-i', action='store_true', default = None, help = 'Ignored: always implied, allowed for sendmail compatibility.', dest = 'i3') - parser_send.add_argument('-t', '--read-recipients', action='store_true', - default = False, dest = 'read_recipients', - help = 'Read recipients from message headers. This is always done by GMail. 
If this option is not specified, the same addresses (as those in the headers) must be specified as additional arguments.') + parser_pull.add_argument( + "--limit", + type=int, + default=None, + help="Maximum number of messages to pull (soft limit, GMail may return more), note that this may upset the tally of synchronized messages.", + ) - parser_send.add_argument('-f', type = str, help = 'Ignored: has no effect, allowed for sendmail compatibility.', dest = 'i1') + parser_pull.add_argument( + "-d", + "--dry-run", + action="store_true", + default=False, + help="do not make any changes", + ) - parser_send.add_argument('recipients', nargs = '*', default = [], - help = 'Recipients to send this message to (these are essentially ignored, but they are validated against the header fields.)') + parser_pull.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Force a full synchronization to be performed", + ) - parser_send.set_defaults (func = self.send) + parser_pull.add_argument( + "-r", + "--resume", + action="store_true", + default=False, + help="Resume previous incomplete synchronization if possible (this might cause local changes made in the interim to be ignored when pushing)", + ) - # sync - parser_sync = subparsers.add_parser ('sync', parents = [common], - description = 'sync', - help = 'sync changes (flags have same meaning as for push and pull)') + parser_pull.set_defaults(func=self.pull) - parser_sync.add_argument ('--limit', type = int, default = None, - help = 'Maximum number of messages to sync, note that this may upset the tally of synchronized messages.') + # push + parser_push = subparsers.add_parser( + "push", parents=[common], description="push", help="push local tag-changes" + ) - parser_sync.add_argument ('-d', '--dry-run', action='store_true', - default = False, help = 'do not make any changes') + parser_push.add_argument( + "--limit", + type=int, + default=None, + help="Maximum number of messages to push, note that this 
may upset the tally of synchronized messages.", + ) - parser_sync.add_argument ('-f', '--force', action = 'store_true', - default = False, help = 'Push even when there has been remote changes, and force a full remote-to-local synchronization') + parser_push.add_argument( + "-d", + "--dry-run", + action="store_true", + default=False, + help="do not make any changes", + ) - parser_sync.add_argument ('-r', '--resume', action = 'store_true', - default = False, help = 'Resume previous incomplete synchronization if possible (this might cause local changes made in the interim to be ignored when pushing)') + parser_push.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Push even when there has been remote changes (might overwrite remote tag-changes)", + ) - parser_sync.set_defaults (func = self.sync) + parser_push.set_defaults(func=self.push) - # auth - parser_auth = subparsers.add_parser ('auth', parents = [common], - description = 'authorize', - help = 'authorize lieer with your GMail account') + # send + parser_send = subparsers.add_parser( + "send", + parents=[common], + description="Read a MIME message from STDIN and send.", + help="send a MIME message read from STDIN.", + ) - parser_auth.add_argument ('-f', '--force', action = 'store_true', - default = False, help = 'Re-authorize') + parser_send.add_argument( + "-d", + "--dry-run", + action="store_true", + default=False, + help="do not actually send message", + ) - # These are taken from oauth2lib/tools.py for compatibility with its - # run_flow() method used during oauth - parser_auth.add_argument('--auth-host-name', default='localhost', - help='Hostname when running a local web server') - parser_auth.add_argument('--auth-host-port', default=[8080, 8090], type=int, - nargs='*', - help='Port web server should listen on') - parser_auth.add_argument('--noauth_local_webserver', action='store_true', - default=False, - help='Do not run a local web server (no longer supported by Google)') + 
# Ignored arguments for sendmail compatibility + if "-oi" in sys.argv: + sys.argv.remove("-oi") - parser_auth.set_defaults (func = self.authorize) + if "-i" in sys.argv: + sys.argv.remove("-i") - # init - parser_init = subparsers.add_parser ('init', parents = [common], - description = 'initialize', - help = 'initialize local e-mail repository and authorize') + parser_send.add_argument( + "-i", + action="store_true", + default=None, + help="Ignored: always implied, allowed for sendmail compatibility.", + dest="i3", + ) + parser_send.add_argument( + "-t", + "--read-recipients", + action="store_true", + default=False, + dest="read_recipients", + help="Read recipients from message headers. This is always done by GMail. If this option is not specified, the same addresses (as those in the headers) must be specified as additional arguments.", + ) - parser_init.add_argument ('--replace-slash-with-dot', action = 'store_true', default = False, - help = 'This will replace \'/\' with \'.\' in gmail labels (make sure you realize the implications)') + parser_send.add_argument( + "-f", + type=str, + help="Ignored: has no effect, allowed for sendmail compatibility.", + dest="i1", + ) - parser_init.add_argument ('--no-auth', action = 'store_true', default = False, - help = 'Do not immediately authorize as well (you will need to run \'auth\' afterwards)') + parser_send.add_argument( + "recipients", + nargs="*", + default=[], + help="Recipients to send this message to (these are essentially ignored, but they are validated against the header fields.)", + ) - parser_init.add_argument ('account', type = str, help = 'GMail account to use') + parser_send.set_defaults(func=self.send) - parser_init.set_defaults (func = self.initialize) + # sync + parser_sync = subparsers.add_parser( + "sync", + parents=[common], + description="sync", + help="sync changes (flags have same meaning as for push and pull)", + ) + parser_sync.add_argument( + "--limit", + type=int, + default=None, + help="Maximum 
number of messages to sync, note that this may upset the tally of synchronized messages.", + ) - # set option - parser_set = subparsers.add_parser ('set', - description = 'set option', - parents = [common], - help = 'set options for repository') + parser_sync.add_argument( + "-d", + "--dry-run", + action="store_true", + default=False, + help="do not make any changes", + ) - parser_set.add_argument ('-t', '--timeout', type = float, - default = None, help = 'Set HTTP timeout in seconds (0 means forever or system timeout)') + parser_sync.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Push even when there has been remote changes, and force a full remote-to-local synchronization", + ) - parser_set.add_argument ('--replace-slash-with-dot', action = 'store_true', default = False, - help = 'This will replace \'/\' with \'.\' in gmail labels (Important: see the manual and make sure you realize the implications)') + parser_sync.add_argument( + "-r", + "--resume", + action="store_true", + default=False, + help="Resume previous incomplete synchronization if possible (this might cause local changes made in the interim to be ignored when pushing)", + ) - parser_set.add_argument ('--no-replace-slash-with-dot', action = 'store_true', default = False) + parser_sync.set_defaults(func=self.sync) - parser_set.add_argument ('--drop-non-existing-labels', action = 'store_true', default = False, - help = 'Allow missing labels on the GMail side to be dropped (see https://github.com/gauteh/lieer/issues/48)') + # auth + parser_auth = subparsers.add_parser( + "auth", + parents=[common], + description="authorize", + help="authorize lieer with your GMail account", + ) - parser_set.add_argument ('--no-drop-non-existing-labels', action = 'store_true', default = False) + parser_auth.add_argument( + "-f", "--force", action="store_true", default=False, help="Re-authorize" + ) - parser_set.add_argument ('--ignore-empty-history', action = 'store_true', default = 
False, - help = 'Sometimes GMail indicates more changes, but an empty set is returned (see https://github.com/gauteh/lieer/issues/120)') + # These are taken from oauth2lib/tools.py for compatibility with its + # run_flow() method used during oauth + parser_auth.add_argument( + "--auth-host-name", + default="localhost", + help="Hostname when running a local web server", + ) + parser_auth.add_argument( + "--auth-host-port", + default=[8080, 8090], + type=int, + nargs="*", + help="Port web server should listen on", + ) + parser_auth.add_argument( + "--noauth_local_webserver", + action="store_true", + default=False, + help="Do not run a local web server (no longer supported by Google)", + ) - parser_set.add_argument ('--no-ignore-empty-history', action = 'store_true', default = False) + parser_auth.set_defaults(func=self.authorize) - parser_set.add_argument ('--ignore-tags-local', type = str, - default = None, help = 'Set custom tags to ignore when syncing from local to remote (comma-separated, after translations). Important: see the manual.') + # init + parser_init = subparsers.add_parser( + "init", + parents=[common], + description="initialize", + help="initialize local e-mail repository and authorize", + ) - parser_set.add_argument ('--ignore-tags-remote', type = str, - default = None, help = 'Set custom tags to ignore when syncing from remote to local (comma-separated, before translations). Important: see the manual.') + parser_init.add_argument( + "--replace-slash-with-dot", + action="store_true", + default=False, + help="This will replace '/' with '.' in gmail labels (make sure you realize the implications)", + ) - parser_set.add_argument ('--file-extension', type = str, default = None, - help = 'Add a file extension before the maildir status flags (e.g.: "mbox"). 
Important: see the manual about changing this setting after initial sync.') + parser_init.add_argument( + "--no-auth", + action="store_true", + default=False, + help="Do not immediately authorize as well (you will need to run 'auth' afterwards)", + ) - parser_set.add_argument ('--remove-local-messages', action = 'store_true', default = False, - help = 'Remove messages that have been deleted on the remote (default is on)') - parser_set.add_argument ('--no-remove-local-messages', action = 'store_true', default = False, - help = 'Do not remove messages that have been deleted on the remote') + parser_init.add_argument("account", type=str, help="GMail account to use") - parser_set.add_argument ('--local-trash-tag', type = str, default = None, - help = 'The local tag to use for the remote label TRASH.') + parser_init.set_defaults(func=self.initialize) - parser_set.add_argument ('--translation-list-overlay', type = str, default = None, - help = 'A list with an even number of items representing a list of pairs of (remote, local), where each pair is added to the tag translation.') + # set option + parser_set = subparsers.add_parser( + "set", + description="set option", + parents=[common], + help="set options for repository", + ) - parser_set.set_defaults (func = self.set) + parser_set.add_argument( + "-t", + "--timeout", + type=float, + default=None, + help="Set HTTP timeout in seconds (0 means forever or system timeout)", + ) + parser_set.add_argument( + "--replace-slash-with-dot", + action="store_true", + default=False, + help="This will replace '/' with '.' 
in gmail labels (Important: see the manual and make sure you realize the implications)", + ) - args = parser.parse_args (sys.argv[1:]) - self.args = args + parser_set.add_argument( + "--no-replace-slash-with-dot", action="store_true", default=False + ) - if args.quiet: - args.no_progress = True + parser_set.add_argument( + "--drop-non-existing-labels", + action="store_true", + default=False, + help="Allow missing labels on the GMail side to be dropped (see https://github.com/gauteh/lieer/issues/48)", + ) - args.func (args) + parser_set.add_argument( + "--no-drop-non-existing-labels", action="store_true", default=False + ) - def initialize (self, args): - self.setup (args, False) - self.local.initialize_repository (args.replace_slash_with_dot, args.account) + parser_set.add_argument( + "--ignore-empty-history", + action="store_true", + default=False, + help="Sometimes GMail indicates more changes, but an empty set is returned (see https://github.com/gauteh/lieer/issues/120)", + ) - if not args.no_auth: - self.local.load_repository () - self.remote = Remote (self) + parser_set.add_argument( + "--no-ignore-empty-history", action="store_true", default=False + ) - try: - self.remote.authorize () - except: - print ("") - print ("") - print ("init: repository is set up, but authorization failed. re-run 'gmi auth' with proper parameters to complete authorization") - print ("") - print ("") - print ("") - print ("") - raise + parser_set.add_argument( + "--ignore-tags-local", + type=str, + default=None, + help="Set custom tags to ignore when syncing from local to remote (comma-separated, after translations). Important: see the manual.", + ) - def authorize (self, args): - print ("authorizing..") - self.setup (args, False, True) - self.remote.authorize (args.force) + parser_set.add_argument( + "--ignore-tags-remote", + type=str, + default=None, + help="Set custom tags to ignore when syncing from remote to local (comma-separated, before translations). 
Important: see the manual.", + ) - def setup (self, args, dry_run = False, load = False, block = False): - global tqdm + parser_set.add_argument( + "--file-extension", + type=str, + default=None, + help='Add a file extension before the maildir status flags (e.g.: "mbox"). Important: see the manual about changing this setting after initial sync.', + ) - # common options - if args.path is not None: - self.vprint ("path: %s" % args.path) - if args.action == "init" and not os.path.exists(args.path): - os.makedirs(args.path) + parser_set.add_argument( + "--remove-local-messages", + action="store_true", + default=False, + help="Remove messages that have been deleted on the remote (default is on)", + ) + parser_set.add_argument( + "--no-remove-local-messages", + action="store_true", + default=False, + help="Do not remove messages that have been deleted on the remote", + ) - args.path = os.path.expanduser(args.path) - if os.path.isdir(args.path): - self.cwd = os.getcwd() - os.chdir(args.path) - else: - print("error: %s is not a valid path!" % args.path) - raise NotADirectoryError("error: %s is not a valid path!" 
% args.path) + parser_set.add_argument( + "--local-trash-tag", + type=str, + default=None, + help="The local tag to use for the remote label TRASH.", + ) - self.dry_run = dry_run - self.verbose = args.verbose - self.HAS_TQDM = (not args.no_progress) - self.credentials_file = args.credentials + parser_set.add_argument( + "--translation-list-overlay", + type=str, + default=None, + help="A list with an even number of items representing a list of pairs of (remote, local), where each pair is added to the tag translation.", + ) - if self.HAS_TQDM: - if not (sys.stderr.isatty() and sys.stdout.isatty()): - self.HAS_TQDM = False - else: - try: - from tqdm import tqdm - self.HAS_TQDM = True - except ImportError: - self.HAS_TQDM = False + parser_set.set_defaults(func=self.set) + + args = parser.parse_args(sys.argv[1:]) + self.args = args + + if args.quiet: + args.no_progress = True + + args.func(args) + + def initialize(self, args): + self.setup(args, False) + self.local.initialize_repository(args.replace_slash_with_dot, args.account) + + if not args.no_auth: + self.local.load_repository() + self.remote = Remote(self) + + try: + self.remote.authorize() + except: + print("") + print("") + print( + "init: repository is set up, but authorization failed. re-run 'gmi auth' with proper parameters to complete authorization" + ) + print("") + print("") + print("") + print("") + raise + + def authorize(self, args): + print("authorizing..") + self.setup(args, False, True) + self.remote.authorize(args.force) + + def setup(self, args, dry_run=False, load=False, block=False): + global tqdm + + # common options + if args.path is not None: + self.vprint("path: %s" % args.path) + if args.action == "init" and not os.path.exists(args.path): + os.makedirs(args.path) + + args.path = os.path.expanduser(args.path) + if os.path.isdir(args.path): + self.cwd = os.getcwd() + os.chdir(args.path) + else: + print("error: %s is not a valid path!" 
% args.path) + raise NotADirectoryError("error: %s is not a valid path!" % args.path) - if not self.HAS_TQDM: - from .nobar import tqdm + self.dry_run = dry_run + self.verbose = args.verbose + self.HAS_TQDM = not args.no_progress + self.credentials_file = args.credentials - if self.dry_run: - print ("dry-run: ", self.dry_run) + if self.HAS_TQDM: + if not (sys.stderr.isatty() and sys.stdout.isatty()): + self.HAS_TQDM = False + else: + try: + from tqdm import tqdm - self.local = Local (self) - if load: - self.local.load_repository (block) - self.remote = Remote (self) + self.HAS_TQDM = True + except ImportError: + self.HAS_TQDM = False - def sync (self, args): - self.setup (args, args.dry_run, True) - self.force = args.force - self.limit = args.limit - self.list_labels = False - self.resume = args.resume + if not self.HAS_TQDM: + from .nobar import tqdm + + if self.dry_run: + print("dry-run: ", self.dry_run) + + self.local = Local(self) + if load: + self.local.load_repository(block) + self.remote = Remote(self) - self.remote.get_labels () + def sync(self, args): + self.setup(args, args.dry_run, True) + self.force = args.force + self.limit = args.limit + self.list_labels = False + self.resume = args.resume - # will try to push local changes, this operation should not make - # any changes to the local store or any of the file names. - self.push (args, True) + self.remote.get_labels() - # will pull in remote changes, overwriting local changes and effectively - # resolving any conflicts. - self.pull (args, True) + # will try to push local changes, this operation should not make + # any changes to the local store or any of the file names. + self.push(args, True) - def push (self, args, setup = False): - if not setup: - self.setup (args, args.dry_run, True) + # will pull in remote changes, overwriting local changes and effectively + # resolving any conflicts. 
+ self.pull(args, True) - self.force = args.force - self.limit = args.limit + def push(self, args, setup=False): + if not setup: + self.setup(args, args.dry_run, True) - self.remote.get_labels () + self.force = args.force + self.limit = args.limit - # loading local changes + self.remote.get_labels() - with notmuch2.Database() as db: - rev = db.revision().rev - if rev == self.local.state.lastmod: - self.vprint ("push: everything is up-to-date.") - return + # loading local changes - qry = "path:%s/** and lastmod:%d..%d" % (self.local.nm_relative, self.local.state.lastmod, rev) + with notmuch2.Database() as db: + rev = db.revision().rev + if rev == self.local.state.lastmod: + self.vprint("push: everything is up-to-date.") + return - messages = [db.get(m.path) for m in db.messages(qry)] + qry = "path:%s/** and lastmod:%d..%d" % ( + self.local.nm_relative, + self.local.state.lastmod, + rev, + ) - if self.limit is not None and len(messages) > self.limit: - messages = messages[:self.limit] + messages = [db.get(m.path) for m in db.messages(qry)] - # get gids and filter out messages outside this repository - messages, gids = self.local.messages_to_gids (messages) + if self.limit is not None and len(messages) > self.limit: + messages = messages[: self.limit] - # get meta-data on changed messages from remote - remote_messages = [] - self.bar_create (leave = True, total = len(gids), desc = 'receiving metadata') + # get gids and filter out messages outside this repository + messages, gids = self.local.messages_to_gids(messages) - def _got_msgs (ms): - for m in ms: - self.bar_update (1) - remote_messages.append (m) + # get meta-data on changed messages from remote + remote_messages = [] + self.bar_create(leave=True, total=len(gids), desc="receiving metadata") - self.remote.get_messages (gids, _got_msgs, 'minimal') - self.bar_close () + def _got_msgs(ms): + for m in ms: + self.bar_update(1) + remote_messages.append(m) - # resolve changes - self.bar_create (leave = True, total = 
len(gids), desc = 'resolving changes') - actions = [] - for rm, nm in zip(remote_messages, messages): - actions.append (self.remote.update (rm, nm, self.local.state.last_historyId, self.force)) - self.bar_update (1) + self.remote.get_messages(gids, _got_msgs, "minimal") + self.bar_close() - self.bar_close () + # resolve changes + self.bar_create(leave=True, total=len(gids), desc="resolving changes") + actions = [] + for rm, nm in zip(remote_messages, messages): + actions.append( + self.remote.update( + rm, nm, self.local.state.last_historyId, self.force + ) + ) + self.bar_update(1) - # remove no-ops - actions = [ a for a in actions if a ] + self.bar_close() - # limit - if self.limit is not None and len(actions) >= self.limit: - actions = actions[:self.limit] + # remove no-ops + actions = [a for a in actions if a] - # push changes - if len(actions) > 0: - self.bar_create (leave = True, total = len(actions), desc = 'pushing, 0 changed') - changed = 0 + # limit + if self.limit is not None and len(actions) >= self.limit: + actions = actions[: self.limit] - def cb (_): - nonlocal changed - self.bar_update (1) - changed += 1 - if not self.args.quiet and self.bar: - self.bar.set_description ('pushing, %d changed' % changed) + # push changes + if len(actions) > 0: + self.bar_create( + leave=True, total=len(actions), desc="pushing, 0 changed" + ) + changed = 0 - self.remote.push_changes (actions, cb) + def cb(_): + nonlocal changed + self.bar_update(1) + changed += 1 + if not self.args.quiet and self.bar: + self.bar.set_description("pushing, %d changed" % changed) + + self.remote.push_changes(actions, cb) + + self.bar_close() + else: + self.vprint("push: nothing to push") - self.bar_close () - else: - self.vprint ('push: nothing to push') + if not self.remote.all_updated: + # will not set last_mod, this forces messages to be pushed again at next push + print("push: not all changes could be pushed, will re-try at next push.") + else: + # TODO: Once we get more confident we 
might set the last history Id here to + # avoid pulling back in the changes we just pushed. Currently there's a race + # if something is modified remotely (new email, changed tags), so this might + # not really be possible. + pass + + if not self.dry_run and self.remote.all_updated: + self.local.state.set_lastmod(rev) + + self.vprint( + "remote historyId: %d" + % self.remote.get_current_history_id(self.local.state.last_historyId) + ) - if not self.remote.all_updated: - # will not set last_mod, this forces messages to be pushed again at next push - print ("push: not all changes could be pushed, will re-try at next push.") - else: - # TODO: Once we get more confident we might set the last history Id here to - # avoid pulling back in the changes we just pushed. Currently there's a race - # if something is modified remotely (new email, changed tags), so this might - # not really be possible. - pass + def pull(self, args, setup=False): + if not setup: + self.setup(args, args.dry_run, True) - if not self.dry_run and self.remote.all_updated: - self.local.state.set_lastmod (rev) + self.list_labels = args.list_labels + self.force = args.force + self.limit = args.limit + self.resume = args.resume - self.vprint ("remote historyId: %d" % self.remote.get_current_history_id (self.local.state.last_historyId)) + self.remote.get_labels() # to make sure label map is initialized - def pull (self, args, setup = False): - if not setup: - self.setup (args, args.dry_run, True) + if self.list_labels: + for k, l in self.remote.labels.items(): + print("{0: <30} {1}".format(l, k)) + return - self.list_labels = args.list_labels - self.force = args.force - self.limit = args.limit - self.resume = args.resume + if self.force: + self.vprint("pull: full synchronization (forced)") + self.full_pull() - self.remote.get_labels () # to make sure label map is initialized + elif self.local.state.last_historyId == 0: + self.vprint( + "pull: full synchronization (no previous synchronization state)" + ) + 
self.full_pull() - if self.list_labels: - for k,l in self.remote.labels.items (): - print ("{0: <30} {1}".format (l, k)) - return + else: + self.vprint( + "pull: partial synchronization.. (hid: %d)" + % self.local.state.last_historyId + ) + self.partial_pull() + + def partial_pull(self): + # get history + bar = None + history = [] + last_id = self.remote.get_current_history_id(self.local.state.last_historyId) - if self.force: - self.vprint ("pull: full synchronization (forced)") - self.full_pull () + try: + for hist in self.remote.get_history_since(self.local.state.last_historyId): + history.extend(hist) - elif self.local.state.last_historyId == 0: - self.vprint ("pull: full synchronization (no previous synchronization state)") - self.full_pull () - - else: - self.vprint ("pull: partial synchronization.. (hid: %d)" % self.local.state.last_historyId) - self.partial_pull () + if bar is None: + self.bar_create(leave=True, desc="fetching changes") - def partial_pull (self): - # get history - bar = None - history = [] - last_id = self.remote.get_current_history_id (self.local.state.last_historyId) - - try: - for hist in self.remote.get_history_since (self.local.state.last_historyId): - history.extend (hist) - - if bar is None: - self.bar_create (leave = True, desc = 'fetching changes') - - self.bar_update (len(hist)) - - if self.limit is not None and len(history) >= self.limit: - break - - except googleapiclient.errors.HttpError as excep: - if excep.resp.status == 404: - print ("pull: historyId is too old, full sync required.") - self.full_pull () - return - else: - raise - - except Remote.NoHistoryException: - print ("pull: failed, re-try in a bit.") - raise + self.bar_update(len(hist)) - finally: - if bar is not None: - self.bar_close() + if self.limit is not None and len(history) >= self.limit: + break - # figure out which changes need to be applied - added_messages = [] # added messages, if they are later deleted they will be - # removed from this list - - 
deleted_messages = [] # deleted messages, if they are later added they will be - # removed from this list - - labels_changed = [] # list of messages which have had their label changed - # the entry will be the last and most recent one in case - # of multiple changes. if the message is either deleted - # or added after the label change it will be removed from - # this list. - - def remove_from_all (m): - nonlocal added_messages, deleted_messages, labels_changed - remove_from_list (deleted_messages, m) - remove_from_list (labels_changed, m) - remove_from_list (added_messages, m) - - def remove_from_list (lst, m): - e = next ((e for e in lst if e['id'] == m['id']), None) - if e is not None: - lst.remove (e) - return True - return False - - if len(history) > 0: - self.bar_create (total = len(history), leave = True, desc = 'resolving changes') - else: - bar = None - - for h in history: - if 'messagesAdded' in h: - for m in h['messagesAdded']: - mm = m['message'] - if not (set(mm.get('labelIds', [])) & self.remote.not_sync): - remove_from_all (mm) - added_messages.append (mm) - - if 'messagesDeleted' in h: - for m in h['messagesDeleted']: - mm = m['message'] - # might silently fail to delete this - remove_from_all (mm) - if self.local.has (mm['id']): - deleted_messages.append (mm) - - # messages that are subsequently deleted by a later action will be removed - # from either labels_changed or added_messages. 
- if 'labelsAdded' in h: - for m in h['labelsAdded']: - mm = m['message'] - if not (set(mm.get('labelIds', [])) & self.remote.not_sync): - new = remove_from_list (added_messages, mm) or not self.local.has (mm['id']) - remove_from_list (labels_changed, mm) - if new: - added_messages.append (mm) # needs to fetched - else: - labels_changed.append (mm) - else: - # in case a not_sync tag has been added to a scheduled message - remove_from_list (added_messages, mm) - remove_from_list (labels_changed, mm) - - if self.local.has (mm['id']): - remove_from_list (deleted_messages, mm) - deleted_messages.append (mm) - - if 'labelsRemoved' in h: - for m in h['labelsRemoved']: - mm = m['message'] - if not (set(mm.get('labelIds', [])) & self.remote.not_sync): - new = remove_from_list (added_messages, mm) or not self.local.has (mm['id']) - remove_from_list (labels_changed, mm) - if new: - added_messages.append (mm) # needs to fetched + except googleapiclient.errors.HttpError as excep: + if excep.resp.status == 404: + print("pull: historyId is too old, full sync required.") + self.full_pull() + return else: - labels_changed.append (mm) - else: - # in case a not_sync tag has been added - remove_from_list (added_messages, mm) - remove_from_list (labels_changed, mm) - - if self.local.has (mm['id']): - remove_from_list (deleted_messages, mm) - deleted_messages.append (mm) - - self.bar_update (1) - - if bar: - self.bar_close () - - changed = False - # fetching new messages - if len (added_messages) > 0: - message_gids = [m['id'] for m in added_messages] - updated = self.get_content (message_gids) - - # updated labels for the messages that already existed - needs_update_gid = list(set(message_gids) - set(updated)) - needs_update = [m for m in added_messages if m['id'] in needs_update_gid] - labels_changed.extend (needs_update) - - changed = True - - if self.local.config.remove_local_messages and len(deleted_messages) > 0: - with notmuch2.Database(mode = notmuch2.Database.MODE.READ_WRITE) 
as db: - for m in tqdm (deleted_messages, leave = True, desc = 'removing messages'): - self.local.remove (m['id'], db) - - changed = True - - if len (labels_changed) > 0: - lchanged = 0 - with notmuch2.Database(mode = notmuch2.Database.MODE.READ_WRITE) as db: - self.bar_create (total = len(labels_changed), leave = True, desc = 'updating tags (0)') - for m in labels_changed: - r = self.local.update_tags (m, None, db) - if r: - lchanged += 1 - if not self.args.quiet and self.bar: - self.bar.set_description ('updating tags (%d)' % lchanged) - - self.bar_update (1) - self.bar_close () - - - changed = True + raise + + except Remote.NoHistoryException: + print("pull: failed, re-try in a bit.") + raise + + finally: + if bar is not None: + self.bar_close() + + # figure out which changes need to be applied + added_messages = [] # added messages, if they are later deleted they will be + # removed from this list + + deleted_messages = [] # deleted messages, if they are later added they will be + # removed from this list + + labels_changed = [] # list of messages which have had their label changed + # the entry will be the last and most recent one in case + # of multiple changes. if the message is either deleted + # or added after the label change it will be removed from + # this list. 
+ + def remove_from_all(m): + nonlocal added_messages, deleted_messages, labels_changed + remove_from_list(deleted_messages, m) + remove_from_list(labels_changed, m) + remove_from_list(added_messages, m) + + def remove_from_list(lst, m): + e = next((e for e in lst if e["id"] == m["id"]), None) + if e is not None: + lst.remove(e) + return True + return False + + if len(history) > 0: + self.bar_create(total=len(history), leave=True, desc="resolving changes") + else: + bar = None + + for h in history: + if "messagesAdded" in h: + for m in h["messagesAdded"]: + mm = m["message"] + if not (set(mm.get("labelIds", [])) & self.remote.not_sync): + remove_from_all(mm) + added_messages.append(mm) + + if "messagesDeleted" in h: + for m in h["messagesDeleted"]: + mm = m["message"] + # might silently fail to delete this + remove_from_all(mm) + if self.local.has(mm["id"]): + deleted_messages.append(mm) + + # messages that are subsequently deleted by a later action will be removed + # from either labels_changed or added_messages. 
+ if "labelsAdded" in h: + for m in h["labelsAdded"]: + mm = m["message"] + if not (set(mm.get("labelIds", [])) & self.remote.not_sync): + new = remove_from_list( + added_messages, mm + ) or not self.local.has(mm["id"]) + remove_from_list(labels_changed, mm) + if new: + added_messages.append(mm) # needs to fetched + else: + labels_changed.append(mm) + else: + # in case a not_sync tag has been added to a scheduled message + remove_from_list(added_messages, mm) + remove_from_list(labels_changed, mm) + + if self.local.has(mm["id"]): + remove_from_list(deleted_messages, mm) + deleted_messages.append(mm) + + if "labelsRemoved" in h: + for m in h["labelsRemoved"]: + mm = m["message"] + if not (set(mm.get("labelIds", [])) & self.remote.not_sync): + new = remove_from_list( + added_messages, mm + ) or not self.local.has(mm["id"]) + remove_from_list(labels_changed, mm) + if new: + added_messages.append(mm) # needs to fetched + else: + labels_changed.append(mm) + else: + # in case a not_sync tag has been added + remove_from_list(added_messages, mm) + remove_from_list(labels_changed, mm) + + if self.local.has(mm["id"]): + remove_from_list(deleted_messages, mm) + deleted_messages.append(mm) + + self.bar_update(1) + + if bar: + self.bar_close() + + changed = False + # fetching new messages + if len(added_messages) > 0: + message_gids = [m["id"] for m in added_messages] + updated = self.get_content(message_gids) + + # updated labels for the messages that already existed + needs_update_gid = list(set(message_gids) - set(updated)) + needs_update = [m for m in added_messages if m["id"] in needs_update_gid] + labels_changed.extend(needs_update) + + changed = True + + if self.local.config.remove_local_messages and len(deleted_messages) > 0: + with notmuch2.Database(mode=notmuch2.Database.MODE.READ_WRITE) as db: + for m in tqdm(deleted_messages, leave=True, desc="removing messages"): + self.local.remove(m["id"], db) + + changed = True + + if len(labels_changed) > 0: + lchanged = 0 + 
with notmuch2.Database(mode=notmuch2.Database.MODE.READ_WRITE) as db: + self.bar_create( + total=len(labels_changed), leave=True, desc="updating tags (0)" + ) + for m in labels_changed: + r = self.local.update_tags(m, None, db) + if r: + lchanged += 1 + if not self.args.quiet and self.bar: + self.bar.set_description("updating tags (%d)" % lchanged) + + self.bar_update(1) + self.bar_close() + + changed = True + + if not changed: + self.vprint("pull: everything is up-to-date.") + + if not self.dry_run: + self.local.state.set_last_history_id(last_id) + + if last_id > 0: + self.vprint("current historyId: %d" % last_id) + + def full_pull(self): + total = 1 + + self.bar_create(leave=True, total=total, desc="fetching messages") + + # NOTE: + # this list might grow gigantic for large quantities of e-mail, not really sure + # about how much memory this will take. this is just a list of some + # simple metadata like message ids. + message_gids = [] + last_id = self.remote.get_current_history_id(self.local.state.last_historyId) + + resume_file = os.path.join(self.local.wd, ".resume-pull.gmailieer.json") + + if not self.resume: + if os.path.exists(resume_file): + self.vprint("pull: previous pull can be resumed using --resume") + + # continue filling up or create new resume-file + previous = self.load_resume(resume_file, last_id) + + elif self.resume and not os.path.exists(resume_file): + self.vprint( + "pull: no previous resume file exists, continuing with full pull" + ) + previous = self.load_resume(resume_file, last_id) - if not changed: - self.vprint ("pull: everything is up-to-date.") + else: + self.vprint("pull: attempting to resume previous pull..") + assert self.resume + previous = self.load_resume(resume_file, last_id) - if not self.dry_run: - self.local.state.set_last_history_id (last_id) + # check if lastid is still valid + if not self.remote.is_history_id_valid(previous.lastId): + self.vprint("pull: resume file too old, starting from scratch.") - if (last_id > 0): - 
self.vprint ('current historyId: %d' % last_id) + previous.delete() + previous = self.load_resume(resume_file, last_id) - def full_pull (self): - total = 1 + for mset in self.remote.all_messages(): + (total, gids) = mset - self.bar_create(leave = True, total = total, desc = 'fetching messages') + self.bar.total = total + self.bar_update(len(gids)) - # NOTE: - # this list might grow gigantic for large quantities of e-mail, not really sure - # about how much memory this will take. this is just a list of some - # simple metadata like message ids. - message_gids = [] - last_id = self.remote.get_current_history_id(self.local.state.last_historyId) + for m in gids: + message_gids.append(m["id"]) - resume_file = os.path.join(self.local.wd, ".resume-pull.gmailieer.json") + if self.limit is not None and len(message_gids) >= self.limit: + break - if not self.resume: - if os.path.exists(resume_file): - self.vprint("pull: previous pull can be resumed using --resume") + self.bar_close() - # continue filling up or create new resume-file - previous = self.load_resume(resume_file, last_id) + if self.local.config.remove_local_messages: + if self.limit and not self.dry_run: + raise ValueError( + '--limit with "remove_local_messages" will cause lots of messages to be deleted' + ) + + # removing files that have been deleted remotely + all_remote = set(message_gids) + all_local = set(self.local.gids.keys()) + remove = list(all_local - all_remote) + self.bar_create(leave=True, total=len(remove), desc="removing deleted") + with notmuch2.Database(mode=notmuch2.Database.MODE.READ_WRITE) as db: + for m in remove: + self.local.remove(m, db) + self.bar_update(1) + + self.bar_close() + + if len(message_gids) > 0: + # get content for new messages + updated = self.get_content(message_gids) + + # get updated labels for the rest + needs_update = list(set(message_gids) - set(updated)) + + if self.resume: + self.vprint( + "pull: resume: skipping metadata for %d messages" + % 
len(previous.meta_fetched) + ) + needs_update = list(set(needs_update) - set(previous.meta_fetched)) + + self.get_meta(needs_update, previous, self.resume) + else: + self.vprint("pull: no messages.") - elif self.resume and not os.path.exists(resume_file): - self.vprint("pull: no previous resume file exists, continuing with full pull") - previous = self.load_resume(resume_file, last_id) + # set notmuch lastmod time, since we have now synced everything from remote + # to local + with notmuch2.Database() as db: + rev = db.revision().rev - else: - self.vprint("pull: attempting to resume previous pull..") - assert self.resume - previous = self.load_resume(resume_file, last_id) + if not self.dry_run: + self.local.state.set_lastmod(rev) - # check if lastid is still valid - if not self.remote.is_history_id_valid(previous.lastId): - self.vprint("pull: resume file too old, starting from scratch.") + if self.resume: + self.local.state.set_last_history_id(previous.lastId) + else: + self.local.state.set_last_history_id(last_id) + self.vprint("pull: complete, removing resume file") previous.delete() - previous = self.load_resume(resume_file, last_id) - - for mset in self.remote.all_messages(): - (total, gids) = mset - - self.bar.total = total - self.bar_update(len(gids)) - - for m in gids: - message_gids.append(m['id']) - - if self.limit is not None and len(message_gids) >= self.limit: - break - - self.bar_close() - if self.local.config.remove_local_messages: - if self.limit and not self.dry_run: - raise ValueError('--limit with "remove_local_messages" will cause lots of messages to be deleted') + self.vprint("current historyId: %d, current revision: %d" % (last_id, rev)) + if self.resume: + self.vprint("pull: resume: performing partial pull to complete") + self.partial_pull() - # removing files that have been deleted remotely - all_remote = set(message_gids) - all_local = set(self.local.gids.keys()) - remove = list(all_local - all_remote) - self.bar_create (leave = True, total 
= len(remove), desc = 'removing deleted') - with notmuch2.Database (mode = notmuch2.Database.MODE.READ_WRITE) as db: - for m in remove: - self.local.remove(m, db) - self.bar_update (1) + self.vprint( + "pull: note that local changes made in the interim might be ignored in the next push" + ) - self.bar_close() + def get_meta(self, msgids, previous=None, resume=False): + """ + Only gets the minimal message objects in order to check if labels are up-to-date. - if len(message_gids) > 0: - # get content for new messages - updated = self.get_content(message_gids) + `previous` and `resume` is passed by `full_pull` to track progress and resume previous metadata pull. + """ - # get updated labels for the rest - needs_update = list(set(message_gids) - set(updated)) - - if self.resume: - self.vprint("pull: resume: skipping metadata for %d messages" % len(previous.meta_fetched)) - needs_update = list(set(needs_update) - set(previous.meta_fetched)) - - self.get_meta(needs_update, previous, self.resume) - else: - self.vprint("pull: no messages.") - - # set notmuch lastmod time, since we have now synced everything from remote - # to local - with notmuch2.Database() as db: - rev = db.revision().rev - - if not self.dry_run: - self.local.state.set_lastmod(rev) - - if self.resume: - self.local.state.set_last_history_id(previous.lastId) - else: - self.local.state.set_last_history_id(last_id) - - self.vprint('pull: complete, removing resume file') - previous.delete() - - self.vprint('current historyId: %d, current revision: %d' % (last_id, rev)) - if self.resume: - self.vprint("pull: resume: performing partial pull to complete") - self.partial_pull() - - self.vprint("pull: note that local changes made in the interim might be ignored in the next push") - - def get_meta (self, msgids, previous = None, resume = False): - """ - Only gets the minimal message objects in order to check if labels are up-to-date. 
- - `previous` and `resume` is passed by `full_pull` to track progress and resume previous metadata pull. - """ - - if len (msgids) > 0: - if resume: - total = len(msgids) + len(previous.meta_fetched) - else: - total = len(msgids) - - self.bar_create (leave = True, total = total, desc = 'receiving metadata') - - if resume and previous is not None: - self.bar_update(len(previous.meta_fetched)) - - # opening db for whole metadata sync - def _got_msgs (ms): - with notmuch2.Database(mode = notmuch2.Database.MODE.READ_WRITE) as db: - for m in ms: - self.bar_update (1) - self.local.update_tags (m, None, db) - - if previous is not None: - gids = [m['id'] for m in ms] - previous.update(gids) - - self.remote.get_messages (msgids, _got_msgs, 'minimal') - - self.bar_close () - - else: - self.vprint ("receiving metadata: everything up-to-date.") - - def get_content (self, msgids): - """ - Get the full email source of the messages that we do not already have - - Returns: - list of messages which were updated, these have also been updated in Notmuch and - does not need to be partially updated. 
- - """ - - need_content = [ m for m in msgids if not self.local.has (m) ] - - if len (need_content) > 0: - - self.bar_create (leave = True, total = len(need_content), desc = 'receiving content') - - def _got_msgs (ms): - # opening db per message batch since it takes some time to download each one - with notmuch2.Database(mode = notmuch2.Database.MODE.READ_WRITE) as db: - for m in ms: - self.bar_update (1) - self.local.store (m, db) + if len(msgids) > 0: + if resume: + total = len(msgids) + len(previous.meta_fetched) + else: + total = len(msgids) - self.remote.get_messages (need_content, _got_msgs, 'raw') + self.bar_create(leave=True, total=total, desc="receiving metadata") - self.bar_close () + if resume and previous is not None: + self.bar_update(len(previous.meta_fetched)) - else: - self.vprint ("receiving content: everything up-to-date.") + # opening db for whole metadata sync + def _got_msgs(ms): + with notmuch2.Database(mode=notmuch2.Database.MODE.READ_WRITE) as db: + for m in ms: + self.bar_update(1) + self.local.update_tags(m, None, db) - return need_content + if previous is not None: + gids = [m["id"] for m in ms] + previous.update(gids) - def load_resume(self, f, lastid): - """ - Load a previous incomplete pull from resume file or create new resume file. 
- """ - from .resume import ResumePull - if os.path.exists(f): - try: - return ResumePull.load(f) - except Exception as ex: - self.vprint("failed to load resume file, creating new: %s" % ex) - return ResumePull.new(f, lastid) - else: - return ResumePull.new(f, lastid) + self.remote.get_messages(msgids, _got_msgs, "minimal") - def send (self, args): - self.setup (args, args.dry_run, True, True) - self.remote.get_labels () + self.bar_close() - msg = sys.stdin.buffer.read() + else: + self.vprint("receiving metadata: everything up-to-date.") - # check if in-reply-to is set and find threadId - threadId = None + def get_content(self, msgids): + """ + Get the full email source of the messages that we do not already have - import email - eml = email.message_from_bytes(msg) + Returns: + list of messages which were updated, these have also been updated in Notmuch and + does not need to be partially updated. - # If there are recipients passed on the CLI, we need to compare them with - # what's in the message headers, as they need to match the message body - # (we can't express other recipients via the GMail API) + """ - cli_recipients = set(args.recipients) + need_content = [m for m in msgids if not self.local.has(m)] - # construct existing recipient address list from To, Cc, Bcc headers - header_recipients = set() - for field_name in ("To", "Cc", "Bcc"): - # get all field values for the given field - field_values = eml.get_all(field_name, []) + if len(need_content) > 0: + self.bar_create( + leave=True, total=len(need_content), desc="receiving content" + ) - # parse these into a list of realnames and addresses - for (_, address) in email.utils.getaddresses(field_values): - header_recipients.add(address) + def _got_msgs(ms): + # opening db per message batch since it takes some time to download each one + with notmuch2.Database(mode=notmuch2.Database.MODE.READ_WRITE) as db: + for m in ms: + self.bar_update(1) + self.local.store(m, db) - if args.read_recipients: - if not 
header_recipients.issuperset(cli_recipients): - raise ValueError ( - "Recipients passed via sendmail(1) arguments, but not part of message headers: {}".format(", ".join(cli_recipients.difference(header_recipients)))) - elif not header_recipients == cli_recipients: - raise ValueError ( - "Recipients passed via sendmail(1) arguments ({}) differ from those in message headers ({}), perhaps you are missing the '-t' option?".format(", ".join(cli_recipients), ", ".join(header_recipients))) + self.remote.get_messages(need_content, _got_msgs, "raw") - self.vprint ("sending message, from: %s.." % (eml.get('From'))) + self.bar_close() - if 'In-Reply-To' in eml: - repl = eml['In-Reply-To'].strip().strip('<>') - self.vprint("looking for original message: %s" % repl) - with notmuch2.Database(mode = notmuch2.Database.MODE.READ_ONLY) as db: - try: - nmsg = db.find(repl) - except LookupError: - nmsg = None - if nmsg is not None: - (_, gids) = self.local.messages_to_gids([nmsg]) - if nmsg.header('Subject') != eml['Subject']: - self.vprint ("warning: subject does not match, might not be able to associate with existing thread.") - - if len(gids) > 0: - gmsg = self.remote.get_message(gids[0]) - threadId = gmsg['threadId'] - self.vprint ("found existing thread for new message: %s" % threadId) - else: - self.vprint ("warning: could not find gid of parent message, sent message will not be associated in the same thread") else: - self.vprint ("warning: could not find parent message, sent message will not be associated in the same thread") - - if not args.dry_run: - msg = self.remote.send(msg, threadId) - self.get_content([msg['id']]) - self.get_meta([msg['id']]) - - self.vprint ("message sent successfully: %s" % msg['id']) - - def set (self, args): - args.credentials = '' # for setup() - self.setup (args, False, True) - - if args.timeout is not None: - self.local.config.set_timeout (args.timeout) - - if args.replace_slash_with_dot: - self.local.config.set_replace_slash_with_dot 
(args.replace_slash_with_dot) - - if args.no_replace_slash_with_dot: - self.local.config.set_replace_slash_with_dot (not args.no_replace_slash_with_dot) - - if args.drop_non_existing_labels: - self.local.config.set_drop_non_existing_label (args.drop_non_existing_labels) - - if args.no_drop_non_existing_labels: - self.local.config.set_drop_non_existing_label (not args.no_drop_non_existing_labels) - - if args.ignore_empty_history: - self.local.config.set_ignore_empty_history (True) - - if args.no_ignore_empty_history: - self.local.config.set_ignore_empty_history (False) - - if args.remove_local_messages: - self.local.config.set_remove_local_messages (True) - - if args.no_remove_local_messages: - self.local.config.set_remove_local_messages (False) - - if args.ignore_tags_local is not None: - self.local.config.set_ignore_tags (args.ignore_tags_local) - - if args.ignore_tags_remote is not None: - self.local.config.set_ignore_remote_labels (args.ignore_tags_remote) - - if args.file_extension is not None: - self.local.config.set_file_extension (args.file_extension) - - if args.local_trash_tag is not None: - self.local.config.set_local_trash_tag (args.local_trash_tag) - - if args.translation_list_overlay is not None: - self.local.config.set_translation_list_overlay (args.translation_list_overlay) - - print ("Repository information and settings:") - print ("Account ...........: %s" % self.local.config.account) - print ("historyId .........: %d" % self.local.state.last_historyId) - print ("lastmod ...........: %d" % self.local.state.lastmod) - print ("Timeout ...........: %f" % self.local.config.timeout) - print ("File extension ....: %s" % self.local.config.file_extension) - print ("Remove local messages .....:", self.local.config.remove_local_messages) - print ("Drop non existing labels...:", self.local.config.drop_non_existing_label) - print ("Ignore empty history ......:", self.local.config.ignore_empty_history) - print ("Replace . 
with / ..........:", self.local.config.replace_slash_with_dot) - print ("Ignore tags (local) .......:", self.local.config.ignore_tags) - print ("Ignore labels (remote) ....:", self.local.config.ignore_remote_labels) - print ("Trash tag (local) .........:", self.local.config.local_trash_tag) - print ("Translation list overlay ..:", self.local.config.translation_list_overlay) - - def vprint (self, *args, **kwargs): - """ - Print unless --quiet. - """ - if not self.args.quiet: - print (*args, **kwargs) - - def bar_create(self, leave = True, total = None, desc = ''): - """ - Create progress bar. - """ - if not self.args.quiet: - self.bar = tqdm (leave = True, total = total, desc = desc) - - def bar_update(self, n): - """ - Update progress bar. - """ - if not self.args.quiet: - self.bar.update (n) - - def bar_close(self): - """ - Close progress bar. - """ - if not self.args.quiet: - self.bar.close() + self.vprint("receiving content: everything up-to-date.") + + return need_content + + def load_resume(self, f, lastid): + """ + Load a previous incomplete pull from resume file or create new resume file. 
+ """ + from .resume import ResumePull + + if os.path.exists(f): + try: + return ResumePull.load(f) + except Exception as ex: + self.vprint("failed to load resume file, creating new: %s" % ex) + return ResumePull.new(f, lastid) + else: + return ResumePull.new(f, lastid) + + def send(self, args): + self.setup(args, args.dry_run, True, True) + self.remote.get_labels() + + msg = sys.stdin.buffer.read() + + # check if in-reply-to is set and find threadId + threadId = None + + import email + + eml = email.message_from_bytes(msg) + + # If there are recipients passed on the CLI, we need to compare them with + # what's in the message headers, as they need to match the message body + # (we can't express other recipients via the GMail API) + + cli_recipients = set(args.recipients) + + # construct existing recipient address list from To, Cc, Bcc headers + header_recipients = set() + for field_name in ("To", "Cc", "Bcc"): + # get all field values for the given field + field_values = eml.get_all(field_name, []) + + # parse these into a list of realnames and addresses + for _, address in email.utils.getaddresses(field_values): + header_recipients.add(address) + + if args.read_recipients: + if not header_recipients.issuperset(cli_recipients): + raise ValueError( + "Recipients passed via sendmail(1) arguments, but not part of message headers: {}".format( + ", ".join(cli_recipients.difference(header_recipients)) + ) + ) + elif not header_recipients == cli_recipients: + raise ValueError( + "Recipients passed via sendmail(1) arguments ({}) differ from those in message headers ({}), perhaps you are missing the '-t' option?".format( + ", ".join(cli_recipients), ", ".join(header_recipients) + ) + ) + + self.vprint("sending message, from: %s.." 
% (eml.get("From"))) + + if "In-Reply-To" in eml: + repl = eml["In-Reply-To"].strip().strip("<>") + self.vprint("looking for original message: %s" % repl) + with notmuch2.Database(mode=notmuch2.Database.MODE.READ_ONLY) as db: + try: + nmsg = db.find(repl) + except LookupError: + nmsg = None + if nmsg is not None: + (_, gids) = self.local.messages_to_gids([nmsg]) + if nmsg.header("Subject") != eml["Subject"]: + self.vprint( + "warning: subject does not match, might not be able to associate with existing thread." + ) + + if len(gids) > 0: + gmsg = self.remote.get_message(gids[0]) + threadId = gmsg["threadId"] + self.vprint( + "found existing thread for new message: %s" % threadId + ) + else: + self.vprint( + "warning: could not find gid of parent message, sent message will not be associated in the same thread" + ) + else: + self.vprint( + "warning: could not find parent message, sent message will not be associated in the same thread" + ) + + if not args.dry_run: + msg = self.remote.send(msg, threadId) + self.get_content([msg["id"]]) + self.get_meta([msg["id"]]) + + self.vprint("message sent successfully: %s" % msg["id"]) + + def set(self, args): + args.credentials = "" # for setup() + self.setup(args, False, True) + + if args.timeout is not None: + self.local.config.set_timeout(args.timeout) + + if args.replace_slash_with_dot: + self.local.config.set_replace_slash_with_dot(args.replace_slash_with_dot) + + if args.no_replace_slash_with_dot: + self.local.config.set_replace_slash_with_dot( + not args.no_replace_slash_with_dot + ) + + if args.drop_non_existing_labels: + self.local.config.set_drop_non_existing_label(args.drop_non_existing_labels) + + if args.no_drop_non_existing_labels: + self.local.config.set_drop_non_existing_label( + not args.no_drop_non_existing_labels + ) + + if args.ignore_empty_history: + self.local.config.set_ignore_empty_history(True) + + if args.no_ignore_empty_history: + self.local.config.set_ignore_empty_history(False) + + if 
args.remove_local_messages: + self.local.config.set_remove_local_messages(True) + + if args.no_remove_local_messages: + self.local.config.set_remove_local_messages(False) + + if args.ignore_tags_local is not None: + self.local.config.set_ignore_tags(args.ignore_tags_local) + + if args.ignore_tags_remote is not None: + self.local.config.set_ignore_remote_labels(args.ignore_tags_remote) + + if args.file_extension is not None: + self.local.config.set_file_extension(args.file_extension) + + if args.local_trash_tag is not None: + self.local.config.set_local_trash_tag(args.local_trash_tag) + + if args.translation_list_overlay is not None: + self.local.config.set_translation_list_overlay( + args.translation_list_overlay + ) + + print("Repository information and settings:") + print("Account ...........: %s" % self.local.config.account) + print("historyId .........: %d" % self.local.state.last_historyId) + print("lastmod ...........: %d" % self.local.state.lastmod) + print("Timeout ...........: %f" % self.local.config.timeout) + print("File extension ....: %s" % self.local.config.file_extension) + print("Remove local messages .....:", self.local.config.remove_local_messages) + print("Drop non existing labels...:", self.local.config.drop_non_existing_label) + print("Ignore empty history ......:", self.local.config.ignore_empty_history) + print("Replace . with / ..........:", self.local.config.replace_slash_with_dot) + print("Ignore tags (local) .......:", self.local.config.ignore_tags) + print("Ignore labels (remote) ....:", self.local.config.ignore_remote_labels) + print("Trash tag (local) .........:", self.local.config.local_trash_tag) + print( + "Translation list overlay ..:", self.local.config.translation_list_overlay + ) + + def vprint(self, *args, **kwargs): + """ + Print unless --quiet. + """ + if not self.args.quiet: + print(*args, **kwargs) + + def bar_create(self, leave=True, total=None, desc=""): + """ + Create progress bar. 
+ """ + if not self.args.quiet: + self.bar = tqdm(leave=True, total=total, desc=desc) + + def bar_update(self, n): + """ + Update progress bar. + """ + if not self.args.quiet: + self.bar.update(n) + + def bar_close(self): + """ + Close progress bar. + """ + if not self.args.quiet: + self.bar.close() diff --git a/lieer/local.py b/lieer/local.py index 602bf32..220d7c4 100644 --- a/lieer/local.py +++ b/lieer/local.py @@ -26,658 +26,710 @@ import notmuch2 from .remote import Remote + class Local: - wd = None - loaded = False - - - # NOTE: Update README when changing this map. - translate_labels_default = { - 'INBOX' : 'inbox', - 'SPAM' : 'spam', - 'TRASH' : 'trash', - 'UNREAD' : 'unread', - 'STARRED' : 'flagged', - 'IMPORTANT' : 'important', - 'SENT' : 'sent', - 'DRAFT' : 'draft', - 'CHAT' : 'chat', - - 'CATEGORY_PERSONAL' : 'personal', - 'CATEGORY_SOCIAL' : 'social', - 'CATEGORY_PROMOTIONS' : 'promotions', - 'CATEGORY_UPDATES' : 'updates', - 'CATEGORY_FORUMS' : 'forums', - } - - labels_translate_default = { v: k for k, v in translate_labels_default.items () } - - ignore_labels = set ([ - 'archive', - 'arxiv', - 'attachment', - 'encrypted', - 'signed', - 'passed', - 'replied', - 'muted', - 'mute', - 'todo', - 'Trash', - 'voicemail', - ]) - - def update_translation(self, remote, local): - """ - Convenience function to ensure both maps (remote -> local and local -> remote) - get updated when you update a translation. - """ - # Did you reverse the parameters? - assert remote in self.translate_labels - self.translate_labels[remote] = local - self.labels_translate = { v: k for k, v in self.translate_labels.items () } - - def update_translation_list_with_overlay(self, translation_list_overlay): - """ - Takes a list with an even number of items. The list is interpreted as a list of pairs - of (remote, local), where each member of each pair is a string. Each pair is added to the - translation, overwriting the translation if one already exists (in either direction). 
- If either the remote or the local labels are non-unique, the later items in the list will - overwrite the earlier ones in the direction in which the source is non-unique (for example, - ["a", "1", "b", 2", "a", "3"] will yield {'a': 3, 'b': 2} in one direction and {1: 'a', 2: 'b', 3: 'a'} - in the other). - """ - - if len(translation_list_overlay) % 2 != 0: - raise Exception(f'Translation list overlay must have an even number of items: {translation_list_overlay}') - - for i in range(0,len(translation_list_overlay),2): - (remote, local) = translation_list_overlay[i], translation_list_overlay[i+1] - self.translate_labels[remote] = local - self.labels_translate[local] = remote - - class RepositoryException (Exception): - pass - - - class Config: - replace_slash_with_dot = False - account = None - timeout = 10 * 60 - drop_non_existing_label = False - ignore_empty_history = False - ignore_tags = None - ignore_remote_labels = None - remove_local_messages = True - file_extension = None - local_trash_tag = 'trash' - translation_list_overlay = None - - def __init__ (self, config_f): - self.config_f = config_f - - if os.path.exists (self.config_f): - try: - with open (self.config_f, 'r') as fd: - self.json = json.load (fd) - except json.decoder.JSONDecodeError: - print ("Failed to decode config file `{}`.".format (self.config_f)) - raise - else: - self.json = {} - - self.replace_slash_with_dot = self.json.get ('replace_slash_with_dot', False) - self.account = self.json.get ('account', 'me') - self.timeout = self.json.get ('timeout', 10 * 60) - self.drop_non_existing_label = self.json.get ('drop_non_existing_label', False) - self.ignore_empty_history = self.json.get ('ignore_empty_history', False) - self.remove_local_messages = self.json.get ('remove_local_messages', True) - self.ignore_tags = set(self.json.get ('ignore_tags', [])) - self.ignore_remote_labels = set(self.json.get ('ignore_remote_labels', Remote.DEFAULT_IGNORE_LABELS)) - self.file_extension = self.json.get 
('file_extension', '') - self.local_trash_tag = self.json.get ('local_trash_tag', 'trash') - self.translation_list_overlay = self.json.get ('translation_list_overlay', []) - - def write (self): - self.json = {} - - self.json['replace_slash_with_dot'] = self.replace_slash_with_dot - self.json['account'] = self.account - self.json['timeout'] = self.timeout - self.json['drop_non_existing_label'] = self.drop_non_existing_label - self.json['ignore_empty_history'] = self.ignore_empty_history - self.json['ignore_tags'] = list(self.ignore_tags) - self.json['ignore_remote_labels'] = list(self.ignore_remote_labels) - self.json['remove_local_messages'] = self.remove_local_messages - self.json['file_extension'] = self.file_extension - self.json['local_trash_tag'] = self.local_trash_tag - self.json['translation_list_overlay'] = self.translation_list_overlay - - if os.path.exists (self.config_f): - shutil.copyfile (self.config_f, self.config_f + '.bak') - - with tempfile.NamedTemporaryFile (mode = 'w+', dir = os.path.dirname (self.config_f), delete = False) as fd: - json.dump (self.json, fd) - os.rename (fd.name, self.config_f) - - def set_account (self, a): - self.account = a - self.write () - - def set_timeout (self, t): - self.timeout = t - self.write () - - def set_replace_slash_with_dot (self, r): - self.replace_slash_with_dot = r - self.write () - - def set_drop_non_existing_label (self, r): - self.drop_non_existing_label = r - self.write () - - def set_ignore_empty_history (self, r): - self.ignore_empty_history = r - self.write() - - def set_remove_local_messages (self, r): - self.remove_local_messages = r - self.write() - - def set_ignore_tags (self, t): - if len(t.strip ()) == 0: - self.ignore_tags = set() - else: - self.ignore_tags = set([ tt.strip () for tt in t.split(',') ]) - - self.write () - - def set_ignore_remote_labels (self, t): - if len(t.strip ()) == 0: - self.ignore_remote_labels = set() - else: - self.ignore_remote_labels = set([ tt.strip () for tt in 
t.split(',') ]) - - self.write () - - def set_file_extension (self, t): - try: - with tempfile.NamedTemporaryFile (dir = os.path.dirname (self.config_f), suffix = t) as _: - pass - - self.file_extension = t.strip () - self.write () - except OSError: - print ("Failed creating test file with file extension: " + t + ", not set.") - raise - - def set_local_trash_tag (self, t): - if ',' in t: - print('The local_trash_tag must be a single tag, not a list. Commas are not allowed.') - raise ValueError() - self.local_trash_tag = t.strip() or 'trash' - self.write() - - def set_translation_list_overlay (self, t): - if len(t.strip ()) == 0: - self.translation_list_overlay = [] - else: - self.translation_list_overlay = [ tt.strip () for tt in t.split(',') ] - if len(self.translation_list_overlay) % 2 != 0: - raise Exception(f'Translation list overlay must have an even number of items: {self.translation_list_overlay}') - self.write () - - - - class State: - # last historyid of last synchronized message, anything that has happened - # remotely after this needs to be synchronized. gmail may return a 404 error - # if the history records have been deleted, in which case we have to do a full - # sync. - last_historyId = 0 - - # this is the last modification id of the notmuch db when the previous push was completed. - lastmod = 0 - - def __init__ (self, state_f, config): - self.state_f = state_f - - # True if config file contains state keys and should be migrated. - # We will write both state and config after load if true. - migrate_from_config = False - - if os.path.exists (self.state_f): + wd = None + loaded = False + + # NOTE: Update README when changing this map. 
+    translate_labels_default = {
+        "INBOX": "inbox",
+        "SPAM": "spam",
+        "TRASH": "trash",
+        "UNREAD": "unread",
+        "STARRED": "flagged",
+        "IMPORTANT": "important",
+        "SENT": "sent",
+        "DRAFT": "draft",
+        "CHAT": "chat",
+        "CATEGORY_PERSONAL": "personal",
+        "CATEGORY_SOCIAL": "social",
+        "CATEGORY_PROMOTIONS": "promotions",
+        "CATEGORY_UPDATES": "updates",
+        "CATEGORY_FORUMS": "forums",
+    }
+
+    labels_translate_default = {v: k for k, v in translate_labels_default.items()}
+
+    ignore_labels = set(
+        [
+            "archive",
+            "arxiv",
+            "attachment",
+            "encrypted",
+            "signed",
+            "passed",
+            "replied",
+            "muted",
+            "mute",
+            "todo",
+            "Trash",
+            "voicemail",
+        ]
+    )
+
+    def update_translation(self, remote, local):
+        """
+        Convenience function to ensure both maps (remote -> local and local -> remote)
+        get updated when you update a translation.
+        """
+        # Did you reverse the parameters?
+        assert remote in self.translate_labels
+        self.translate_labels[remote] = local
+        self.labels_translate = {v: k for k, v in self.translate_labels.items()}
+
+    def update_translation_list_with_overlay(self, translation_list_overlay):
+        """
+        Takes a list with an even number of items. The list is interpreted as a list of pairs
+        of (remote, local), where each member of each pair is a string. Each pair is added to the
+        translation, overwriting the translation if one already exists (in either direction).
+        If either the remote or the local labels are non-unique, the later items in the list will
+        overwrite the earlier ones in the direction in which the source is non-unique (for example,
+        ["a", "1", "b", "2", "a", "3"] will yield {'a': 3, 'b': 2} in one direction and {1: 'a', 2: 'b', 3: 'a'}
+        in the other).
+ """ + + if len(translation_list_overlay) % 2 != 0: + raise Exception( + f"Translation list overlay must have an even number of items: {translation_list_overlay}" + ) + + for i in range(0, len(translation_list_overlay), 2): + (remote, local) = ( + translation_list_overlay[i], + translation_list_overlay[i + 1], + ) + self.translate_labels[remote] = local + self.labels_translate[local] = remote + + class RepositoryException(Exception): + pass + + class Config: + replace_slash_with_dot = False + account = None + timeout = 10 * 60 + drop_non_existing_label = False + ignore_empty_history = False + ignore_tags = None + ignore_remote_labels = None + remove_local_messages = True + file_extension = None + local_trash_tag = "trash" + translation_list_overlay = None + + def __init__(self, config_f): + self.config_f = config_f + + if os.path.exists(self.config_f): + try: + with open(self.config_f, "r") as fd: + self.json = json.load(fd) + except json.decoder.JSONDecodeError: + print("Failed to decode config file `{}`.".format(self.config_f)) + raise + else: + self.json = {} + + self.replace_slash_with_dot = self.json.get("replace_slash_with_dot", False) + self.account = self.json.get("account", "me") + self.timeout = self.json.get("timeout", 10 * 60) + self.drop_non_existing_label = self.json.get( + "drop_non_existing_label", False + ) + self.ignore_empty_history = self.json.get("ignore_empty_history", False) + self.remove_local_messages = self.json.get("remove_local_messages", True) + self.ignore_tags = set(self.json.get("ignore_tags", [])) + self.ignore_remote_labels = set( + self.json.get("ignore_remote_labels", Remote.DEFAULT_IGNORE_LABELS) + ) + self.file_extension = self.json.get("file_extension", "") + self.local_trash_tag = self.json.get("local_trash_tag", "trash") + self.translation_list_overlay = self.json.get( + "translation_list_overlay", [] + ) + + def write(self): + self.json = {} + + self.json["replace_slash_with_dot"] = self.replace_slash_with_dot + 
self.json["account"] = self.account + self.json["timeout"] = self.timeout + self.json["drop_non_existing_label"] = self.drop_non_existing_label + self.json["ignore_empty_history"] = self.ignore_empty_history + self.json["ignore_tags"] = list(self.ignore_tags) + self.json["ignore_remote_labels"] = list(self.ignore_remote_labels) + self.json["remove_local_messages"] = self.remove_local_messages + self.json["file_extension"] = self.file_extension + self.json["local_trash_tag"] = self.local_trash_tag + self.json["translation_list_overlay"] = self.translation_list_overlay + + if os.path.exists(self.config_f): + shutil.copyfile(self.config_f, self.config_f + ".bak") + + with tempfile.NamedTemporaryFile( + mode="w+", dir=os.path.dirname(self.config_f), delete=False + ) as fd: + json.dump(self.json, fd) + os.rename(fd.name, self.config_f) + + def set_account(self, a): + self.account = a + self.write() + + def set_timeout(self, t): + self.timeout = t + self.write() + + def set_replace_slash_with_dot(self, r): + self.replace_slash_with_dot = r + self.write() + + def set_drop_non_existing_label(self, r): + self.drop_non_existing_label = r + self.write() + + def set_ignore_empty_history(self, r): + self.ignore_empty_history = r + self.write() + + def set_remove_local_messages(self, r): + self.remove_local_messages = r + self.write() + + def set_ignore_tags(self, t): + if len(t.strip()) == 0: + self.ignore_tags = set() + else: + self.ignore_tags = set([tt.strip() for tt in t.split(",")]) + + self.write() + + def set_ignore_remote_labels(self, t): + if len(t.strip()) == 0: + self.ignore_remote_labels = set() + else: + self.ignore_remote_labels = set([tt.strip() for tt in t.split(",")]) + + self.write() + + def set_file_extension(self, t): + try: + with tempfile.NamedTemporaryFile( + dir=os.path.dirname(self.config_f), suffix=t + ) as _: + pass + + self.file_extension = t.strip() + self.write() + except OSError: + print( + "Failed creating test file with file extension: " + t + 
", not set." + ) + raise + + def set_local_trash_tag(self, t): + if "," in t: + print( + "The local_trash_tag must be a single tag, not a list. Commas are not allowed." + ) + raise ValueError() + self.local_trash_tag = t.strip() or "trash" + self.write() + + def set_translation_list_overlay(self, t): + if len(t.strip()) == 0: + self.translation_list_overlay = [] + else: + self.translation_list_overlay = [tt.strip() for tt in t.split(",")] + if len(self.translation_list_overlay) % 2 != 0: + raise Exception( + f"Translation list overlay must have an even number of items: {self.translation_list_overlay}" + ) + self.write() + + class State: + # last historyid of last synchronized message, anything that has happened + # remotely after this needs to be synchronized. gmail may return a 404 error + # if the history records have been deleted, in which case we have to do a full + # sync. + last_historyId = 0 + + # this is the last modification id of the notmuch db when the previous push was completed. + lastmod = 0 + + def __init__(self, state_f, config): + self.state_f = state_f + + # True if config file contains state keys and should be migrated. + # We will write both state and config after load if true. 
+ migrate_from_config = False + + if os.path.exists(self.state_f): + try: + with open(self.state_f, "r") as fd: + self.json = json.load(fd) + except json.decoder.JSONDecodeError: + print("Failed to decode state file `{}`.".format(self.state_f)) + raise + + elif os.path.exists(config.config_f): + try: + with open(config.config_f, "r") as fd: + self.json = json.load(fd) + except json.decoder.JSONDecodeError: + print("Failed to decode config file `{}`.".format(config.config_f)) + raise + if any(k in self.json.keys() for k in ["last_historyId", "lastmod"]): + migrate_from_config = True + else: + self.json = {} + + self.last_historyId = self.json.get("last_historyId", 0) + self.lastmod = self.json.get("lastmod", 0) + + if migrate_from_config: + self.write() + config.write() + + def write(self): + self.json = {} + + self.json["last_historyId"] = self.last_historyId + self.json["lastmod"] = self.lastmod + + if os.path.exists(self.state_f): + shutil.copyfile(self.state_f, self.state_f + ".bak") + + with tempfile.NamedTemporaryFile( + mode="w+", dir=os.path.dirname(self.state_f), delete=False + ) as fd: + json.dump(self.json, fd) + os.rename(fd.name, self.state_f) + + def set_last_history_id(self, hid): + self.last_historyId = hid + self.write() + + def set_lastmod(self, m): + self.lastmod = m + self.write() + + # we are in the class "Local"; this is the Local instance constructor + def __init__(self, g): + self.gmailieer = g + self.wd = os.getcwd() + self.dry_run = g.dry_run + self.verbose = g.verbose + + # config and state files for local repository + self.config_f = os.path.join(self.wd, ".gmailieer.json") + self.state_f = os.path.join(self.wd, ".state.gmailieer.json") + self.credentials_f = os.path.join(self.wd, ".credentials.gmailieer.json") + + # mail store + self.md = os.path.join(self.wd, "mail") + + # initialize label translation instance variables + self.translate_labels = Local.translate_labels_default.copy() + self.labels_translate = 
Local.labels_translate_default.copy() + + def load_repository(self, block=False): + """ + Loads the current local repository + + block (boolean): if repository is in use, wait for lock to be freed (default: False) + """ + + if not os.path.exists(self.config_f): + raise Local.RepositoryException( + "local repository not initialized: could not find config file" + ) + + if any( + [ + not os.path.exists(os.path.join(self.md, mail_dir)) + for mail_dir in ("cur", "new", "tmp") + ] + ): + raise Local.RepositoryException( + "local repository not initialized: could not find mail dir structure" + ) + + ## Check if we are in the notmuch db + with notmuch2.Database() as db: + try: + self.nm_relative = str(Path(self.md).relative_to(db.path)) + except ValueError: + raise Local.RepositoryException( + "local mail repository not in notmuch db" + ) + self.nm_dir = str(Path(self.md).resolve()) + + ## Lock repository try: - with open (self.state_f, 'r') as fd: - self.json = json.load (fd) - except json.decoder.JSONDecodeError: - print ("Failed to decode state file `{}`.".format (self.state_f)) - raise - - elif os.path.exists (config.config_f): + self.lckf = open(".lock", "w") + if block: + fcntl.lockf(self.lckf, fcntl.LOCK_EX) + else: + fcntl.lockf(self.lckf, fcntl.LOCK_EX | fcntl.LOCK_NB) + except OSError: + raise Local.RepositoryException( + "failed to lock repository (probably in use by another gmi instance)" + ) + + self.config = Local.Config(self.config_f) + self.state = Local.State(self.state_f, self.config) + + self.ignore_labels = self.ignore_labels | self.config.ignore_tags + self.update_translation("TRASH", self.config.local_trash_tag) + self.update_translation_list_with_overlay(self.config.translation_list_overlay) + + self.__load_cache__() + + # load notmuch config + with notmuch2.Database() as db: + self.new_tags = db.config.get("new.tags", "").split(";") + self.new_tags = [t.strip() for t in self.new_tags if len(t.strip()) > 0] + + self.loaded = True + + def 
__load_cache__(self): + ## The Cache: + ## + ## this cache is used to know which messages we have a physical copy of. + ## hopefully this won't grow too gigantic with lots of messages. + self.files = [] + for _, _, fnames in os.walk(os.path.join(self.md, "cur")): + _fnames = ("cur/" + f for f in fnames) + self.files.extend(_fnames) + break + + for _, _, fnames in os.walk(os.path.join(self.md, "new")): + _fnames = ("new/" + f for f in fnames) + self.files.extend(_fnames) + break + + # exclude files that are unlikely to be real message files + self.files = [f for f in self.files if os.path.basename(f)[0] != "."] + + self.gids = {} + for f in self.files: + m = self.__filename_to_gid__(os.path.basename(f)) + self.gids[m] = f + + def initialize_repository(self, replace_slash_with_dot, account): + """ + Sets up a local repository + """ + print("initializing repository in: %s.." % self.wd) + + # check if there is a repository here already or if there is anything that will conflict with setting up one + if os.path.exists(self.config_f): + raise Local.RepositoryException( + "'.gmailieer.json' exists: this repository seems to already be set up!" + ) + + if os.path.exists(self.md): + raise Local.RepositoryException( + "'mail' exists: this repository seems to already be set up!" 
+ ) + + self.config = Local.Config(self.config_f) + self.config.replace_slash_with_dot = replace_slash_with_dot + self.config.account = account + self.config.write() + os.makedirs(os.path.join(self.md, "cur")) + os.makedirs(os.path.join(self.md, "new")) + os.makedirs(os.path.join(self.md, "tmp")) + + def has(self, m): + """Check whether we have message id""" + return m in self.gids + + def contains(self, fname): + """Check whether message file exists is in repository""" + return Path(self.md) in Path(fname).parents + + def __update_cache__(self, nmsg, old=None): + """ + Update cache with filenames from nmsg, removing the old: + + nmsg - notmuch2.Message + old - tuple of old gid and old fname + """ + + # remove old file from cache + if old is not None: + (old_gid, old_f) = old + + old_f = Path(old_f) + self.files.remove(os.path.join(old_f.parent.name, old_f.name)) + self.gids.pop(old_gid) + + # add message to cache + fname_iter = nmsg.filenames() + for _f in fname_iter: + if self.contains(_f): + new_f = Path(_f) + + # there might be more GIDs (and files) for each NotmuchMessage, if so, + # the last matching file will be used in the gids map. + + _m = self.__filename_to_gid__(new_f.name) + self.gids[_m] = os.path.join(new_f.parent.name, new_f.name) + self.files.append(os.path.join(new_f.parent.name, new_f.name)) + + def messages_to_gids(self, msgs): + """ + Gets GIDs from a list of NotmuchMessages, the returned list of tuples may contain + the same NotmuchMessage several times for each matching file. Files outside the + repository are filtered out. + """ + gids = [] + messages = [] + + for m in msgs: + for fname in m.filenames(): + if self.contains(fname): + # get gmail id + gid = self.__filename_to_gid__(os.path.basename(fname)) + if gid: + gids.append(gid) + messages.append(m) + + return (messages, gids) + + def __filename_to_gid__(self, fname): + ext = "" + if self.config.file_extension: + ext = "." 
+ self.config.file_extension + ext += ":2," + + f = fname.rfind(ext) + if f > 5: + return fname[:f] + else: + print( + "'%s' does not contain valid maildir delimiter, correct file name extension, or does not seem to have a valid GID, ignoring." + % fname + ) + return None + + def __make_maildir_name__(self, m, labels): + # https://cr.yp.to/proto/maildir.html + ext = "" + if self.config.file_extension: + ext = "." + self.config.file_extension + + p = m + ext + ":" + info = "2," + + # must be ascii sorted + if "DRAFT" in labels: + info += "D" + + if "STARRED" in labels: + info += "F" + + ## notmuch does not add 'T', so it will only be removed at the next + ## maildir sync flags anyway. + + # if 'TRASH' in labels: + # info += 'T' + + if "UNREAD" not in labels: + info += "S" + + return p + info + + def remove(self, gid, db): + """ + Remove message from local store + """ + assert ( + self.config.remove_local_messages + ), "tried to remove message when 'remove_local_messages' was set to False" + + fname = self.gids.get(gid, None) + ffname = fname + + if fname is None: + print("remove: message does not exist in store: %s" % gid) + return + + fname = os.path.join(self.md, fname) try: - with open (config.config_f, 'r') as fd: - self.json = json.load (fd) - except json.decoder.JSONDecodeError: - print ("Failed to decode config file `{}`.".format (config.config_f)) - raise - if any(k in self.json.keys () for k in ['last_historyId', 'lastmod']): - migrate_from_config = True - else: - self.json = {} - - self.last_historyId = self.json.get ('last_historyId', 0) - self.lastmod = self.json.get ('lastmod', 0) - - if migrate_from_config: - self.write () - config.write () - - def write (self): - self.json = {} - - self.json['last_historyId'] = self.last_historyId - self.json['lastmod'] = self.lastmod - - if os.path.exists (self.state_f): - shutil.copyfile (self.state_f, self.state_f + '.bak') - - with tempfile.NamedTemporaryFile (mode = 'w+', dir = os.path.dirname (self.state_f), 
delete = False) as fd: - json.dump (self.json, fd) - os.rename (fd.name, self.state_f) - - def set_last_history_id (self, hid): - self.last_historyId = hid - self.write () - - def set_lastmod (self, m): - self.lastmod = m - self.write () - - # we are in the class "Local"; this is the Local instance constructor - def __init__ (self, g): - self.gmailieer = g - self.wd = os.getcwd () - self.dry_run = g.dry_run - self.verbose = g.verbose - - # config and state files for local repository - self.config_f = os.path.join (self.wd, '.gmailieer.json') - self.state_f = os.path.join (self.wd, '.state.gmailieer.json') - self.credentials_f = os.path.join (self.wd, '.credentials.gmailieer.json') - - # mail store - self.md = os.path.join (self.wd, 'mail') - - # initialize label translation instance variables - self.translate_labels = Local.translate_labels_default.copy() - self.labels_translate = Local.labels_translate_default.copy() - - def load_repository (self, block = False): - """ - Loads the current local repository - - block (boolean): if repository is in use, wait for lock to be freed (default: False) - """ - - if not os.path.exists (self.config_f): - raise Local.RepositoryException ('local repository not initialized: could not find config file') - - if any ([not os.path.exists (os.path.join (self.md, mail_dir)) - for mail_dir in ('cur', 'new', 'tmp')]): - raise Local.RepositoryException ('local repository not initialized: could not find mail dir structure') - - ## Check if we are in the notmuch db - with notmuch2.Database () as db: - try: - self.nm_relative=str(Path(self.md).relative_to(db.path)) - except ValueError: - raise Local.RepositoryException ("local mail repository not in notmuch db") - self.nm_dir=str(Path(self.md).resolve()) - - ## Lock repository - try: - self.lckf = open ('.lock', 'w') - if block: - fcntl.lockf (self.lckf, fcntl.LOCK_EX) - else: - fcntl.lockf (self.lckf, fcntl.LOCK_EX | fcntl.LOCK_NB) - except OSError: - raise Local.RepositoryException 
("failed to lock repository (probably in use by another gmi instance)") - - self.config = Local.Config (self.config_f) - self.state = Local.State (self.state_f, self.config) - - self.ignore_labels = self.ignore_labels | self.config.ignore_tags - self.update_translation('TRASH', self.config.local_trash_tag) - self.update_translation_list_with_overlay(self.config.translation_list_overlay) - - self.__load_cache__ () - - # load notmuch config - with notmuch2.Database() as db: - self.new_tags = db.config.get("new.tags", "").split(';') - self.new_tags = [t.strip() for t in self.new_tags if len(t.strip()) > 0] - - self.loaded = True - - def __load_cache__ (self): - ## The Cache: - ## - ## this cache is used to know which messages we have a physical copy of. - ## hopefully this won't grow too gigantic with lots of messages. - self.files = [] - for (_, _, fnames) in os.walk (os.path.join (self.md, 'cur')): - _fnames = ( 'cur/' + f for f in fnames ) - self.files.extend (_fnames) - break - - for (_, _, fnames) in os.walk (os.path.join (self.md, 'new')): - _fnames = ( 'new/' + f for f in fnames ) - self.files.extend (_fnames) - break - - # exclude files that are unlikely to be real message files - self.files = [ f for f in self.files if os.path.basename(f)[0] != '.' ] - - self.gids = {} - for f in self.files: - m = self.__filename_to_gid__ (os.path.basename (f)) - self.gids[m] = f - - def initialize_repository (self, replace_slash_with_dot, account): - """ - Sets up a local repository - """ - print ("initializing repository in: %s.." 
% self.wd) - - # check if there is a repository here already or if there is anything that will conflict with setting up one - if os.path.exists (self.config_f): - raise Local.RepositoryException ("'.gmailieer.json' exists: this repository seems to already be set up!") - - if os.path.exists (self.md): - raise Local.RepositoryException ("'mail' exists: this repository seems to already be set up!") - - self.config = Local.Config (self.config_f) - self.config.replace_slash_with_dot = replace_slash_with_dot - self.config.account = account - self.config.write () - os.makedirs (os.path.join (self.md, 'cur')) - os.makedirs (os.path.join (self.md, 'new')) - os.makedirs (os.path.join (self.md, 'tmp')) - - def has (self, m): - """ Check whether we have message id """ - return (m in self.gids) - - def contains (self, fname): - """ Check whether message file exists is in repository """ - return ( Path(self.md) in Path(fname).parents ) - - def __update_cache__ (self, nmsg, old = None): - """ - Update cache with filenames from nmsg, removing the old: - - nmsg - notmuch2.Message - old - tuple of old gid and old fname - """ - - # remove old file from cache - if old is not None: - (old_gid, old_f) = old - - old_f = Path (old_f) - self.files.remove (os.path.join (old_f.parent.name, old_f.name)) - self.gids.pop (old_gid) - - # add message to cache - fname_iter = nmsg.filenames () - for _f in fname_iter: - if self.contains (_f): - new_f = Path (_f) - - # there might be more GIDs (and files) for each NotmuchMessage, if so, - # the last matching file will be used in the gids map. - - _m = self.__filename_to_gid__ (new_f.name) - self.gids[_m] = os.path.join (new_f.parent.name, new_f.name) - self.files.append (os.path.join (new_f.parent.name, new_f.name)) - - def messages_to_gids (self, msgs): - """ - Gets GIDs from a list of NotmuchMessages, the returned list of tuples may contain - the same NotmuchMessage several times for each matching file. 
Files outside the - repository are filtered out. - """ - gids = [] - messages = [] - - for m in msgs: - for fname in m.filenames (): - if self.contains (fname): - # get gmail id - gid = self.__filename_to_gid__ (os.path.basename (fname)) - if gid: - gids.append (gid) - messages.append (m) - - return (messages, gids) - - def __filename_to_gid__ (self, fname): - ext = '' - if self.config.file_extension: - ext = '.' + self.config.file_extension - ext += ':2,' - - f = fname.rfind (ext) - if f > 5: - return fname[:f] - else: - print ("'%s' does not contain valid maildir delimiter, correct file name extension, or does not seem to have a valid GID, ignoring." % fname) - return None - - def __make_maildir_name__ (self, m, labels): - # https://cr.yp.to/proto/maildir.html - ext = '' - if self.config.file_extension: - ext = '.' + self.config.file_extension - - p = m + ext + ':' - info = '2,' - - # must be ascii sorted - if 'DRAFT' in labels: - info += 'D' - - if 'STARRED' in labels: - info += 'F' - - ## notmuch does not add 'T', so it will only be removed at the next - ## maildir sync flags anyway. - - # if 'TRASH' in labels: - # info += 'T' - - if 'UNREAD' not in labels: - info += 'S' - - return p + info - - def remove (self, gid, db): - """ - Remove message from local store - """ - assert self.config.remove_local_messages, "tried to remove message when 'remove_local_messages' was set to False" - - fname = self.gids.get (gid, None) - ffname = fname - - if fname is None: - print ("remove: message does not exist in store: %s" % gid) - return - - fname = os.path.join (self.md, fname) - try: - nmsg = db.get(fname) - except LookupError: - nmsg = None - - self.print_changes ("deleting %s: %s." 
% (gid, fname)) - - if not self.dry_run: - if nmsg is not None: - db.remove(fname) - os.unlink (fname) - - self.files.remove (ffname) - self.gids.pop (gid) - - def store (self, m, db): - """ - Store message in local store - """ - - gid = m['id'] - msg_str = base64.urlsafe_b64decode(m['raw'].encode ('ASCII')) - - # messages from GMail have windows line endings - if os.linesep == '\n': - msg_str = msg_str.replace (b'\r\n', b'\n') - - labels = m.get('labelIds', []) - - bname = self.__make_maildir_name__(gid, labels) - - # add to cache - self.files.append (os.path.join ('cur', bname)) - self.gids[gid] = os.path.join ('cur', bname) - - p = os.path.join (self.md, 'cur', bname) - tmp_p = os.path.join (self.md, 'tmp', bname) - - if os.path.exists (p): - raise Local.RepositoryException ("local file already exists: %s" % p) - - if os.path.exists (tmp_p): - raise Local.RepositoryException ("local temporary file already exists: %s" % tmp_p) + nmsg = db.get(fname) + except LookupError: + nmsg = None - if not self.dry_run: - with open (tmp_p, 'wb') as fd: - fd.write (msg_str) - - # Set atime and mtime of the message file to Gmail receive date - internalDate = int(m['internalDate']) / 1000 # ms to s - os.utime(tmp_p, (internalDate, internalDate)) + self.print_changes("deleting %s: %s." % (gid, fname)) - os.rename (tmp_p, p) - - # add to notmuch - self.update_tags (m, p, db) + if not self.dry_run: + if nmsg is not None: + db.remove(fname) + os.unlink(fname) - def update_tags (self, m, fname, db): - # make sure notmuch tags reflect gmail labels - gid = m['id'] - glabels = m.get('labelIds', []) + self.files.remove(ffname) + self.gids.pop(gid) - # translate labels. 
Remote.get_labels () must have been called first - labels = [] - for l in glabels: - ll = self.gmailieer.remote.labels.get(l, None) + def store(self, m, db): + """ + Store message in local store + """ - if ll is None and not self.config.drop_non_existing_label: - err = "error: GMail supplied a label that there exists no record for! You can `gmi set --drop-non-existing-labels` to work around the issue (https://github.com/gauteh/lieer/issues/48)" - print (err) - raise Local.RepositoryException (err) - elif ll is None: - pass # drop - else: - labels.append (ll) + gid = m["id"] + msg_str = base64.urlsafe_b64decode(m["raw"].encode("ASCII")) - # remove ignored labels - labels = set(labels) - labels = list(labels - self.gmailieer.remote.ignore_labels) + # messages from GMail have windows line endings + if os.linesep == "\n": + msg_str = msg_str.replace(b"\r\n", b"\n") - # translate to notmuch tags - labels = [self.translate_labels.get (l, l) for l in labels] + labels = m.get("labelIds", []) - # this is my weirdness - if self.config.replace_slash_with_dot: - labels = [l.replace ('/', '.') for l in labels] - - if fname is None: - # this file hopefully already exists and just needs it tags updated, - # let's try to find its name in the gid to fname table. 
- fname = os.path.join (self.md, self.gids[gid]) + bname = self.__make_maildir_name__(gid, labels) - else: - # new file - fname = os.path.join (self.md, 'cur', fname) - - if not os.path.exists (fname): - if not self.dry_run: - print ("missing file: reloading cache to check for changes..", end = '', flush = True) - self.__load_cache__ () - fname = os.path.join (self.md, self.gids[gid]) - print ("done.") + # add to cache + self.files.append(os.path.join("cur", bname)) + self.gids[gid] = os.path.join("cur", bname) - if not os.path.exists (fname): - raise Local.RepositoryException ("tried to update tags on non-existent file: %s" % fname) + p = os.path.join(self.md, "cur", bname) + tmp_p = os.path.join(self.md, "tmp", bname) - self.print_changes ("tried to update tags on non-existent file: %s" % fname) + if os.path.exists(p): + raise Local.RepositoryException("local file already exists: %s" % p) - try: - nmsg = db.get(fname) - except LookupError: - nmsg = None + if os.path.exists(tmp_p): + raise Local.RepositoryException( + "local temporary file already exists: %s" % tmp_p + ) - if nmsg is None: - self.print_changes ("adding message: %s: %s, with tags: %s" % (gid, fname, str(labels))) - if not self.dry_run: - try: - (nmsg, _) = db.add (fname, sync_flags = True) - except notmuch2.FileNotEmailError: - print('%s is not an email' % fname) - return True - - # adding initial tags - with nmsg.frozen(): - for t in labels: - nmsg.tags.add (t) - - for t in self.new_tags: - nmsg.tags.add (t) - - nmsg.tags.to_maildir_flags() - self.__update_cache__ (nmsg) - - return True - - else: - # message is already in db, set local tags to match remote tags - otags = nmsg.tags - igntags = otags & self.ignore_labels - otags = otags - self.ignore_labels # remove ignored tags while checking - if otags != set (labels): - labels.extend (igntags) # add back local ignored tags before adding if not self.dry_run: - with nmsg.frozen(): - nmsg.tags.clear() - for t in labels: - nmsg.tags.add (t) - 
nmsg.tags.to_maildir_flags() - self.__update_cache__ (nmsg, (gid, fname)) - - self.print_changes ("changing tags on message: %s from: %s to: %s" % (gid, str(otags), str(labels))) - - return True - else: - return False - - def print_changes (self, changes): - if self.dry_run: - print ("(dry-run) " + changes) - elif self.verbose: - print(changes) - - + with open(tmp_p, "wb") as fd: + fd.write(msg_str) + + # Set atime and mtime of the message file to Gmail receive date + internalDate = int(m["internalDate"]) / 1000 # ms to s + os.utime(tmp_p, (internalDate, internalDate)) + + os.rename(tmp_p, p) + + # add to notmuch + self.update_tags(m, p, db) + + def update_tags(self, m, fname, db): + # make sure notmuch tags reflect gmail labels + gid = m["id"] + glabels = m.get("labelIds", []) + + # translate labels. Remote.get_labels () must have been called first + labels = [] + for l in glabels: + ll = self.gmailieer.remote.labels.get(l, None) + + if ll is None and not self.config.drop_non_existing_label: + err = "error: GMail supplied a label that there exists no record for! You can `gmi set --drop-non-existing-labels` to work around the issue (https://github.com/gauteh/lieer/issues/48)" + print(err) + raise Local.RepositoryException(err) + elif ll is None: + pass # drop + else: + labels.append(ll) + + # remove ignored labels + labels = set(labels) + labels = list(labels - self.gmailieer.remote.ignore_labels) + + # translate to notmuch tags + labels = [self.translate_labels.get(l, l) for l in labels] + + # this is my weirdness + if self.config.replace_slash_with_dot: + labels = [l.replace("/", ".") for l in labels] + + if fname is None: + # this file hopefully already exists and just needs it tags updated, + # let's try to find its name in the gid to fname table. 
+ fname = os.path.join(self.md, self.gids[gid]) + + else: + # new file + fname = os.path.join(self.md, "cur", fname) + + if not os.path.exists(fname): + if not self.dry_run: + print( + "missing file: reloading cache to check for changes..", + end="", + flush=True, + ) + self.__load_cache__() + fname = os.path.join(self.md, self.gids[gid]) + print("done.") + + if not os.path.exists(fname): + raise Local.RepositoryException( + "tried to update tags on non-existent file: %s" % fname + ) + + self.print_changes("tried to update tags on non-existent file: %s" % fname) + try: + nmsg = db.get(fname) + except LookupError: + nmsg = None + + if nmsg is None: + self.print_changes( + "adding message: %s: %s, with tags: %s" % (gid, fname, str(labels)) + ) + if not self.dry_run: + try: + (nmsg, _) = db.add(fname, sync_flags=True) + except notmuch2.FileNotEmailError: + print("%s is not an email" % fname) + return True + + # adding initial tags + with nmsg.frozen(): + for t in labels: + nmsg.tags.add(t) + + for t in self.new_tags: + nmsg.tags.add(t) + + nmsg.tags.to_maildir_flags() + self.__update_cache__(nmsg) + + return True + + else: + # message is already in db, set local tags to match remote tags + otags = nmsg.tags + igntags = otags & self.ignore_labels + otags = otags - self.ignore_labels # remove ignored tags while checking + if otags != set(labels): + labels.extend(igntags) # add back local ignored tags before adding + if not self.dry_run: + with nmsg.frozen(): + nmsg.tags.clear() + for t in labels: + nmsg.tags.add(t) + nmsg.tags.to_maildir_flags() + self.__update_cache__(nmsg, (gid, fname)) + + self.print_changes( + "changing tags on message: %s from: %s to: %s" + % (gid, str(otags), str(labels)) + ) + + return True + else: + return False + + def print_changes(self, changes): + if self.dry_run: + print("(dry-run) " + changes) + elif self.verbose: + print(changes) diff --git a/lieer/nobar.py b/lieer/nobar.py index 988f89e..347816d 100644 --- a/lieer/nobar.py +++ 
b/lieer/nobar.py @@ -20,83 +20,82 @@ # along with this program. If not, see . import time -from math import floor +from math import floor -class tqdm: - def __init__ (self, iterable = None, leave = True, total = None, desc = '', *args, **kwargs): - self.desc = desc - self.args = args - self.kwargs = kwargs - - if total is not None: - print (desc, '(%d)' % total, '...', end = '', flush = True) - else: - print (desc, '...', end = '', flush = True) - self.start = time.perf_counter () - self.it = 0 +class tqdm: + def __init__(self, iterable=None, leave=True, total=None, desc="", *args, **kwargs): + self.desc = desc + self.args = args + self.kwargs = kwargs - if iterable is not None: - self.iterable = (i for i in iterable) + if total is not None: + print(desc, "(%d)" % total, "...", end="", flush=True) + else: + print(desc, "...", end="", flush=True) - def __next__ (self): - if self.iterable is not None: - self.update (1) + self.start = time.perf_counter() + self.it = 0 - try: - return next(self.iterable) - except StopIteration: - self.close () - raise - else: - raise StopIteration + if iterable is not None: + self.iterable = (i for i in iterable) - def __iter__ (self): - return self + def __next__(self): + if self.iterable is not None: + self.update(1) - def update (self, n, *args): - self.it += n + try: + return next(self.iterable) + except StopIteration: + self.close() + raise + else: + raise StopIteration - INTERVAL = 10 + def __iter__(self): + return self - if (self.it % INTERVAL == 0): - print ('.', end = '', flush = True) + def update(self, n, *args): + self.it += n - def set_description (self, *args, **kwargs): - pass + INTERVAL = 10 - def close (self): - self.end = time.perf_counter () - print ('done:', self.it, 'its in', self.pp_duration (self.end - self.start)) + if self.it % INTERVAL == 0: + print(".", end="", flush=True) - def pp_duration (self, d = None): - dys = floor (d / (24 * 60 * 60)) - d = d - (dys * 24 * 60 * 60) + def set_description(self, *args, 
**kwargs): + pass - h = floor (d / (60 * 60)) - d = d - (h * 60 * 60) + def close(self): + self.end = time.perf_counter() + print("done:", self.it, "its in", self.pp_duration(self.end - self.start)) - m = floor (d / 60) - d = d - (m * 60) + def pp_duration(self, d=None): + dys = floor(d / (24 * 60 * 60)) + d = d - (dys * 24 * 60 * 60) - s = d + h = floor(d / (60 * 60)) + d = d - (h * 60 * 60) - o = '' - above = False - if dys > 0: - o = '%dd-' % dys - above = True + m = floor(d / 60) + d = d - (m * 60) - if above or h > 0: - o = o + '%02dh:' % h - above = True + s = d - if above or m > 0: - o = o + '%02dm:' % m - above = True + o = "" + above = False + if dys > 0: + o = "%dd-" % dys + above = True - o = o + '%06.3fs' % s + if above or h > 0: + o = o + "%02dh:" % h + above = True - return o + if above or m > 0: + o = o + "%02dm:" % m + above = True + o = o + "%06.3fs" % s + return o diff --git a/lieer/remote.py b/lieer/remote.py index d872d77..46d52e8 100644 --- a/lieer/remote.py +++ b/lieer/remote.py @@ -23,732 +23,862 @@ from google.oauth2.credentials import Credentials from google.auth.transport.requests import Request + class Remote: - SCOPES = ['https://www.googleapis.com/auth/gmail.readonly', 'https://www.googleapis.com/auth/gmail.labels', 'https://www.googleapis.com/auth/gmail.modify'] - APPLICATION_NAME = 'Lieer' - CLIENT_SECRET_FILE = None - authorized = False - - # nothing to see here, move along.. - # - # no seriously: this is not dangerous to keep here, in order to gain - # access to an users account the access_token and/or refresh_token must be - # compromised. these are stored locally. 
- # - # * https://github.com/gauteh/lieer/pull/9 - # * https://stackoverflow.com/questions/25957027/oauth-2-installed-application-client-secret-considerationsgoogle-api/43061998#43061998 - # * https://stackoverflow.com/questions/19615372/client-secret-in-oauth-2-0?rq=1 - # - OAUTH2_CLIENT_SECRET = { - "client_id":"753933720722-ju82fu305lii0v9rdo6mf9hj40l5juv0.apps.googleusercontent.com", - "project_id":"capable-pixel-160614", - "auth_uri":"https://accounts.google.com/o/oauth2/auth", - "token_uri":"https://accounts.google.com/o/oauth2/token", - "auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs", - "client_secret":"8oudEG0Tvb7YI2V0ykp2Pzz9", - "redirect_uris":["urn:ietf:wg:oauth:2.0:oob", "http://localhost"] + SCOPES = [ + "https://www.googleapis.com/auth/gmail.readonly", + "https://www.googleapis.com/auth/gmail.labels", + "https://www.googleapis.com/auth/gmail.modify", + ] + APPLICATION_NAME = "Lieer" + CLIENT_SECRET_FILE = None + authorized = False + + # nothing to see here, move along.. + # + # no seriously: this is not dangerous to keep here, in order to gain + # access to an users account the access_token and/or refresh_token must be + # compromised. these are stored locally. 
+ # + # * https://github.com/gauteh/lieer/pull/9 + # * https://stackoverflow.com/questions/25957027/oauth-2-installed-application-client-secret-considerationsgoogle-api/43061998#43061998 + # * https://stackoverflow.com/questions/19615372/client-secret-in-oauth-2-0?rq=1 + # + OAUTH2_CLIENT_SECRET = { + "client_id": "753933720722-ju82fu305lii0v9rdo6mf9hj40l5juv0.apps.googleusercontent.com", + "project_id": "capable-pixel-160614", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://accounts.google.com/o/oauth2/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_secret": "8oudEG0Tvb7YI2V0ykp2Pzz9", + "redirect_uris": ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"], } - # Not used, here for documentation purposes - special_labels = [ 'INBOX', - 'SPAM', - 'TRASH', - 'UNREAD', - 'STARRED', - 'IMPORTANT', - 'SENT', - 'DRAFT', - 'CHAT', - 'CATEGORY_PERSONAL', - 'CATEGORY_SOCIAL', - 'CATEGORY_PROMOTIONS', - 'CATEGORY_UPDATES', - 'CATEGORY_FORUMS' - ] - - # these cannot be changed manually - read_only_labels = set(['SENT', 'DRAFT']) - read_only_tags = set(['sent', 'draft']) - - DEFAULT_IGNORE_LABELS = [ 'CATEGORY_PERSONAL', - 'CATEGORY_SOCIAL', - 'CATEGORY_PROMOTIONS', - 'CATEGORY_UPDATES', - 'CATEGORY_FORUMS', - ] - - ignore_labels = set() - - # query to use - query = '-in:chats' - - not_sync = set (['CHAT']) - - # used to indicate whether all messages that should be updated where updated - all_updated = True - - # Handle exponential back-offs in non-batch requests. - _delay = 0 - _delay_ok = 0 - MAX_DELAY = 100 - MAX_CONNECTION_ERRORS = 20 - - ## Batch requests should generally be of size 50, and at most 100. Best overall - ## performance is likely to be at 50 since we will not be throttled. 
- ## - ## * https://developers.google.com/gmail/api/guides/batch - ## * https://developers.google.com/gmail/api/v1/reference/quota - BATCH_REQUEST_SIZE = 50 - MIN_BATCH_REQUEST_SIZE = 1 - - class BatchException (Exception): - pass - - class UserRateException (Exception): - pass - - class GenericException (Exception): - pass - - class NoHistoryException (Exception): - pass - - def __init__ (self, g): - self.gmailieer = g - - assert g.local.loaded, "local repository must be loaded!" - - self.CLIENT_SECRET_FILE = g.credentials_file - self.account = g.local.config.account - self.dry_run = g.dry_run - self.verbose = g.verbose - - self.ignore_labels = self.gmailieer.local.config.ignore_remote_labels - - def __require_auth__ (func): - def func_wrap (self, *args, **kwargs): - if not self.authorized: - self.authorize () - return func (self, *args, **kwargs) - return func_wrap - - def __wait_delay__ (self): - if self._delay: - time.sleep (self._delay) - - def __request_done__ (self, success): - if success: - if self._delay: - if self._delay_ok > 10: - # after 10 good requests, reduce request delay - self._delay = self._delay // 2 - self._delay_ok = 0 + # Not used, here for documentation purposes + special_labels = [ + "INBOX", + "SPAM", + "TRASH", + "UNREAD", + "STARRED", + "IMPORTANT", + "SENT", + "DRAFT", + "CHAT", + "CATEGORY_PERSONAL", + "CATEGORY_SOCIAL", + "CATEGORY_PROMOTIONS", + "CATEGORY_UPDATES", + "CATEGORY_FORUMS", + ] + + # these cannot be changed manually + read_only_labels = set(["SENT", "DRAFT"]) + read_only_tags = set(["sent", "draft"]) + + DEFAULT_IGNORE_LABELS = [ + "CATEGORY_PERSONAL", + "CATEGORY_SOCIAL", + "CATEGORY_PROMOTIONS", + "CATEGORY_UPDATES", + "CATEGORY_FORUMS", + ] + + ignore_labels = set() + + # query to use + query = "-in:chats" + + not_sync = set(["CHAT"]) + + # used to indicate whether all messages that should be updated where updated + all_updated = True + + # Handle exponential back-offs in non-batch requests. 
+ _delay = 0 + _delay_ok = 0 + MAX_DELAY = 100 + MAX_CONNECTION_ERRORS = 20 + + ## Batch requests should generally be of size 50, and at most 100. Best overall + ## performance is likely to be at 50 since we will not be throttled. + ## + ## * https://developers.google.com/gmail/api/guides/batch + ## * https://developers.google.com/gmail/api/v1/reference/quota + BATCH_REQUEST_SIZE = 50 + MIN_BATCH_REQUEST_SIZE = 1 + + class BatchException(Exception): + pass + + class UserRateException(Exception): + pass + + class GenericException(Exception): + pass + + class NoHistoryException(Exception): + pass + + def __init__(self, g): + self.gmailieer = g + + assert g.local.loaded, "local repository must be loaded!" + + self.CLIENT_SECRET_FILE = g.credentials_file + self.account = g.local.config.account + self.dry_run = g.dry_run + self.verbose = g.verbose + + self.ignore_labels = self.gmailieer.local.config.ignore_remote_labels + + def __require_auth__(func): + def func_wrap(self, *args, **kwargs): + if not self.authorized: + self.authorize() + return func(self, *args, **kwargs) + + return func_wrap + + def __wait_delay__(self): + if self._delay: + time.sleep(self._delay) + + def __request_done__(self, success): + if success: + if self._delay: + if self._delay_ok > 10: + # after 10 good requests, reduce request delay + self._delay = self._delay // 2 + self._delay_ok = 0 + else: + self._delay_ok += 1 else: - self._delay_ok += 1 - else: - self._delay = self._delay * 2 + 1 - self._delay_ok = 0 - if self._delay <= self.MAX_DELAY: - print ("remote: request failed, increasing delay between requests to: %d s" % self._delay) - else: - print ("remote: increased delay to more than maximum of %d s." 
% self.MAX_DELAY) - raise Remote.GenericException ("cannot increase delay more to more than maximum %d s" % self.MAX_DELAY) - - - - @__require_auth__ - def get_labels (self): - results = self.service.users ().labels ().list (userId = self.account).execute () - labels = results.get ('labels', []) - - self.labels = {} - self.invlabels = {} - for l in labels: - self.labels[l['id']] = l['name'] - self.invlabels[l['name']] = l['id'] - - return self.labels - - @__require_auth__ - def get_current_history_id (self, start): - """ - Get the current history id of the mailbox - """ - try: - results = self.service.users ().history ().list (userId = self.account, startHistoryId = start).execute () - if 'historyId' in results: - return int(results['historyId']) - else: - raise Remote.GenericException ("no historyId field returned") - - except googleapiclient.errors.HttpError: - # this happens if the original historyId is too old, - # try to get last message and the historyId from it. - for mset in self.all_messages (1): - (total, mset) = mset - m = mset[0] - msg = self.get_message (m['id']) - return int(msg['historyId']) - - @__require_auth__ - def is_history_id_valid (self, historyId): - """ - Check if the historyId is valid or too old. 
- """ - try: - results = self.service.users().history().list(userId = self.account, startHistoryId = historyId).execute() - if 'historyId' in results: - return True - else: - raise Remote.GenericException("no historyId field returned") - - except googleapiclient.errors.HttpError: - return False - - @__require_auth__ - def get_history_since (self, start): - """ - Get all changes since start historyId - """ - self.__wait_delay__ () - results = self.service.users ().history ().list (userId = self.account, startHistoryId = start).execute () - if 'history' in results: - self.__request_done__ (True) - yield results['history'] - - # no history field means that there is no history - - while 'nextPageToken' in results: - pt = results['nextPageToken'] - - self.__wait_delay__ () - _results = self.service.users ().history ().list (userId = self.account, startHistoryId = start, pageToken = pt).execute () - - if 'history' in _results: - self.__request_done__ (True) - results = _results - yield results['history'] - else: - print ("remote: no 'history' when more pages were indicated.") - if not self.gmailieer.local.config.ignore_empty_history: - self.__request_done__ (False) - print ("You can ignore this error with: gmi set --ignore-empty-history (https://github.com/gauteh/lieer/issues/120)") - raise Remote.NoHistoryException () - else: - self.__request_done__ (True) - - @__require_auth__ - def all_messages (self, limit = None): - """ - Get a list of all messages - """ - - self.__wait_delay__ () - results = self.service.users ().messages ().list (userId = self.account, q = self.query, maxResults = limit, includeSpamTrash = True).execute () - - if 'messages' in results: - self.__request_done__ (True) - yield (results['resultSizeEstimate'], results['messages']) - - # no messages field presumably means no messages - - while 'nextPageToken' in results: - pt = results['nextPageToken'] - _results = self.service.users ().messages ().list (userId = self.account, pageToken = pt, q = 
self.query, maxResults = limit, includeSpamTrash = True).execute () - - if 'messages' in _results: - self.__request_done__ (True) - results = _results - yield (results['resultSizeEstimate'], results['messages']) - else: - self.__request_done__ (True) - print ("remote: warning: no messages when several pages were indicated.") - break - - @__require_auth__ - def get_messages (self, gids, cb, format): - """ - Get the messages - """ - - max_req = self.BATCH_REQUEST_SIZE - req_ok = 0 - N = len (gids) - i = 0 - j = 0 - - # How much to wait before contacting the remote. - user_rate_delay = 0 - # How many requests with the current delay returned ok. - user_rate_ok = 0 - - conn_errors = 0 - - msg_batch = [] # queue up received batch and send in one go to content / db routine - - def _cb (rid, resp, excep): - nonlocal j, msg_batch - if excep is not None: - if type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 404: - # message could not be found this is probably a deleted message, spam or draft - # message since these are not included in the messages.get() query by default. - print ("remote: could not find remote message: %s!" % gids[j]) - j += 1 - return - - elif type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 400: - # message id invalid, probably caused by stray files in the mail repo - print ("remote: message id: %s is invalid! are there any non-lieer files created in the lieer repository?" 
% gids[j]) - j += 1 - return - - elif type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 403: - raise Remote.UserRateException (excep) - - else: - raise Remote.BatchException(excep) - else: - j += 1 - - msg_batch.append (resp) - - while i < N: - n = 0 - j = i - batch = self.service.new_batch_http_request (callback = _cb) - - while n < max_req and i < N: - gid = gids[i] - batch.add (self.service.users ().messages ().get (userId = self.account, - id = gid, format = format)) - n += 1 - i += 1 - - # we wait if there is a user_rate_delay - if user_rate_delay: - print ("remote: waiting %.1f seconds.." % user_rate_delay) - time.sleep (user_rate_delay) - - try: - batch.execute () - - # gradually reduce user delay upon every ok batch - user_rate_ok += 1 - if user_rate_delay > 0 and user_rate_ok > 0: - user_rate_delay = user_rate_delay // 2 - print ("remote: decreasing delay to %s" % user_rate_delay) - user_rate_ok = 0 - - # gradually increase batch request size upon every ok request - req_ok += 1 - if max_req < self.BATCH_REQUEST_SIZE and req_ok > 0: - max_req = min (max_req * 2, self.BATCH_REQUEST_SIZE) - print ("remote: increasing batch request size to: %d" % max_req) - req_ok = 0 - - conn_errors = 0 - - except Remote.UserRateException: - user_rate_delay = user_rate_delay * 2 + 1 - print ("remote: user rate error, increasing delay to %s" % user_rate_delay) + self._delay = self._delay * 2 + 1 + self._delay_ok = 0 + if self._delay <= self.MAX_DELAY: + print( + "remote: request failed, increasing delay between requests to: %d s" + % self._delay + ) + else: + print( + "remote: increased delay to more than maximum of %d s." 
+ % self.MAX_DELAY + ) + raise Remote.GenericException( + "cannot increase delay more to more than maximum %d s" + % self.MAX_DELAY + ) + + @__require_auth__ + def get_labels(self): + results = self.service.users().labels().list(userId=self.account).execute() + labels = results.get("labels", []) + + self.labels = {} + self.invlabels = {} + for l in labels: + self.labels[l["id"]] = l["name"] + self.invlabels[l["name"]] = l["id"] + + return self.labels + + @__require_auth__ + def get_current_history_id(self, start): + """ + Get the current history id of the mailbox + """ + try: + results = ( + self.service.users() + .history() + .list(userId=self.account, startHistoryId=start) + .execute() + ) + if "historyId" in results: + return int(results["historyId"]) + else: + raise Remote.GenericException("no historyId field returned") + + except googleapiclient.errors.HttpError: + # this happens if the original historyId is too old, + # try to get last message and the historyId from it. + for mset in self.all_messages(1): + (total, mset) = mset + m = mset[0] + msg = self.get_message(m["id"]) + return int(msg["historyId"]) + + @__require_auth__ + def is_history_id_valid(self, historyId): + """ + Check if the historyId is valid or too old. 
+ """ + try: + results = ( + self.service.users() + .history() + .list(userId=self.account, startHistoryId=historyId) + .execute() + ) + if "historyId" in results: + return True + else: + raise Remote.GenericException("no historyId field returned") + + except googleapiclient.errors.HttpError: + return False + + @__require_auth__ + def get_history_since(self, start): + """ + Get all changes since start historyId + """ + self.__wait_delay__() + results = ( + self.service.users() + .history() + .list(userId=self.account, startHistoryId=start) + .execute() + ) + if "history" in results: + self.__request_done__(True) + yield results["history"] + + # no history field means that there is no history + + while "nextPageToken" in results: + pt = results["nextPageToken"] + + self.__wait_delay__() + _results = ( + self.service.users() + .history() + .list(userId=self.account, startHistoryId=start, pageToken=pt) + .execute() + ) + + if "history" in _results: + self.__request_done__(True) + results = _results + yield results["history"] + else: + print("remote: no 'history' when more pages were indicated.") + if not self.gmailieer.local.config.ignore_empty_history: + self.__request_done__(False) + print( + "You can ignore this error with: gmi set --ignore-empty-history (https://github.com/gauteh/lieer/issues/120)" + ) + raise Remote.NoHistoryException() + else: + self.__request_done__(True) + + @__require_auth__ + def all_messages(self, limit=None): + """ + Get a list of all messages + """ + + self.__wait_delay__() + results = ( + self.service.users() + .messages() + .list( + userId=self.account, + q=self.query, + maxResults=limit, + includeSpamTrash=True, + ) + .execute() + ) + + if "messages" in results: + self.__request_done__(True) + yield (results["resultSizeEstimate"], results["messages"]) + + # no messages field presumably means no messages + + while "nextPageToken" in results: + pt = results["nextPageToken"] + _results = ( + self.service.users() + .messages() + .list( + 
userId=self.account, + pageToken=pt, + q=self.query, + maxResults=limit, + includeSpamTrash=True, + ) + .execute() + ) + + if "messages" in _results: + self.__request_done__(True) + results = _results + yield (results["resultSizeEstimate"], results["messages"]) + else: + self.__request_done__(True) + print("remote: warning: no messages when several pages were indicated.") + break + + @__require_auth__ + def get_messages(self, gids, cb, format): + """ + Get the messages + """ + + max_req = self.BATCH_REQUEST_SIZE + req_ok = 0 + N = len(gids) + i = 0 + j = 0 + + # How much to wait before contacting the remote. + user_rate_delay = 0 + # How many requests with the current delay returned ok. user_rate_ok = 0 - i = j # reset - - except Remote.BatchException: - max_req = max_req // 2 - req_ok = 0 - - if max_req >= self.MIN_BATCH_REQUEST_SIZE: - i = j # reset - print ("remote: reducing batch request size to: %d" % max_req) - else: - max_req = self.MIN_BATCH_REQUEST_SIZE - raise Remote.BatchException ("cannot reduce request any further") - - except ConnectionError as ex: - print ("connection failed, re-trying:", ex) - i = j # reset - conn_errors += 1 - - if conn_errors > self.MAX_CONNECTION_ERRORS: - print ("too many connection errors") - raise - - time.sleep (1) - - finally: - # handle batch - if len(msg_batch) > 0: - cb (msg_batch) - msg_batch.clear () - - @__require_auth__ - def get_message (self, gid, format = 'minimal'): - """ - Get a single message - """ - self.__wait_delay__ () - try: - result = self.service.users ().messages ().get (userId = self.account, - id = gid, format = format).execute () - - except googleapiclient.errors.HttpError as excep: - if excep.resp.status == 403 or excep.resp.status == 500: - self.__request_done__ (False) - return self.get_message (gid, format) - else: - raise - - self.__request_done__ (True) - - return result - - def authorize (self, reauth = False): - if reauth: - credential_path = self.gmailieer.local.credentials_f - if 
os.path.exists (credential_path): - print ("reauthorizing..") - os.unlink (credential_path) - - self.credentials = self.__get_credentials__ () - - timeout = self.gmailieer.local.config.timeout - if timeout == 0: - timeout = None - - self.service = discovery.build('gmail', 'v1', credentials=self.credentials) - self.authorized = True - - def __store_credentials__ (self, path, credentials): - """ - Store valid credentials in json format - """ - with open(path, 'w') as storage: - storage.write(credentials.to_json()) - - def __get_credentials__ (self): - """ - Gets valid user credentials from storage. - - If nothing has been stored, or if the stored credentials are invalid, - the OAuth2 flow is completed to obtain the new credentials. - - Returns: - Credentials, the obtained credential. - """ - credentials = None - credential_path = self.gmailieer.local.credentials_f - - if os.path.exists(credential_path): - credentials = Credentials.from_authorized_user_file(credential_path, self.SCOPES) - - if not credentials or not credentials.valid: - if credentials and credentials.expired and credentials.refresh_token: - credentials.refresh(Request()) - - elif self.CLIENT_SECRET_FILE is not None: - # use user-provided client_secret - print ("auth: using user-provided api id and secret") - if not os.path.exists (self.CLIENT_SECRET_FILE): - raise Remote.GenericException ("error: no secret client API key file found for authentication at: %s" % self.CLIENT_SECRET_FILE) - - flow = InstalledAppFlow.from_client_secrets_file(self.CLIENT_SECRET_FILE, self.SCOPES) - credentials = flow.run_local_server() - self.__store_credentials__(credential_path, credentials) - - else: - # use default id and secret - client_config = { - "installed": { - "auth_uri": self.OAUTH2_CLIENT_SECRET['auth_uri'], - "token_uri": self.OAUTH2_CLIENT_SECRET['token_uri'], - "client_id": self.OAUTH2_CLIENT_SECRET['client_id'], - "client_secret": self.OAUTH2_CLIENT_SECRET['client_secret'] - } - } - flow = 
InstalledAppFlow.from_client_config(client_config, self.SCOPES) - credentials = flow.run_local_server() - self.__store_credentials__(credential_path, credentials) - - return credentials - - @__require_auth__ - def update (self, gmsg, nmsg, last_hist, force): - """ - Gets a message and checks which labels it should add and which to delete, returns a - operation which can be submitted in a batch. - """ - - # DUPLICATES: - # - # there might be duplicate messages across gmail accounts with the same - # message id, messages outside the repository are skipped. if there are - # duplicate messages in the same account they are all updated. if one of - # them is changed remotely it will not be updated, any changes on it will - # then be pulled back on next pull overwriting the changes that might have - # been pushed on another duplicate. this will again trigger a change on the - # next push for the other duplicates. after the 2nd pull things should - # settle unless there's been any local changes. - # + conn_errors = 0 - gid = gmsg['id'] - - found = False - for f in nmsg.filenames(): - if gid in str(f): - found = True - - # this can happen if a draft is edited remotely and is synced before it is sent. we'll - # just skip it and it should be resolved on the next pull. - if not found: - print ("update: gid does not match any file name of message, probably a draft, skipping: %s" % gid) - return None - - glabels = gmsg.get('labelIds', []) - - # translate labels. Remote.get_labels () must have been called first - labels = [] - for l in glabels: - ll = self.labels.get(l, None) - - if ll is None and not self.gmailieer.local.config.drop_non_existing_label: - err = "error: GMail supplied a label that there exists no record for! 
You can `gmi set --drop-non-existing-labels` to work around the issue (https://github.com/gauteh/lieer/issues/48)" - print (err) - raise Remote.GenericException (err) - elif ll is None: - pass # drop - else: - labels.append (ll) - - # remove ignored labels - labels = set(labels) - labels = labels - self.ignore_labels - - # translate to notmuch tags - labels = [self.gmailieer.local.translate_labels.get (l, l) for l in labels] - - # this is my weirdness - if self.gmailieer.local.config.replace_slash_with_dot: - labels = [l.replace ('/', '.') for l in labels] - - labels = set(labels) - - # current tags - tags = nmsg.tags - - # remove special notmuch tags - tags = tags - self.gmailieer.local.ignore_labels - - add = list((tags - labels) - self.read_only_tags) - rem = list((labels - tags) - self.read_only_tags) - - # translate back to gmail labels - add = [self.gmailieer.local.labels_translate.get (k, k) for k in add] - rem = [self.gmailieer.local.labels_translate.get (k, k) for k in rem] - - if self.gmailieer.local.config.replace_slash_with_dot: - add = [a.replace ('.', '/') for a in add] - rem = [r.replace ('.', '/') for r in rem] - - if len(add) > 0 or len(rem) > 0: - # check if this message has been changed remotely since last pull - hist_id = int(gmsg['historyId']) - if hist_id > last_hist: - if not force: - print ("update: remote has changed, will not update: %s (add: %s, rem: %s) (%d > %d)" % (gid, add, rem, hist_id, last_hist)) - self.all_updated = False - return None - - if 'TRASH' in add: - if 'SPAM' in add: - print ("update: %s: Trying to add both TRASH and SPAM, dropping SPAM (add: %s, rem: %s)" % (gid, add, rem)) - add.remove('SPAM') - if 'INBOX' in add: - print ("update: %s: Trying to add both TRASH and INBOX, dropping INBOX (add: %s, rem: %s)" % (gid, add, rem)) - add.remove('INBOX') - elif 'SPAM' in add: - if 'INBOX' in add: - print ("update: %s: Trying to add both SPAM and INBOX, dropping INBOX (add: %s, rem: %s)" % (gid, add, rem)) - add.remove('INBOX') 
- - self.print_changes ("gid: %s: add: %s, remove: %s" % (gid, str(add), str(rem))) - if self.dry_run: - return None - else: - return self.__push_tags__ (gid, add, rem) - - else: - return None - - @__require_auth__ - def __push_tags__ (self, gid, add, rem): - """ - Push message changes - """ - - _add = [] - for a in add: - _a = self.invlabels.get (a, None) - if _a is None: - # label does not exist - (lid, ll) = self.__create_label__ (a) - self.labels[lid] = ll - self.invlabels[ll] = lid - _add.append (lid) - else: - _add.append (_a) - - _rem = [self.invlabels[r] for r in rem] - - body = { 'addLabelIds' : _add, - 'removeLabelIds' : _rem } - - return self.service.users ().messages ().modify (userId = self.account, - id = gid, body = body) - - @__require_auth__ - def push_changes (self, actions, cb): - """ - Push label changes - """ - max_req = self.BATCH_REQUEST_SIZE - N = len(actions) - i = 0 - j = 0 - - # How much to wait before contacting the remote. - user_rate_delay = 0 - # How many requests with the current delay returned ok. - user_rate_ok = 0 - - def _cb(rid, resp, excep): - nonlocal j - if excep is not None: - if type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 404: - # message could not be found this is probably a deleted message, spam or draft - # message since these are not included in the messages.get() query by default. - print ("remote: could not find remote message: %s!" % resp) - j += 1 - return - - elif type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 400: - # message id invalid, probably caused by stray files in the mail repo - print ("remote: message id is invalid! are there any non-lieer files created in the lieer repository? 
%s" % resp) - j += 1 - return - - elif type(excep) is googleapiclient.errors.HttpError and excep.resp.status == 403: - raise Remote.UserRateException (excep) + msg_batch = ( + [] + ) # queue up received batch and send in one go to content / db routine + + def _cb(rid, resp, excep): + nonlocal j, msg_batch + if excep is not None: + if ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 404 + ): + # message could not be found this is probably a deleted message, spam or draft + # message since these are not included in the messages.get() query by default. + print("remote: could not find remote message: %s!" % gids[j]) + j += 1 + return + + elif ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 400 + ): + # message id invalid, probably caused by stray files in the mail repo + print( + "remote: message id: %s is invalid! are there any non-lieer files created in the lieer repository?" + % gids[j] + ) + j += 1 + return + + elif ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 403 + ): + raise Remote.UserRateException(excep) + + else: + raise Remote.BatchException(excep) + else: + j += 1 + + msg_batch.append(resp) + + while i < N: + n = 0 + j = i + batch = self.service.new_batch_http_request(callback=_cb) + + while n < max_req and i < N: + gid = gids[i] + batch.add( + self.service.users() + .messages() + .get(userId=self.account, id=gid, format=format) + ) + n += 1 + i += 1 + + # we wait if there is a user_rate_delay + if user_rate_delay: + print("remote: waiting %.1f seconds.." 
% user_rate_delay) + time.sleep(user_rate_delay) + + try: + batch.execute() + + # gradually reduce user delay upon every ok batch + user_rate_ok += 1 + if user_rate_delay > 0 and user_rate_ok > 0: + user_rate_delay = user_rate_delay // 2 + print("remote: decreasing delay to %s" % user_rate_delay) + user_rate_ok = 0 + + # gradually increase batch request size upon every ok request + req_ok += 1 + if max_req < self.BATCH_REQUEST_SIZE and req_ok > 0: + max_req = min(max_req * 2, self.BATCH_REQUEST_SIZE) + print("remote: increasing batch request size to: %d" % max_req) + req_ok = 0 + + conn_errors = 0 + + except Remote.UserRateException: + user_rate_delay = user_rate_delay * 2 + 1 + print( + "remote: user rate error, increasing delay to %s" % user_rate_delay + ) + user_rate_ok = 0 + + i = j # reset + + except Remote.BatchException: + max_req = max_req // 2 + req_ok = 0 + + if max_req >= self.MIN_BATCH_REQUEST_SIZE: + i = j # reset + print("remote: reducing batch request size to: %d" % max_req) + else: + max_req = self.MIN_BATCH_REQUEST_SIZE + raise Remote.BatchException("cannot reduce request any further") + + except ConnectionError as ex: + print("connection failed, re-trying:", ex) + i = j # reset + conn_errors += 1 + + if conn_errors > self.MAX_CONNECTION_ERRORS: + print("too many connection errors") + raise + + time.sleep(1) + + finally: + # handle batch + if len(msg_batch) > 0: + cb(msg_batch) + msg_batch.clear() + + @__require_auth__ + def get_message(self, gid, format="minimal"): + """ + Get a single message + """ + self.__wait_delay__() + try: + result = ( + self.service.users() + .messages() + .get(userId=self.account, id=gid, format=format) + .execute() + ) + + except googleapiclient.errors.HttpError as excep: + if excep.resp.status == 403 or excep.resp.status == 500: + self.__request_done__(False) + return self.get_message(gid, format) + else: + raise + + self.__request_done__(True) + + return result + + def authorize(self, reauth=False): + if reauth: + 
credential_path = self.gmailieer.local.credentials_f + if os.path.exists(credential_path): + print("reauthorizing..") + os.unlink(credential_path) + + self.credentials = self.__get_credentials__() + + timeout = self.gmailieer.local.config.timeout + if timeout == 0: + timeout = None + + self.service = discovery.build("gmail", "v1", credentials=self.credentials) + self.authorized = True + + def __store_credentials__(self, path, credentials): + """ + Store valid credentials in json format + """ + with open(path, "w") as storage: + storage.write(credentials.to_json()) + + def __get_credentials__(self): + """ + Gets valid user credentials from storage. + + If nothing has been stored, or if the stored credentials are invalid, + the OAuth2 flow is completed to obtain the new credentials. + + Returns: + Credentials, the obtained credential. + """ + credentials = None + credential_path = self.gmailieer.local.credentials_f + + if os.path.exists(credential_path): + credentials = Credentials.from_authorized_user_file( + credential_path, self.SCOPES + ) + + if not credentials or not credentials.valid: + if credentials and credentials.expired and credentials.refresh_token: + credentials.refresh(Request()) + + elif self.CLIENT_SECRET_FILE is not None: + # use user-provided client_secret + print("auth: using user-provided api id and secret") + if not os.path.exists(self.CLIENT_SECRET_FILE): + raise Remote.GenericException( + "error: no secret client API key file found for authentication at: %s" + % self.CLIENT_SECRET_FILE + ) + + flow = InstalledAppFlow.from_client_secrets_file( + self.CLIENT_SECRET_FILE, self.SCOPES + ) + credentials = flow.run_local_server() + self.__store_credentials__(credential_path, credentials) + + else: + # use default id and secret + client_config = { + "installed": { + "auth_uri": self.OAUTH2_CLIENT_SECRET["auth_uri"], + "token_uri": self.OAUTH2_CLIENT_SECRET["token_uri"], + "client_id": self.OAUTH2_CLIENT_SECRET["client_id"], + "client_secret": 
self.OAUTH2_CLIENT_SECRET["client_secret"], + } + } + flow = InstalledAppFlow.from_client_config(client_config, self.SCOPES) + credentials = flow.run_local_server() + self.__store_credentials__(credential_path, credentials) + + return credentials + + @__require_auth__ + def update(self, gmsg, nmsg, last_hist, force): + """ + Gets a message and checks which labels it should add and which to delete, returns a + operation which can be submitted in a batch. + """ + + # DUPLICATES: + # + # there might be duplicate messages across gmail accounts with the same + # message id, messages outside the repository are skipped. if there are + # duplicate messages in the same account they are all updated. if one of + # them is changed remotely it will not be updated, any changes on it will + # then be pulled back on next pull overwriting the changes that might have + # been pushed on another duplicate. this will again trigger a change on the + # next push for the other duplicates. after the 2nd pull things should + # settle unless there's been any local changes. + # + + gid = gmsg["id"] + + found = False + for f in nmsg.filenames(): + if gid in str(f): + found = True + + # this can happen if a draft is edited remotely and is synced before it is sent. we'll + # just skip it and it should be resolved on the next pull. + if not found: + print( + "update: gid does not match any file name of message, probably a draft, skipping: %s" + % gid + ) + return None + + glabels = gmsg.get("labelIds", []) + + # translate labels. Remote.get_labels () must have been called first + labels = [] + for l in glabels: + ll = self.labels.get(l, None) + + if ll is None and not self.gmailieer.local.config.drop_non_existing_label: + err = "error: GMail supplied a label that there exists no record for! 
You can `gmi set --drop-non-existing-labels` to work around the issue (https://github.com/gauteh/lieer/issues/48)" + print(err) + raise Remote.GenericException(err) + elif ll is None: + pass # drop + else: + labels.append(ll) + + # remove ignored labels + labels = set(labels) + labels = labels - self.ignore_labels + + # translate to notmuch tags + labels = [self.gmailieer.local.translate_labels.get(l, l) for l in labels] + + # this is my weirdness + if self.gmailieer.local.config.replace_slash_with_dot: + labels = [l.replace("/", ".") for l in labels] + + labels = set(labels) + + # current tags + tags = nmsg.tags + + # remove special notmuch tags + tags = tags - self.gmailieer.local.ignore_labels + + add = list((tags - labels) - self.read_only_tags) + rem = list((labels - tags) - self.read_only_tags) + + # translate back to gmail labels + add = [self.gmailieer.local.labels_translate.get(k, k) for k in add] + rem = [self.gmailieer.local.labels_translate.get(k, k) for k in rem] + + if self.gmailieer.local.config.replace_slash_with_dot: + add = [a.replace(".", "/") for a in add] + rem = [r.replace(".", "/") for r in rem] + + if len(add) > 0 or len(rem) > 0: + # check if this message has been changed remotely since last pull + hist_id = int(gmsg["historyId"]) + if hist_id > last_hist: + if not force: + print( + "update: remote has changed, will not update: %s (add: %s, rem: %s) (%d > %d)" + % (gid, add, rem, hist_id, last_hist) + ) + self.all_updated = False + return None + + if "TRASH" in add: + if "SPAM" in add: + print( + "update: %s: Trying to add both TRASH and SPAM, dropping SPAM (add: %s, rem: %s)" + % (gid, add, rem) + ) + add.remove("SPAM") + if "INBOX" in add: + print( + "update: %s: Trying to add both TRASH and INBOX, dropping INBOX (add: %s, rem: %s)" + % (gid, add, rem) + ) + add.remove("INBOX") + elif "SPAM" in add: + if "INBOX" in add: + print( + "update: %s: Trying to add both SPAM and INBOX, dropping INBOX (add: %s, rem: %s)" + % (gid, add, rem) + ) + 
add.remove("INBOX") + + self.print_changes( + "gid: %s: add: %s, remove: %s" % (gid, str(add), str(rem)) + ) + if self.dry_run: + return None + else: + return self.__push_tags__(gid, add, rem) else: - raise Remote.BatchException(excep) - else: - j += 1 - - cb(resp) - - while i < N: - n = 0 - j = i - batch = self.service.new_batch_http_request(callback = _cb) - - while n < max_req and i < N: - a = actions[i] - batch.add(a) - n += 1 - i += 1 - - # we wait if there is a user_rate_delay - if user_rate_delay: - print ("remote: waiting %.1f seconds.." % user_rate_delay) - time.sleep (user_rate_delay) - - try: - batch.execute () - - # gradually reduce if we had 10 ok batches - user_rate_ok += 1 - if user_rate_ok > 10: - user_rate_delay = user_rate_delay // 2 - user_rate_ok = 0 - - except Remote.UserRateException: - user_rate_delay = user_rate_delay * 2 + 1 - print ("remote: user rate error, increasing delay to %s" % user_rate_delay) + return None + + @__require_auth__ + def __push_tags__(self, gid, add, rem): + """ + Push message changes + """ + + _add = [] + for a in add: + _a = self.invlabels.get(a, None) + if _a is None: + # label does not exist + (lid, ll) = self.__create_label__(a) + self.labels[lid] = ll + self.invlabels[ll] = lid + _add.append(lid) + else: + _add.append(_a) + + _rem = [self.invlabels[r] for r in rem] + + body = {"addLabelIds": _add, "removeLabelIds": _rem} + + return ( + self.service.users() + .messages() + .modify(userId=self.account, id=gid, body=body) + ) + + @__require_auth__ + def push_changes(self, actions, cb): + """ + Push label changes + """ + max_req = self.BATCH_REQUEST_SIZE + N = len(actions) + i = 0 + j = 0 + + # How much to wait before contacting the remote. + user_rate_delay = 0 + # How many requests with the current delay returned ok. 
user_rate_ok = 0 - i = j # reset - - except Remote.BatchException: - if max_req > self.MIN_BATCH_REQUEST_SIZE: - max_req = max_req / 2 - i = j # reset - print ("reducing batch request size to: %d" % max_req) - else: - raise Remote.BatchException ("cannot reduce request any further") - - @__require_auth__ - def __create_label__ (self, l): - """ - Creates a new label - - Returns: - - (labelId, label) - - """ + def _cb(rid, resp, excep): + nonlocal j + if excep is not None: + if ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 404 + ): + # message could not be found this is probably a deleted message, spam or draft + # message since these are not included in the messages.get() query by default. + print("remote: could not find remote message: %s!" % resp) + j += 1 + return + + elif ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 400 + ): + # message id invalid, probably caused by stray files in the mail repo + print( + "remote: message id is invalid! are there any non-lieer files created in the lieer repository? %s" + % resp + ) + j += 1 + return + + elif ( + type(excep) is googleapiclient.errors.HttpError + and excep.resp.status == 403 + ): + raise Remote.UserRateException(excep) + + else: + raise Remote.BatchException(excep) + else: + j += 1 + + cb(resp) + + while i < N: + n = 0 + j = i + batch = self.service.new_batch_http_request(callback=_cb) + + while n < max_req and i < N: + a = actions[i] + batch.add(a) + n += 1 + i += 1 + + # we wait if there is a user_rate_delay + if user_rate_delay: + print("remote: waiting %.1f seconds.." 
% user_rate_delay) + time.sleep(user_rate_delay) + + try: + batch.execute() + + # gradually reduce if we had 10 ok batches + user_rate_ok += 1 + if user_rate_ok > 10: + user_rate_delay = user_rate_delay // 2 + user_rate_ok = 0 + + except Remote.UserRateException: + user_rate_delay = user_rate_delay * 2 + 1 + print( + "remote: user rate error, increasing delay to %s" % user_rate_delay + ) + user_rate_ok = 0 + + i = j # reset + + except Remote.BatchException: + if max_req > self.MIN_BATCH_REQUEST_SIZE: + max_req = max_req / 2 + i = j # reset + print("reducing batch request size to: %d" % max_req) + else: + raise Remote.BatchException("cannot reduce request any further") + + @__require_auth__ + def __create_label__(self, l): + """ + Creates a new label + + Returns: + + (labelId, label) + + """ + + print("push: creating label: %s.." % l) + + label = { + "messageListVisibility": "show", + "name": l, + "labelListVisibility": "labelShow", + } - print ("push: creating label: %s.." % l) + if not self.dry_run: + self.__wait_delay__() + try: + lr = ( + self.service.users() + .labels() + .create(userId=self.account, body=label) + .execute() + ) - label = { 'messageListVisibility' : 'show', - 'name' : l, - 'labelListVisibility' : 'labelShow', - } + return (lr["id"], l) - if not self.dry_run: - self.__wait_delay__ () - try: - lr = self.service.users ().labels ().create (userId = self.account, body = label).execute () + except googleapiclient.errors.HttpError as excep: + if excep.resp.status == 403 or excep.resp.status == 500: + self.__request_done__(False) + return self.__create_label__(l) + else: + raise - return (lr['id'], l) + self.__request_done__(True) - except googleapiclient.errors.HttpError as excep: - if excep.resp.status == 403 or excep.resp.status == 500: - self.__request_done__ (False) - return self.__create_label__ (l) else: - raise - - self.__request_done__ (True) - - else: - return (None, None) - - - @__require_auth__ - def send (self, message, threadId = None): - 
""" - Send message + return (None, None) - message: MIME message as bytes + @__require_auth__ + def send(self, message, threadId=None): + """ + Send message - Returns: + message: MIME message as bytes - Message - """ - import base64 + Returns: - message = { 'raw': base64.urlsafe_b64encode(message).decode() } + Message + """ + import base64 - if threadId is not None: - message['threadId'] = threadId + message = {"raw": base64.urlsafe_b64encode(message).decode()} - return self.service.users().messages().send(userId = self.account, body = message).execute() + if threadId is not None: + message["threadId"] = threadId - def print_changes (self, changes): - if self.dry_run: - print ("(dry-run) " + changes) - elif self.verbose: - print(changes) + return ( + self.service.users() + .messages() + .send(userId=self.account, body=message) + .execute() + ) + def print_changes(self, changes): + if self.dry_run: + print("(dry-run) " + changes) + elif self.verbose: + print(changes) diff --git a/lieer/resume.py b/lieer/resume.py index 7297d1d..b16fd1b 100644 --- a/lieer/resume.py +++ b/lieer/resume.py @@ -19,70 +19,75 @@ import json import tempfile + class ResumePull: - lastId = None - version = None - VERSION = 1 - - meta_fetched = None - - @staticmethod - def load(resume_file): - """ - Construct from existing resume - """ - with open(resume_file) as fd: - j = json.load(fd) - - version = j['version'] - if version != ResumePull.VERSION: - print("error: mismatching version in resume file: %d != %d" % (version, ResumePull.VERSION)) - raise ValueError() - - lastId = j['lastId'] - meta_fetched = j['meta_fetched'] - - r = ResumePull(resume_file, lastId) - r.meta_fetched = meta_fetched - - return r - - @staticmethod - def new(resume_file, lastId): - r = ResumePull(resume_file, lastId) - r.meta_fetched = [] - r.save() - - return r - - def __init__(self, resume_file, lastId): - self.resume_file = resume_file - self.lastId = lastId - self.meta_fetched = [] - - def update(self, fetched): - 
""" - fetched: new messages with metadata fetched - """ - self.meta_fetched.extend(fetched) - self.meta_fetched = list(set(self.meta_fetched)) - self.save() - - def save(self): - j = { - 'version': self.VERSION, - 'lastId': self.lastId, - 'meta_fetched': self.meta_fetched + lastId = None + version = None + VERSION = 1 + + meta_fetched = None + + @staticmethod + def load(resume_file): + """ + Construct from existing resume + """ + with open(resume_file) as fd: + j = json.load(fd) + + version = j["version"] + if version != ResumePull.VERSION: + print( + "error: mismatching version in resume file: %d != %d" + % (version, ResumePull.VERSION) + ) + raise ValueError() + + lastId = j["lastId"] + meta_fetched = j["meta_fetched"] + + r = ResumePull(resume_file, lastId) + r.meta_fetched = meta_fetched + + return r + + @staticmethod + def new(resume_file, lastId): + r = ResumePull(resume_file, lastId) + r.meta_fetched = [] + r.save() + + return r + + def __init__(self, resume_file, lastId): + self.resume_file = resume_file + self.lastId = lastId + self.meta_fetched = [] + + def update(self, fetched): + """ + fetched: new messages with metadata fetched + """ + self.meta_fetched.extend(fetched) + self.meta_fetched = list(set(self.meta_fetched)) + self.save() + + def save(self): + j = { + "version": self.VERSION, + "lastId": self.lastId, + "meta_fetched": self.meta_fetched, } - with tempfile.NamedTemporaryFile (mode = 'w+', dir = os.path.dirname(self.resume_file), delete = False) as fd: - json.dump(j, fd) - - if os.path.exists(self.resume_file): - os.rename(self.resume_file, self.resume_file + '.bak') + with tempfile.NamedTemporaryFile( + mode="w+", dir=os.path.dirname(self.resume_file), delete=False + ) as fd: + json.dump(j, fd) - os.rename(fd.name, self.resume_file) + if os.path.exists(self.resume_file): + os.rename(self.resume_file, self.resume_file + ".bak") - def delete(self): - os.unlink(self.resume_file) + os.rename(fd.name, self.resume_file) + def delete(self): + 
os.unlink(self.resume_file) diff --git a/setup.py b/setup.py index cfeb313..78ca231 100644 --- a/setup.py +++ b/setup.py @@ -7,6 +7,7 @@ # Always prefer setuptools over distutils from setuptools import setup, find_packages + # To use a consistent encoding from codecs import open from os import path @@ -14,63 +15,55 @@ here = path.abspath(path.dirname(__file__)) # Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: +with open(path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() setup( - name='lieer', - + name="lieer", # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html - version='1.4', - - description='Fast fetch and two-way tag synchronization between notmuch and GMail', + version="1.4", + description="Fast fetch and two-way tag synchronization between notmuch and GMail", long_description=long_description, - long_description_content_type='text/markdown', - + long_description_content_type="text/markdown", # The project's main homepage. - url='https://github.com/gauteh/lieer', - + url="https://github.com/gauteh/lieer", # Author details - author='Gaute Hope', - author_email='eg@gaute.vetsj.com', - + author="Gaute Hope", + author_email="eg@gaute.vetsj.com", # Choose your license - license='GPLv3+', - + license="GPLv3+", # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable - 'Development Status :: 4 - Beta', - + "Development Status :: 4 - Beta", # Indicate who your project is intended for - # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 
- 'Programming Language :: Python :: 3', + "Programming Language :: Python :: 3", ], - # What does your project relate to? - keywords='gmail notmuch synchronization tags', - + keywords="gmail notmuch synchronization tags", # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). - packages= find_packages (), - + packages=find_packages(), # Alternatively, if you want to distribute just a my_module.py, uncomment # this: # py_modules=["my_module"], - # List run-time dependencies here. These will be installed by pip when # your project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html - install_requires=['google_auth_oauthlib', 'google-api-python-client', 'tqdm', 'notmuch2'], - + install_requires=[ + "google_auth_oauthlib", + "google-api-python-client", + "tqdm", + "notmuch2", + ], # List additional groups of dependencies here (e.g. development # dependencies). You can install these using the following syntax, # for example: @@ -79,20 +72,17 @@ # 'dev': ['check-manifest'], # 'test': ['coverage'], }, - # If there are data files included in your packages that need to be # installed, specify them here. If using Python 2.6 or less, then these # have to be included in MANIFEST.in as well. package_data={ # 'sample': ['package_data.dat'], }, - # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. See: # https://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa # In this case, 'data_file' will be installed into '/my_data' # data_files=[('my_data', ['data/data_file'])], - # To provide executable scripts, use entry points in preference to the # "scripts" keyword. Entry points provide cross-platform support and allow # pip to create the appropriate form of executable for the target platform. 
@@ -101,7 +91,7 @@ # 'sample=sample:main', # ], # }, - scripts = [ - 'gmi', - ] + scripts=[ + "gmi", + ], ) diff --git a/tests/test_local.py b/tests/test_local.py index 4008134..02a9949 100644 --- a/tests/test_local.py +++ b/tests/test_local.py @@ -4,11 +4,11 @@ def test_update_translation_list(gmi): l = lieer.Local(gmi) - l.update_translation_list_with_overlay(['a', '1', 'b', '2']) - assert l.translate_labels['a'] == '1' - assert l.translate_labels['b'] == '2' - assert l.labels_translate['1'] == 'a' - assert l.labels_translate['2'] == 'b' + l.update_translation_list_with_overlay(["a", "1", "b", "2"]) + assert l.translate_labels["a"] == "1" + assert l.translate_labels["b"] == "2" + assert l.labels_translate["1"] == "a" + assert l.labels_translate["2"] == "b" with pytest.raises(Exception): - l.update_translation_list_with_overlay(['a', '1', 'b', '2', 'c']) + l.update_translation_list_with_overlay(["a", "1", "b", "2", "c"])