From 754e896208c056f34d35c8810a2aef4b48f283e0 Mon Sep 17 00:00:00 2001
From: Zsailer
Date: Thu, 19 Dec 2019 10:45:16 -0800
Subject: [PATCH 1/2] reformatting using black

---
 jupyter_server/__init__.py | 5 +-
 jupyter_server/__main__.py | 3 +-
 jupyter_server/_sysinfo.py | 25 +-
 jupyter_server/_tz.py | 11 +-
 jupyter_server/_version.py | 4 +-
 jupyter_server/auth/__main__.py | 66 +-
 jupyter_server/auth/login.py | 100 +-
 jupyter_server/auth/logout.py | 13 +-
 jupyter_server/auth/security.py | 35 +-
 jupyter_server/base/handlers.py | 422 ++---
 jupyter_server/base/zmqhandlers.py | 85 +-
 jupyter_server/config_manager.py | 20 +-
 jupyter_server/edit/handlers.py | 19 +-
 jupyter_server/extension/application.py | 201 +--
 jupyter_server/extension/handler.py | 11 +-
 jupyter_server/extension/serverextension.py | 204 ++-
 jupyter_server/files/handlers.py | 45 +-
 jupyter_server/gateway/handlers.py | 129 +-
 jupyter_server/gateway/managers.py | 449 +++--
 jupyter_server/i18n/__init__.py | 43 +-
 jupyter_server/kernelspecs/handlers.py | 8 +-
 jupyter_server/log.py | 12 +-
 jupyter_server/nbconvert/handlers.py | 94 +-
 jupyter_server/prometheus/log_functions.py | 4 +-
 jupyter_server/prometheus/metrics.py | 15 +-
 jupyter_server/serverapp.py | 1470 ++++++++++-------
 jupyter_server/services/api/handlers.py | 17 +-
 jupyter_server/services/config/handlers.py | 4 +-
 jupyter_server/services/config/manager.py | 16 +-
 .../services/contents/checkpoints.py | 32 +-
 .../services/contents/filecheckpoints.py | 52 +-
 jupyter_server/services/contents/fileio.py | 84 +-
 .../services/contents/filemanager.py | 225 +--
 jupyter_server/services/contents/handlers.py | 148 +-
 .../services/contents/largefilemanager.py | 60 +-
 jupyter_server/services/contents/manager.py | 230 +--
 jupyter_server/services/kernels/handlers.py | 187 ++-
 .../services/kernels/kernelmanager.py | 191 ++-
 .../services/kernelspecs/handlers.py | 68 +-
 jupyter_server/services/nbconvert/handlers.py | 2 +-
 jupyter_server/services/security/handlers.py | 16 +-
 jupyter_server/services/sessions/handlers.py | 104 +-
 .../services/sessions/sessionmanager.py | 95 +-
 jupyter_server/terminal/__init__.py | 46 +-
 jupyter_server/terminal/api_handlers.py | 14 +-
 jupyter_server/terminal/handlers.py | 15 +-
 jupyter_server/transutils.py | 6 +-
 jupyter_server/utils.py | 111 +-
 jupyter_server/view/handlers.py | 14 +-
 tests/auth/test_security.py | 20 +-
 tests/conftest.py | 118 +-
 tests/extension/conftest.py | 21 +-
 tests/extension/test_app.py | 26 +-
 tests/extension/test_entrypoint.py | 16 +-
 tests/extension/test_handler.py | 24 +-
 tests/extension/test_serverextension.py | 81 +-
 tests/nbconvert/test_handlers.py | 158 +-
 tests/services/api/test_api.py | 8 +-
 tests/services/config/test_api.py | 53 +-
 tests/services/contents/test_api.py | 855 +++++-----
 tests/services/contents/test_config.py | 4 +-
 tests/services/contents/test_fileio.py | 74 +-
 .../contents/test_largefilemanager.py | 93 +-
 tests/services/contents/test_manager.py | 389 ++---
 tests/services/kernels/test_api.py | 192 +--
 tests/services/kernels/test_config.py | 14 +-
 tests/services/kernelspecs/test_api.py | 86 +-
 tests/services/nbconvert/test_api.py | 12 +-
 tests/services/sessions/test_api.py | 207 ++-
 tests/services/sessions/test_manager.py | 287 ++--
 tests/test_config_manager.py | 60 +-
 tests/test_files.py | 149 +-
 tests/test_gateway.py | 301 ++--
 tests/test_paths.py | 32 +-
 tests/test_serialize.py | 11 +-
 tests/test_serverapp.py | 62 +-
 tests/test_utils.py | 43 +-
 tests/test_version.py | 33 +-
 78 files changed, 4622 insertions(+), 4037 deletions(-)

diff --git a/jupyter_server/__init__.py b/jupyter_server/__init__.py
index 9611848e60..a1da6790bb 100644
--- a/jupyter_server/__init__.py
+++ b/jupyter_server/__init__.py
@@ -1,13 +1,12 @@
 """The Jupyter Server"""
+from ._version import version_info, __version__
 import os
 
 DEFAULT_STATIC_FILES_PATH = os.path.join(os.path.dirname(__file__), "static")
 
 DEFAULT_TEMPLATE_PATH_LIST = [
     os.path.dirname(__file__),
-    os.path.join(os.path.dirname(__file__), 'templates'),
+    os.path.join(os.path.dirname(__file__), "templates"),
 ]
 
 del os
-
-from ._version import version_info, __version__
diff --git a/jupyter_server/__main__.py b/jupyter_server/__main__.py
index 0df765623b..18d5ab8021 100644
--- a/jupyter_server/__main__.py
+++ b/jupyter_server/__main__.py
@@ -1,5 +1,6 @@
 from __future__ import absolute_import
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     from jupyter_server import serverapp as app
+
     app.launch_new_instance()
diff --git a/jupyter_server/_sysinfo.py b/jupyter_server/_sysinfo.py
index d11c625cb6..73f2b5e672 100644
--- a/jupyter_server/_sysinfo.py
+++ b/jupyter_server/_sysinfo.py
@@ -18,6 +18,7 @@
 
 import jupyter_server
 
+
 def pkg_commit_hash(pkg_path):
     """Get short form of commit hash given directory `pkg_path`
 
@@ -45,23 +46,25 @@
     par_path = pkg_path
     while cur_path != par_path:
         cur_path = par_path
-        if p.exists(p.join(cur_path, '.git')):
+        if p.exists(p.join(cur_path, ".git")):
             try:
-                proc = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'],
-                                        stdout=subprocess.PIPE,
-                                        stderr=subprocess.PIPE,
-                                        cwd=pkg_path)
+                proc = subprocess.Popen(
+                    ["git", "rev-parse", "--short", "HEAD"],
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.PIPE,
+                    cwd=pkg_path,
+                )
                 repo_commit, _ = proc.communicate()
             except OSError:
                 repo_commit = None
 
             if repo_commit:
-                return 'repository', repo_commit.strip().decode('ascii')
+                return "repository", repo_commit.strip().decode("ascii")
             else:
-                return u'', u''
+                return u"", u""
         par_path = p.dirname(par_path)
-
-    return u'', u''
+
+    return u"", u""
 
 
 def pkg_info(pkg_path):
@@ -89,11 +92,11 @@
         platform=platform.platform(),
         os_name=os.name,
         default_encoding=encoding.DEFAULT_ENCODING,
-        )
+    )
+
 
 def get_sys_info():
     """Return useful information about the system as a dict."""
     p = os.path
     path = p.realpath(p.dirname(p.abspath(p.join(jupyter_server.__file__))))
     return pkg_info(path)
-
diff --git a/jupyter_server/_tz.py b/jupyter_server/_tz.py
index 1df39e51d4..babc89c67a 100644
--- a/jupyter_server/_tz.py
+++ b/jupyter_server/_tz.py
@@ -13,6 +13,7 @@
 # constant for zero offset
 ZERO = timedelta(0)
 
+
 class tzUTC(tzinfo):
     """tzinfo object for UTC (zero offset)"""
 
@@ -22,21 +23,27 @@ def utcoffset(self, d):
         return ZERO
 
     def dst(self, d):
         return ZERO
 
+
 UTC = tzUTC()
 
+
 def utc_aware(unaware):
     """decorator for adding UTC tzinfo to datetime's utcfoo methods"""
+
     def utc_method(*args, **kwargs):
         dt = unaware(*args, **kwargs)
         return dt.replace(tzinfo=UTC)
+
     return utc_method
 
+
 utcfromtimestamp = utc_aware(datetime.utcfromtimestamp)
 utcnow = utc_aware(datetime.utcnow)
 
+
 def isoformat(dt):
     """Return iso-formatted timestamp
-    
+
     Like .isoformat(), but uses Z for UTC instead of +00:00
     """
-    return dt.isoformat().replace('+00:00', 'Z')
+    return dt.isoformat().replace("+00:00", "Z")
diff --git a/jupyter_server/_version.py b/jupyter_server/_version.py
index e1ca7b8e4c..5a57522508 100644
--- a/jupyter_server/_version.py
+++ b/jupyter_server/_version.py
@@ -9,5 +9,5 @@
 # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN
**without dots**. -version_info = (0, 2, 0, '.dev0') -__version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) +version_info = (0, 2, 0, "") +__version__ = ".".join(map(str, version_info[:3])) + "".join(version_info[3:]) diff --git a/jupyter_server/auth/__main__.py b/jupyter_server/auth/__main__.py index 322891df1c..322095d1ae 100644 --- a/jupyter_server/auth/__main__.py +++ b/jupyter_server/auth/__main__.py @@ -5,38 +5,44 @@ import argparse import sys + def set_password(args): - password = args.password - while not password : - password1 = getpass("" if args.quiet else "Provide password: ") - password_repeat = getpass("" if args.quiet else "Repeat password: ") - if password1 != password_repeat: - print("Passwords do not match, try again") - elif len(password1) < 4: - print("Please provide at least 4 characters") - else: - password = password1 + password = args.password + while not password: + password1 = getpass("" if args.quiet else "Provide password: ") + password_repeat = getpass("" if args.quiet else "Repeat password: ") + if password1 != password_repeat: + print("Passwords do not match, try again") + elif len(password1) < 4: + print("Please provide at least 4 characters") + else: + password = password1 + + password_hash = passwd(password) + cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) + cfg.update("jupyter_server_config", {"ServerApp": {"password": password_hash,}}) + if not args.quiet: + print("password stored in config dir: %s" % jupyter_config_dir()) - password_hash = passwd(password) - cfg = BaseJSONConfigManager(config_dir=jupyter_config_dir()) - cfg.update('jupyter_server_config', { - 'ServerApp': { - 'password': password_hash, - } - }) - if not args.quiet: - print("password stored in config dir: %s" % jupyter_config_dir()) def main(argv): - parser = argparse.ArgumentParser(argv[0]) - subparsers = parser.add_subparsers() - parser_password = subparsers.add_parser('password', help='sets a password for your jupyter server') - parser_password.add_argument("password", help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", - nargs="?") - parser_password.add_argument("--quiet", help="suppress messages", action="store_true") - parser_password.set_defaults(function=set_password) - args = parser.parse_args(argv[1:]) - args.function(args) - + parser = argparse.ArgumentParser(argv[0]) + subparsers = parser.add_subparsers() + parser_password = subparsers.add_parser( + "password", help="sets a password for your jupyter server" + ) + parser_password.add_argument( + "password", + help="password to set, if not given, a password will be queried for (NOTE: this may not be safe)", + nargs="?", + ) + parser_password.add_argument( + "--quiet", help="suppress messages", action="store_true" + ) + parser_password.set_defaults(function=set_password) + args = parser.parse_args(argv[1:]) + args.function(args) + + if __name__ == "__main__": - main(sys.argv) + main(sys.argv) diff --git a/jupyter_server/auth/login.py b/jupyter_server/auth/login.py index 4d93cc192d..67731ee2e3 100644 --- a/jupyter_server/auth/login.py +++ b/jupyter_server/auth/login.py @@ -21,11 +21,15 @@ class LoginHandler(JupyterHandler): authenticates with a hashed password from the configuration. 
""" + def _render(self, message=None): - self.write(self.render_template('login.html', - next=url_escape(self.get_argument('next', default=self.base_url)), + self.write( + self.render_template( + "login.html", + next=url_escape(self.get_argument("next", default=self.base_url)), message=message, - )) + ) + ) def _redirect_safe(self, url, default=None): """Redirect if url is on our PATH @@ -40,10 +44,10 @@ def _redirect_safe(self, url, default=None): # require that next_url be absolute path within our path allow = False # OR pass our cross-origin check - if '://' in url: + if "://" in url: # if full URL, run our cross-origin check: parsed = urlparse(url.lower()) - origin = '%s://%s' % (parsed.scheme, parsed.netloc) + origin = "%s://%s" % (parsed.scheme, parsed.netloc) if self.allow_origin: allow = self.allow_origin == origin elif self.allow_origin_pat: @@ -56,7 +60,7 @@ def _redirect_safe(self, url, default=None): def get(self): if self.current_user: - next_url = self.get_argument('next', default=self.base_url) + next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) else: self._render() @@ -69,42 +73,44 @@ def passwd_check(self, a, b): return passwd_check(a, b) def post(self): - typed_password = self.get_argument('password', default=u'') - new_password = self.get_argument('new_password', default=u'') + typed_password = self.get_argument("password", default=u"") + new_password = self.get_argument("new_password", default=u"") if self.get_login_available(self.settings): - if self.passwd_check(self.hashed_password, typed_password) and not new_password: + if ( + self.passwd_check(self.hashed_password, typed_password) + and not new_password + ): self.set_login_cookie(self, uuid.uuid4().hex) elif self.token and self.token == typed_password: self.set_login_cookie(self, uuid.uuid4().hex) - if new_password and self.settings.get('allow_password_change'): - config_dir = self.settings.get('config_dir') - config_file = os.path.join(config_dir, 'jupyter_server_config.json') + if new_password and self.settings.get("allow_password_change"): + config_dir = self.settings.get("config_dir") + config_file = os.path.join(config_dir, "jupyter_server_config.json") set_password(new_password, config_file=config_file) self.log.info("Wrote hashed password to %s" % config_file) else: self.set_status(401) - self._render(message={'error': 'Invalid credentials'}) + self._render(message={"error": "Invalid credentials"}) return - - next_url = self.get_argument('next', default=self.base_url) + next_url = self.get_argument("next", default=self.base_url) self._redirect_safe(next_url) @classmethod def set_login_cookie(cls, handler, user_id=None): """Call this on handlers to set the login cookie for success""" - cookie_options = handler.settings.get('cookie_options', {}) - cookie_options.setdefault('httponly', True) + cookie_options = handler.settings.get("cookie_options", {}) + cookie_options.setdefault("httponly", True) # tornado <4.2 has a bug that considers secure==True as soon as # 'secure' kwarg is passed to set_secure_cookie - if handler.settings.get('secure_cookie', handler.request.protocol == 'https'): - cookie_options.setdefault('secure', True) - cookie_options.setdefault('path', handler.base_url) + if handler.settings.get("secure_cookie", handler.request.protocol == "https"): + cookie_options.setdefault("secure", True) + cookie_options.setdefault("path", handler.base_url) handler.set_secure_cookie(handler.cookie_name, user_id, **cookie_options) return user_id - auth_header_pat = 
re.compile('token\s+(.+)', re.IGNORECASE) + auth_header_pat = re.compile("token\s+(.+)", re.IGNORECASE) @classmethod def get_token(cls, handler): @@ -116,10 +122,12 @@ def get_token(cls, handler): - in header: Authorization: token """ - user_token = handler.get_argument('token', '') + user_token = handler.get_argument("token", "") if not user_token: # get it from Authorization header - m = cls.auth_header_pat.match(handler.request.headers.get('Authorization', '')) + m = cls.auth_header_pat.match( + handler.request.headers.get("Authorization", "") + ) if m: user_token = m.group(1) return user_token @@ -146,10 +154,10 @@ def is_token_authenticated(cls, handler): - xsrf protection - skip origin-checks for scripts """ - if getattr(handler, '_user_id', None) is None: + if getattr(handler, "_user_id", None) is None: # ensure get_user has been called, so we know if we're token-authenticated handler.get_current_user() - return getattr(handler, '_token_authenticated', False) + return getattr(handler, "_token_authenticated", False) @classmethod def get_user(cls, handler): @@ -159,12 +167,16 @@ def get_user(cls, handler): """ # Can't call this get_current_user because it will collide when # called on LoginHandler itself. - if getattr(handler, '_user_id', None): + if getattr(handler, "_user_id", None): return handler._user_id user_id = cls.get_user_token(handler) if user_id is None: - get_secure_cookie_kwargs = handler.settings.get('get_secure_cookie_kwargs', {}) - user_id = handler.get_secure_cookie(handler.cookie_name, **get_secure_cookie_kwargs ) + get_secure_cookie_kwargs = handler.settings.get( + "get_secure_cookie_kwargs", {} + ) + user_id = handler.get_secure_cookie( + handler.cookie_name, **get_secure_cookie_kwargs + ) else: cls.set_login_cookie(handler, user_id) # Record that the current request has been authenticated with a token. @@ -175,12 +187,14 @@ def get_user(cls, handler): # extra warnings. But don't do this on a request with *no* cookie, # because that can erroneously log you out (see gh-3365) if handler.get_cookie(handler.cookie_name) is not None: - handler.log.warning("Clearing invalid/expired login cookie %s", handler.cookie_name) + handler.log.warning( + "Clearing invalid/expired login cookie %s", handler.cookie_name + ) handler.clear_login_cookie() if not handler.login_available: # Completely insecure! No authentication at all. # No need to warn here, though; validate_security will have already done that. - user_id = 'anonymous' + user_id = "anonymous" # cache value for future retrievals on the same request handler._user_id = user_id @@ -189,7 +203,7 @@ def get_user(cls, handler): @classmethod def get_user_token(cls, handler): """Identify the user based on a token in the URL or Authorization header - + Returns: - uuid if authenticated - None if not @@ -202,7 +216,10 @@ def get_user_token(cls, handler): authenticated = False if user_token == token: # token-authenticated, set the login cookie - handler.log.debug("Accepting token-authenticated connection from %s", handler.request.remote_ip) + handler.log.debug( + "Accepting token-authenticated connection from %s", + handler.request.remote_ip, + ) authenticated = True if authenticated: @@ -210,7 +227,6 @@ def get_user_token(cls, handler): else: return None - @classmethod def validate_security(cls, app, ssl_options=None): """Check the application's security. 
@@ -220,16 +236,20 @@ def validate_security(cls, app, ssl_options=None): if not app.ip: warning = "WARNING: The Jupyter server is listening on all IP addresses" if ssl_options is None: - app.log.warning(warning + " and not using encryption. This " - "is not recommended.") + app.log.warning( + warning + " and not using encryption. This " "is not recommended." + ) if not app.password and not app.token: - app.log.warning(warning + " and not using authentication. " - "This is highly insecure and not recommended.") + app.log.warning( + warning + " and not using authentication. " + "This is highly insecure and not recommended." + ) else: if not app.password and not app.token: app.log.warning( "All authentication is disabled." - " Anyone who can connect to this server will be able to run code.") + " Anyone who can connect to this server will be able to run code." + ) @classmethod def password_from_settings(cls, settings): @@ -237,9 +257,9 @@ def password_from_settings(cls, settings): If there is no configured password, an empty string will be returned. """ - return settings.get('password', u'') + return settings.get("password", u"") @classmethod def get_login_available(cls, settings): """Whether this LoginHandler is needed - and therefore whether the login page should be displayed.""" - return bool(cls.password_from_settings(settings) or settings.get('token')) + return bool(cls.password_from_settings(settings) or settings.get("token")) diff --git a/jupyter_server/auth/logout.py b/jupyter_server/auth/logout.py index 30e2732c45..499c51beef 100644 --- a/jupyter_server/auth/logout.py +++ b/jupyter_server/auth/logout.py @@ -8,21 +8,16 @@ class LogoutHandler(JupyterHandler): - def get(self): self.clear_login_cookie() if self.login_available: - message = { - 'info': 'Successfully logged out.' - } + message = {"info": "Successfully logged out."} else: message = { - 'warning': 'Cannot log out. Jupyter Server authentication ' - 'is disabled.' + "warning": "Cannot log out. Jupyter Server authentication " + "is disabled." } - self.write(self.render_template('logout.html', - message=message)) + self.write(self.render_template("logout.html", message=message)) default_handlers = [(r"/logout", LogoutHandler)] - diff --git a/jupyter_server/auth/security.py b/jupyter_server/auth/security.py index 0fdb8eb5f0..3bbb6a9b95 100644 --- a/jupyter_server/auth/security.py +++ b/jupyter_server/auth/security.py @@ -21,7 +21,7 @@ salt_len = 12 -def passwd(passphrase=None, algorithm='sha1'): +def passwd(passphrase=None, algorithm="sha1"): """Generate hashed password and salt for use in server configuration. In the server configuration, set `c.ServerApp.password` to @@ -49,21 +49,21 @@ def passwd(passphrase=None, algorithm='sha1'): """ if passphrase is None: for i in range(3): - p0 = getpass.getpass('Enter password: ') - p1 = getpass.getpass('Verify password: ') + p0 = getpass.getpass("Enter password: ") + p1 = getpass.getpass("Verify password: ") if p0 == p1: passphrase = p0 break else: - print('Passwords do not match.') + print("Passwords do not match.") else: - raise ValueError('No matching passwords found. Giving up.') + raise ValueError("No matching passwords found. 
Giving up.") h = hashlib.new(algorithm) - salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) - h.update(cast_bytes(passphrase, 'utf-8') + str_to_bytes(salt, 'ascii')) + salt = ("%0" + str(salt_len) + "x") % random.getrandbits(4 * salt_len) + h.update(cast_bytes(passphrase, "utf-8") + str_to_bytes(salt, "ascii")) - return ':'.join((algorithm, salt, h.hexdigest())) + return ":".join((algorithm, salt, h.hexdigest())) def passwd_check(hashed_passphrase, passphrase): @@ -93,7 +93,7 @@ def passwd_check(hashed_passphrase, passphrase): False """ try: - algorithm, salt, pw_digest = hashed_passphrase.split(':', 2) + algorithm, salt, pw_digest = hashed_passphrase.split(":", 2) except (ValueError, TypeError): return False @@ -105,10 +105,11 @@ def passwd_check(hashed_passphrase, passphrase): if len(pw_digest) == 0: return False - h.update(cast_bytes(passphrase, 'utf-8') + cast_bytes(salt, 'ascii')) + h.update(cast_bytes(passphrase, "utf-8") + cast_bytes(salt, "ascii")) return h.hexdigest() == pw_digest + @contextmanager def persist_config(config_file=None, mode=0o600): """Context manager that can be used to modify a config object @@ -118,9 +119,11 @@ def persist_config(config_file=None, mode=0o600): """ if config_file is None: - config_file = os.path.join(jupyter_config_dir(), 'jupyter_server_config.json') + config_file = os.path.join(jupyter_config_dir(), "jupyter_server_config.json") - loader = JSONFileConfigLoader(os.path.basename(config_file), os.path.dirname(config_file)) + loader = JSONFileConfigLoader( + os.path.basename(config_file), os.path.dirname(config_file) + ) try: config = loader.load_config() except ConfigFileNotFound: @@ -128,15 +131,17 @@ def persist_config(config_file=None, mode=0o600): yield config - with io.open(config_file, 'w', encoding='utf8') as f: + with io.open(config_file, "w", encoding="utf8") as f: f.write(cast_unicode(json.dumps(config, indent=2))) try: os.chmod(config_file, mode) except Exception as e: tb = traceback.format_exc() - warnings.warn("Failed to set permissions on %s:\n%s" % (config_file, tb), - RuntimeWarning) + warnings.warn( + "Failed to set permissions on %s:\n%s" % (config_file, tb), RuntimeWarning + ) + def set_password(password=None, config_file=None): """Ask user for password, store it in JSON configuration file""" diff --git a/jupyter_server/base/handlers.py b/jupyter_server/base/handlers.py index 1d9bbb3cd5..8825bc1737 100755 --- a/jupyter_server/base/handlers.py +++ b/jupyter_server/base/handlers.py @@ -34,24 +34,28 @@ from jupyter_server.utils import is_hidden, url_path_join, url_is_absolute, url_escape from jupyter_server.services.security import csp_report_uri -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Top-level handlers -#----------------------------------------------------------------------------- -non_alphanum = re.compile(r'[^A-Za-z0-9]') +# ----------------------------------------------------------------------------- +non_alphanum = re.compile(r"[^A-Za-z0-9]") _sys_info_cache = None + + def json_sys_info(): global _sys_info_cache if _sys_info_cache is None: _sys_info_cache = json.dumps(get_sys_info()) return _sys_info_cache + def log(): if Application.initialized(): return Application.instance().log else: return app_log + class AuthenticatedHandler(web.RequestHandler): """A RequestHandler with an authenticated user.""" @@ -61,19 +65,24 @@ def content_security_policy(self): Can be overridden by defining 
Content-Security-Policy in settings['headers'] """ - if 'Content-Security-Policy' in self.settings.get('headers', {}): + if "Content-Security-Policy" in self.settings.get("headers", {}): # user-specified, don't override - return self.settings['headers']['Content-Security-Policy'] - - return '; '.join([ - "frame-ancestors 'self'", - # Make sure the report-uri is relative to the base_url - "report-uri " + self.settings.get('csp_report_uri', url_path_join(self.base_url, csp_report_uri)), - ]) + return self.settings["headers"]["Content-Security-Policy"] + + return "; ".join( + [ + "frame-ancestors 'self'", + # Make sure the report-uri is relative to the base_url + "report-uri " + + self.settings.get( + "csp_report_uri", url_path_join(self.base_url, csp_report_uri) + ), + ] + ) def set_default_headers(self): headers = {} - headers.update(self.settings.get('headers', {})) + headers.update(self.settings.get("headers", {})) headers["Content-Security-Policy"] = self.content_security_policy @@ -105,18 +114,18 @@ def force_clear_cookie(self, name, path="/", domain=None): expires = datetime.datetime.utcnow() - datetime.timedelta(days=365) morsel = Morsel() - morsel.set(name, '', '""') - morsel['expires'] = httputil.format_timestamp(expires) - morsel['path'] = path + morsel.set(name, "", '""') + morsel["expires"] = httputil.format_timestamp(expires) + morsel["path"] = path if domain: - morsel['domain'] = domain + morsel["domain"] = domain self.add_header("Set-Cookie", morsel.OutputString()) def clear_login_cookie(self): - cookie_options = self.settings.get('cookie_options', {}) - path = cookie_options.setdefault('path', self.base_url) + cookie_options = self.settings.get("cookie_options", {}) + path = cookie_options.setdefault("path", self.base_url) self.clear_cookie(self.cookie_name, path=path) - if path and path != '/': + if path and path != "/": # also clear cookie on / to ensure old cookies are cleared # after the change in path behavior. # N.B. This bypasses the normal cookie handling, which can't update @@ -125,7 +134,7 @@ def clear_login_cookie(self): def get_current_user(self): if self.login_handler is None: - return 'anonymous' + return "anonymous" return self.login_handler.get_user(self) def skip_check_origin(self): @@ -134,42 +143,46 @@ def skip_check_origin(self): For example: in the default LoginHandler, if a request is token-authenticated, origin checking should be skipped. """ - if self.request.method == 'OPTIONS': + if self.request.method == "OPTIONS": # no origin-check on options requests, which are used to check origins! 
return True - if self.login_handler is None or not hasattr(self.login_handler, 'should_check_origin'): + if self.login_handler is None or not hasattr( + self.login_handler, "should_check_origin" + ): return False return not self.login_handler.should_check_origin(self) @property def token_authenticated(self): """Have I been authenticated with a token?""" - if self.login_handler is None or not hasattr(self.login_handler, 'is_token_authenticated'): + if self.login_handler is None or not hasattr( + self.login_handler, "is_token_authenticated" + ): return False return self.login_handler.is_token_authenticated(self) @property def cookie_name(self): - default_cookie_name = non_alphanum.sub('-', 'username-{}'.format( - self.request.host - )) - return self.settings.get('cookie_name', default_cookie_name) - + default_cookie_name = non_alphanum.sub( + "-", "username-{}".format(self.request.host) + ) + return self.settings.get("cookie_name", default_cookie_name) + @property def logged_in(self): """Is a user currently logged in?""" user = self.get_current_user() - return (user and not user == 'anonymous') + return user and not user == "anonymous" @property def login_handler(self): """Return the login handler for this application, if any.""" - return self.settings.get('login_handler_class', None) + return self.settings.get("login_handler_class", None) @property def token(self): """Return the login token for this application, if any.""" - return self.settings.get('token', None) + return self.settings.get("token", None) @property def login_available(self): @@ -192,7 +205,7 @@ class JupyterHandler(AuthenticatedHandler): @property def config(self): - return self.settings.get('config', None) + return self.settings.get("config", None) @property def log(self): @@ -202,92 +215,94 @@ def log(self): @property def jinja_template_vars(self): """User-supplied values to supply to jinja templates.""" - return self.settings.get('jinja_template_vars', {}) - - #--------------------------------------------------------------- + return self.settings.get("jinja_template_vars", {}) + + # --------------------------------------------------------------- # URLs - #--------------------------------------------------------------- - + # --------------------------------------------------------------- + @property def version_hash(self): """The version hash to use for cache hints for static files""" - return self.settings.get('version_hash', '') - + return self.settings.get("version_hash", "") + @property def mathjax_url(self): - url = self.settings.get('mathjax_url', '') + url = self.settings.get("mathjax_url", "") if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) - + @property def mathjax_config(self): - return self.settings.get('mathjax_config', 'TeX-AMS-MML_HTMLorMML-full,Safe') + return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") @property def base_url(self): - return self.settings.get('base_url', '/') + return self.settings.get("base_url", "/") @property def default_url(self): - return self.settings.get('default_url', '') + return self.settings.get("default_url", "") @property def ws_url(self): - return self.settings.get('websocket_url', '') + return self.settings.get("websocket_url", "") @property def contents_js_source(self): - self.log.debug("Using contents: %s", self.settings.get('contents_js_source', - 'services/contents')) - return self.settings.get('contents_js_source', 'services/contents') - - #--------------------------------------------------------------- + 
self.log.debug( + "Using contents: %s", + self.settings.get("contents_js_source", "services/contents"), + ) + return self.settings.get("contents_js_source", "services/contents") + + # --------------------------------------------------------------- # Manager objects - #--------------------------------------------------------------- - + # --------------------------------------------------------------- + @property def kernel_manager(self): - return self.settings['kernel_manager'] + return self.settings["kernel_manager"] @property def contents_manager(self): - return self.settings['contents_manager'] - + return self.settings["contents_manager"] + @property def session_manager(self): - return self.settings['session_manager'] - + return self.settings["session_manager"] + @property def terminal_manager(self): - return self.settings['terminal_manager'] - + return self.settings["terminal_manager"] + @property def kernel_spec_manager(self): - return self.settings['kernel_spec_manager'] + return self.settings["kernel_spec_manager"] @property def config_manager(self): - return self.settings['config_manager'] + return self.settings["config_manager"] - #--------------------------------------------------------------- + # --------------------------------------------------------------- # CORS - #--------------------------------------------------------------- - + # --------------------------------------------------------------- + @property def allow_origin(self): """Normal Access-Control-Allow-Origin""" - return self.settings.get('allow_origin', '') + return self.settings.get("allow_origin", "") @property def allow_origin_pat(self): """Regular expression version of allow_origin""" - return self.settings.get('allow_origin_pat', None) + return self.settings.get("allow_origin_pat", None) @property def allow_credentials(self): """Whether to set Access-Control-Allow-Credentials""" - return self.settings.get('allow_credentials', False) + return self.settings.get("allow_credentials", False) def set_default_headers(self): """Add CORS headers, if defined""" @@ -300,29 +315,26 @@ def set_default_headers(self): self.set_header("Access-Control-Allow-Origin", origin) elif ( self.token_authenticated - and "Access-Control-Allow-Origin" not in - self.settings.get('headers', {}) + and "Access-Control-Allow-Origin" not in self.settings.get("headers", {}) ): # allow token-authenticated requests cross-origin by default. # only apply this exception if allow-origin has not been specified. - self.set_header('Access-Control-Allow-Origin', - self.request.headers.get('Origin', '')) + self.set_header( + "Access-Control-Allow-Origin", self.request.headers.get("Origin", "") + ) if self.allow_credentials: - self.set_header("Access-Control-Allow-Credentials", 'true') - + self.set_header("Access-Control-Allow-Credentials", "true") + def set_attachment_header(self, filename): """Set Content-Disposition: attachment header As a method to ensure handling of filename encoding """ escaped_filename = url_escape(filename) - self.set_header('Content-Disposition', - 'attachment;' - " filename*=utf-8''{utf8}" - .format( - utf8=escaped_filename, - ) + self.set_header( + "Content-Disposition", + "attachment;" " filename*=utf-8''{utf8}".format(utf8=escaped_filename,), ) def get_origin(self): @@ -346,7 +358,7 @@ def check_origin(self, origin_to_satisfy_tornado=""): - allow unspecified host/origin (e.g. 
scripts) - allow token-authenticated requests """ - if self.allow_origin == '*' or self.skip_check_origin(): + if self.allow_origin == "*" or self.skip_check_origin(): return True host = self.request.headers.get("Host") @@ -377,14 +389,17 @@ def check_origin(self, origin_to_satisfy_tornado=""): # No CORS headers deny the request allow = False if not allow: - self.log.warning("Blocking Cross Origin API request for %s. Origin: %s, Host: %s", - self.request.path, origin, host, + self.log.warning( + "Blocking Cross Origin API request for %s. Origin: %s, Host: %s", + self.request.path, + origin, + host, ) return allow def check_xsrf_cookie(self): """Bypass xsrf cookie checks when token-authenticated""" - if self.token_authenticated or self.settings.get('disable_check_xsrf', False): + if self.token_authenticated or self.settings.get("disable_check_xsrf", False): # Token-authenticated requests do not need additional XSRF-check # Servers without authentication are vulnerable to XSRF return @@ -395,30 +410,33 @@ def check_host(self): Returns True if the request should continue, False otherwise. """ - if self.settings.get('allow_remote_access', False): + if self.settings.get("allow_remote_access", False): return True # Remove port (e.g. ':8888') from host - host = re.match(r'^(.*?)(:\d+)?$', self.request.host).group(1) + host = re.match(r"^(.*?)(:\d+)?$", self.request.host).group(1) # Browsers format IPv6 addresses like [::1]; we need to remove the [] - if host.startswith('[') and host.endswith(']'): + if host.startswith("[") and host.endswith("]"): host = host[1:-1] try: addr = ipaddress.ip_address(host) except ValueError: # Not an IP address: check against hostnames - allow = host in self.settings.get('local_hostnames', ['localhost']) + allow = host in self.settings.get("local_hostnames", ["localhost"]) else: allow = addr.is_loopback if not allow: self.log.warning( - ("Blocking request with non-local 'Host' %s (%s). " - "If the server should be accessible at that name, " - "set ServerApp.allow_remote_access to disable the check."), - host, self.request.host + ( + "Blocking request with non-local 'Host' %s (%s). " + "If the server should be accessible at that name, " + "set ServerApp.allow_remote_access to disable the check." 
+ ), + host, + self.request.host, ) return allow @@ -427,13 +445,13 @@ def prepare(self): raise web.HTTPError(403) return super(JupyterHandler, self).prepare() - #--------------------------------------------------------------- + # --------------------------------------------------------------- # template rendering - #--------------------------------------------------------------- + # --------------------------------------------------------------- def get_template(self, name): """Return the jinja template object for a given name""" - return self.settings['jinja2_env'].get_template(name) + return self.settings["jinja2_env"].get_template(name) def render_template(self, name, **ns): ns.update(self.template_namespace) @@ -447,7 +465,7 @@ def template_namespace(self): default_url=self.default_url, ws_url=self.ws_url, logged_in=self.logged_in, - allow_password_change=self.settings.get('allow_password_change'), + allow_password_change=self.settings.get("allow_password_change"), login_available=self.login_available, token_available=bool(self.token), static_url=self.static_url, @@ -456,9 +474,10 @@ def template_namespace(self): version_hash=self.version_hash, xsrf_form_html=self.xsrf_form_html, token=self.token, - xsrf_token=self.xsrf_token.decode('utf8'), - nbjs_translations=json.dumps(combine_translations( - self.request.headers.get('Accept-Language', ''))), + xsrf_token=self.xsrf_token.decode("utf8"), + nbjs_translations=json.dumps( + combine_translations(self.request.headers.get("Accept-Language", "")) + ), **self.jinja_template_vars ) @@ -467,21 +486,21 @@ def get_json_body(self): if not self.request.body: return None # Do we need to call body.decode('utf-8') here? - body = self.request.body.strip().decode(u'utf-8') + body = self.request.body.strip().decode(u"utf-8") try: model = json.loads(body) except Exception: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) - raise web.HTTPError(400, u'Invalid JSON in body of request') + raise web.HTTPError(400, u"Invalid JSON in body of request") return model def write_error(self, status_code, **kwargs): """render custom error pages""" - exc_info = kwargs.get('exc_info') - message = '' - status_message = responses.get(status_code, 'Unknown HTTP Error') - exception = '(unknown)' + exc_info = kwargs.get("exc_info") + message = "" + status_message = responses.get(status_code, "Unknown HTTP Error") + exception = "(unknown)" if exc_info: exception = exc_info[1] # get the custom message, if defined @@ -491,7 +510,7 @@ def write_error(self, status_code, **kwargs): pass # construct the custom reason, if defined - reason = getattr(exception, 'reason', '') + reason = getattr(exception, "reason", "") if reason: status_message = reason @@ -503,12 +522,12 @@ def write_error(self, status_code, **kwargs): exception=exception, ) - self.set_header('Content-Type', 'text/html') + self.set_header("Content-Type", "text/html") # render the template try: - html = self.render_template('%s.html' % status_code, **ns) + html = self.render_template("%s.html" % status_code, **ns) except TemplateNotFound: - html = self.render_template('error.html', **ns) + html = self.render_template("error.html", **ns) self.write(html) @@ -523,28 +542,28 @@ def prepare(self): def write_error(self, status_code, **kwargs): """APIHandler errors are JSON, not human pages""" - self.set_header('Content-Type', 'application/json') - message = responses.get(status_code, 'Unknown HTTP Error') + self.set_header("Content-Type", "application/json") + message = 
responses.get(status_code, "Unknown HTTP Error") reply = { - 'message': message, + "message": message, } - exc_info = kwargs.get('exc_info') + exc_info = kwargs.get("exc_info") if exc_info: e = exc_info[1] if isinstance(e, HTTPError): - reply['message'] = e.log_message or message - reply['reason'] = e.reason + reply["message"] = e.log_message or message + reply["reason"] = e.reason else: - reply['message'] = 'Unhandled error' - reply['reason'] = None - reply['traceback'] = ''.join(traceback.format_exception(*exc_info)) - self.log.warning(reply['message']) + reply["message"] = "Unhandled error" + reply["reason"] = None + reply["traceback"] = "".join(traceback.format_exception(*exc_info)) + self.log.warning(reply["message"]) self.finish(json.dumps(reply)) def get_current_user(self): """Raise 403 on API handlers instead of redirecting to human login page""" # preserve _user_cache so we don't raise more than once - if hasattr(self, '_user_cache'): + if hasattr(self, "_user_cache"): return self._user_cache self._user_cache = user = super(APIHandler, self).get_current_user() return user @@ -559,10 +578,9 @@ def get_login_url(self): @property def content_security_policy(self): - csp = '; '.join([ - super(APIHandler, self).content_security_policy, - "default-src 'none'", - ]) + csp = "; ".join( + [super(APIHandler, self).content_security_policy, "default-src 'none'",] + ) return csp # set _track_activity = False on API handlers that shouldn't track activity @@ -573,24 +591,30 @@ def update_api_activity(self): # record activity of authenticated requests if ( self._track_activity - and getattr(self, '_user_cache', None) - and self.get_argument('no_track_activity', None) is None + and getattr(self, "_user_cache", None) + and self.get_argument("no_track_activity", None) is None ): - self.settings['api_last_activity'] = utcnow() + self.settings["api_last_activity"] = utcnow() def finish(self, *args, **kwargs): self.update_api_activity() - self.set_header('Content-Type', 'application/json') + self.set_header("Content-Type", "application/json") return super(APIHandler, self).finish(*args, **kwargs) def options(self, *args, **kwargs): - if 'Access-Control-Allow-Headers' in self.settings.get('headers', {}): - self.set_header('Access-Control-Allow-Headers', self.settings['headers']['Access-Control-Allow-Headers']) + if "Access-Control-Allow-Headers" in self.settings.get("headers", {}): + self.set_header( + "Access-Control-Allow-Headers", + self.settings["headers"]["Access-Control-Allow-Headers"], + ) else: - self.set_header('Access-Control-Allow-Headers', - 'accept, content-type, authorization, x-xsrftoken') - self.set_header('Access-Control-Allow-Methods', - 'GET, PUT, POST, PATCH, DELETE, OPTIONS') + self.set_header( + "Access-Control-Allow-Headers", + "accept, content-type, authorization, x-xsrftoken", + ) + self.set_header( + "Access-Control-Allow-Methods", "GET, PUT, POST, PATCH, DELETE, OPTIONS" + ) # if authorization header is requested, # that means the request is token-authenticated. @@ -598,25 +622,31 @@ def options(self, *args, **kwargs): # only allow this exception if allow_origin has not been specified # and Jupyter server authentication is enabled. # If the token is not valid, the 'real' request will still be rejected. 
- requested_headers = self.request.headers.get('Access-Control-Request-Headers', '').split(',') - if requested_headers and any( - h.strip().lower() == 'authorization' - for h in requested_headers - ) and ( - # FIXME: it would be even better to check specifically for token-auth, - # but there is currently no API for this. - self.login_available - ) and ( - self.allow_origin - or self.allow_origin_pat - or 'Access-Control-Allow-Origin' in self.settings.get('headers', {}) + requested_headers = self.request.headers.get( + "Access-Control-Request-Headers", "" + ).split(",") + if ( + requested_headers + and any(h.strip().lower() == "authorization" for h in requested_headers) + and ( + # FIXME: it would be even better to check specifically for token-auth, + # but there is currently no API for this. + self.login_available + ) + and ( + self.allow_origin + or self.allow_origin_pat + or "Access-Control-Allow-Origin" in self.settings.get("headers", {}) + ) ): - self.set_header('Access-Control-Allow-Origin', - self.request.headers.get('Origin', '')) + self.set_header( + "Access-Control-Allow-Origin", self.request.headers.get("Origin", "") + ) class Template404(JupyterHandler): """Render our 404 template""" + def prepare(self): raise web.HTTPError(404) @@ -628,29 +658,33 @@ class AuthenticatedFileHandler(JupyterHandler, web.StaticFileHandler): def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the Jupyter server. - return super(AuthenticatedFileHandler, self).content_security_policy + \ - "; sandbox allow-scripts" + return ( + super(AuthenticatedFileHandler, self).content_security_policy + + "; sandbox allow-scripts" + ) @web.authenticated def get(self, path): - if os.path.splitext(path)[1] == '.ipynb' or self.get_argument("download", False): - name = path.rsplit('/', 1)[-1] + if os.path.splitext(path)[1] == ".ipynb" or self.get_argument( + "download", False + ): + name = path.rsplit("/", 1)[-1] self.set_attachment_header(name) return web.StaticFileHandler.get(self, path) def get_content_type(self): - path = self.absolute_path.strip('/') - if '/' in path: - _, name = path.rsplit('/', 1) + path = self.absolute_path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) else: name = path - if name.endswith('.ipynb'): - return 'application/x-ipynb+json' + if name.endswith(".ipynb"): + return "application/x-ipynb+json" else: cur_mime = mimetypes.guess_type(name)[0] - if cur_mime == 'text/plain': - return 'text/plain; charset=UTF-8' + if cur_mime == "text/plain": + return "text/plain; charset=UTF-8" else: return super(AuthenticatedFileHandler, self).get_content_type() @@ -670,13 +704,18 @@ def validate_absolute_path(self, root, absolute_path): Adding to tornado's own handling, forbids the serving of hidden files. """ - abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path) + abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path( + root, absolute_path + ) abs_root = os.path.abspath(root) if is_hidden(abs_path, abs_root) and not self.contents_manager.allow_hidden: - self.log.info("Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable") + self.log.info( + "Refusing to serve hidden file, via 404 Error, use flag 'ContentsManager.allow_hidden' to enable" + ) raise web.HTTPError(404) return abs_path + def json_errors(method): """Decorate methods with this to return GitHub style JSON errors. 
@@ -689,23 +728,29 @@ def json_errors(method): 2. Create and return a JSON body with a message field describing the error in a human readable form. """ - warnings.warn('@json_errors is deprecated in notebook 5.2.0. Subclass APIHandler instead.', + warnings.warn( + "@json_errors is deprecated in notebook 5.2.0. Subclass APIHandler instead.", DeprecationWarning, stacklevel=2, ) + @functools.wraps(method) def wrapper(self, *args, **kwargs): self.write_error = types.MethodType(APIHandler.write_error, self) return method(self, *args, **kwargs) + return wrapper -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # File handler -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + # to minimize subclass changes: HTTPError = web.HTTPError + class FileFindHandler(JupyterHandler, web.StaticFileHandler): """subclass of StaticFileHandler for serving files from a search path""" @@ -715,8 +760,9 @@ class FileFindHandler(JupyterHandler, web.StaticFileHandler): def set_headers(self): super(FileFindHandler, self).set_headers() # disable browser caching, rely on 304 replies for savings - if "v" not in self.request.arguments or \ - any(self.request.path.startswith(path) for path in self.no_cache_paths): + if "v" not in self.request.arguments or any( + self.request.path.startswith(path) for path in self.no_cache_paths + ): self.set_header("Cache-Control", "no-cache") def initialize(self, path, default_filename=None, no_cache_paths=None): @@ -725,9 +771,7 @@ def initialize(self, path, default_filename=None, no_cache_paths=None): if isinstance(path, string_types): path = [path] - self.root = tuple( - os.path.abspath(os.path.expanduser(p)) + os.sep for p in path - ) + self.root = tuple(os.path.abspath(os.path.expanduser(p)) + os.sep for p in path) self.default_filename = default_filename def compute_etag(self): @@ -743,16 +787,16 @@ def get_absolute_path(cls, roots, path): abspath = os.path.abspath(filefind(path, roots)) except IOError: # IOError means not found - return '' + return "" cls._static_paths[path] = abspath - log().debug("Path %s served from %s"%(path, abspath)) + log().debug("Path %s served from %s" % (path, abspath)) return abspath def validate_absolute_path(self, root, absolute_path): """check if the file should be served (raises 404, 403, etc.)""" - if absolute_path == '': + if absolute_path == "": raise web.HTTPError(404) for root in self.root: @@ -763,7 +807,6 @@ def validate_absolute_path(self, root, absolute_path): class APIVersionHandler(APIHandler): - def get(self): # not authenticated, so give as few info as possible self.finish(json.dumps({"version": jupyter_server.__version__})) @@ -778,7 +821,7 @@ class TrailingSlashHandler(web.RequestHandler): def get(self): uri = self.request.path.rstrip("/") if uri: - self.redirect('?'.join((uri, self.request.query))) + self.redirect("?".join((uri, self.request.query))) post = put = get @@ -805,66 +848,69 @@ def redirect_to_files(self, path): cm = self.contents_manager if cm.dir_exists(path): # it's a *directory*, redirect to /tree - url = url_path_join(self.base_url, 'tree', url_escape(path)) + url = url_path_join(self.base_url, "tree", url_escape(path)) else: orig_path = path # otherwise, redirect to /files - parts = path.split('/') + parts = path.split("/") - if not cm.file_exists(path=path) and 'files' in parts: + if not 
cm.file_exists(path=path) and "files" in parts: # redirect without files/ iff it would 404 # this preserves pre-2.0-style 'files/' links self.log.warning("Deprecated files/ URL: %s", orig_path) - parts.remove('files') - path = '/'.join(parts) + parts.remove("files") + path = "/".join(parts) if not cm.file_exists(path=path): raise web.HTTPError(404) - url = url_path_join(self.base_url, 'files', url_escape(path)) + url = url_path_join(self.base_url, "files", url_escape(path)) self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) - def get(self, path=''): + def get(self, path=""): return self.redirect_to_files(self, path) class RedirectWithParams(web.RequestHandler): """Sam as web.RedirectHandler, but preserves URL parameters""" + def initialize(self, url, permanent=True): self._url = url self._permanent = permanent def get(self): - sep = '&' if '?' in self._url else '?' + sep = "&" if "?" in self._url else "?" url = sep.join([self._url, self.request.query]) self.redirect(url, permanent=self._permanent) + class PrometheusMetricsHandler(JupyterHandler): """ Return prometheus metrics for this Jupyter server """ + @web.authenticated def get(self): - self.set_header('Content-Type', prometheus_client.CONTENT_TYPE_LATEST) + self.set_header("Content-Type", prometheus_client.CONTENT_TYPE_LATEST) self.write(prometheus_client.generate_latest(prometheus_client.REGISTRY)) -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL pattern fragments for re-use -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # path matches any number of `/foo[/bar...]` or just `/` or '' path_regex = r"(?P(?:(?:/[^/]+)+|/?))" -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- default_handlers = [ (r".*/", TrailingSlashHandler), (r"api", APIVersionHandler), - (r'/(robots\.txt|favicon\.ico)', web.StaticFileHandler), - (r'/metrics', PrometheusMetricsHandler) + (r"/(robots\.txt|favicon\.ico)", web.StaticFileHandler), + (r"/metrics", PrometheusMetricsHandler), ] diff --git a/jupyter_server/base/zmqhandlers.py b/jupyter_server/base/zmqhandlers.py index a7dde2cbe5..e1c2fe0702 100644 --- a/jupyter_server/base/zmqhandlers.py +++ b/jupyter_server/base/zmqhandlers.py @@ -39,18 +39,18 @@ def serialize_binary_message(msg): """ # don't modify msg or buffer list in-place msg = msg.copy() - buffers = list(msg.pop('buffers')) + buffers = list(msg.pop("buffers")) if sys.version_info < (3, 4): buffers = [x.tobytes() for x in buffers] - bmsg = json.dumps(msg, default=date_default).encode('utf8') + bmsg = json.dumps(msg, default=date_default).encode("utf8") buffers.insert(0, bmsg) nbufs = len(buffers) offsets = [4 * (nbufs + 1)] for buf in buffers[:-1]: offsets.append(offsets[-1] + len(buf)) - offsets_buf = struct.pack('!' + 'I' * (nbufs + 1), nbufs, *offsets) + offsets_buf = struct.pack("!" 
+ "I" * (nbufs + 1), nbufs, *offsets) buffers.insert(0, offsets_buf) - return b''.join(buffers) + return b"".join(buffers) def deserialize_binary_message(bmsg): @@ -68,72 +68,75 @@ def deserialize_binary_message(bmsg): message dictionary """ - nbufs = struct.unpack('!i', bmsg[:4])[0] - offsets = list(struct.unpack('!' + 'I' * nbufs, bmsg[4:4*(nbufs+1)])) + nbufs = struct.unpack("!i", bmsg[:4])[0] + offsets = list(struct.unpack("!" + "I" * nbufs, bmsg[4 : 4 * (nbufs + 1)])) offsets.append(None) bufs = [] for start, stop in zip(offsets[:-1], offsets[1:]): bufs.append(bmsg[start:stop]) - msg = json.loads(bufs[0].decode('utf8')) - msg['header'] = extract_dates(msg['header']) - msg['parent_header'] = extract_dates(msg['parent_header']) - msg['buffers'] = bufs[1:] + msg = json.loads(bufs[0].decode("utf8")) + msg["header"] = extract_dates(msg["header"]) + msg["parent_header"] = extract_dates(msg["parent_header"]) + msg["buffers"] = bufs[1:] return msg + # ping interval for keeping websockets alive (30 seconds) WS_PING_INTERVAL = 30000 class WebSocketMixin(object): """Mixin for common websocket options""" + ping_callback = None last_ping = 0 last_pong = 0 stream = None - + @property def ping_interval(self): """The interval for websocket keep-alive pings. - + Set ws_ping_interval = 0 to disable pings. """ - return self.settings.get('ws_ping_interval', WS_PING_INTERVAL) - + return self.settings.get("ws_ping_interval", WS_PING_INTERVAL) + @property def ping_timeout(self): """If no ping is received in this many milliseconds, close the websocket connection (VPNs, etc. can fail to cleanly close ws connections). Default is max of 3 pings or 30 seconds. """ - return self.settings.get('ws_ping_timeout', - max(3 * self.ping_interval, WS_PING_INTERVAL) + return self.settings.get( + "ws_ping_timeout", max(3 * self.ping_interval, WS_PING_INTERVAL) ) def check_origin(self, origin=None): """Check Origin == Host or Access-Control-Allow-Origin. - + Tornado >= 4 calls this method automatically, raising 403 if it returns False. """ - if self.allow_origin == '*' or ( - hasattr(self, 'skip_check_origin') and self.skip_check_origin()): + if self.allow_origin == "*" or ( + hasattr(self, "skip_check_origin") and self.skip_check_origin() + ): return True host = self.request.headers.get("Host") if origin is None: origin = self.get_origin() - + # If no origin or host header is provided, assume from script if origin is None or host is None: return True - + origin = origin.lower() origin_host = urlparse(origin).netloc - + # OK if origin matches host if origin_host == host: return True - + # Check CORS headers if self.allow_origin: allow = self.allow_origin == origin @@ -143,8 +146,10 @@ def check_origin(self, origin=None): # No CORS headers deny the request allow = False if not allow: - self.log.warning("Blocking Cross Origin WebSocket Attempt. Origin: %s, Host: %s", - origin, host, + self.log.warning( + "Blocking Cross Origin WebSocket Attempt. 
Origin: %s, Host: %s", + origin, + host, ) return allow @@ -177,12 +182,15 @@ def send_ping(self): now = ioloop.IOLoop.current().time() since_last_pong = 1e3 * (now - self.last_pong) since_last_ping = 1e3 * (now - self.last_ping) - if since_last_ping < 2*self.ping_interval and since_last_pong > self.ping_timeout: + if ( + since_last_ping < 2 * self.ping_interval + and since_last_pong > self.ping_timeout + ): self.log.warning("WebSocket ping timeout after %i ms.", since_last_pong) self.close() return - self.ping(b'') + self.ping(b"") self.last_ping = now def on_pong(self, data): @@ -190,9 +198,10 @@ def on_pong(self, data): class ZMQStreamHandler(WebSocketMixin, WebSocketHandler): - - if tornado.version_info < (4,1): + + if tornado.version_info < (4, 1): """Backport send_error from tornado 4.1 to 4.0""" + def send_error(self, *args, **kwargs): if self.stream is None: super(WebSocketHandler, self).send_error(*args, **kwargs) @@ -203,17 +212,16 @@ def send_error(self, *args, **kwargs): # we can close the connection more gracefully. self.stream.close() - def _reserialize_reply(self, msg_or_list, channel=None): """Reserialize a reply message using JSON. msg_or_list can be an already-deserialized msg dict or the zmq buffer list. If it is the zmq list, it will be deserialized with self.session. - + This takes the msg list from the ZMQ socket and serializes the result for the websocket. This method should be used by self._on_zmq_reply to build messages that can be sent back to the browser. - + """ if isinstance(msg_or_list, dict): # already unpacked @@ -222,8 +230,8 @@ def _reserialize_reply(self, msg_or_list, channel=None): idents, msg_list = self.session.feed_identities(msg_or_list) msg = self.session.deserialize(msg_list) if channel: - msg['channel'] = channel - if msg['buffers']: + msg["channel"] = channel + if msg["buffers"]: buf = serialize_binary_message(msg) return buf else: @@ -237,7 +245,7 @@ def _on_zmq_reply(self, stream, msg_list): self.log.warning("zmq message arrived on closed channel") self.close() return - channel = getattr(stream, 'channel', None) + channel = getattr(stream, "channel", None) try: msg = self._reserialize_reply(msg_list, channel=channel) except Exception: @@ -247,7 +255,6 @@ def _on_zmq_reply(self, stream, msg_list): class AuthenticatedZMQStreamHandler(ZMQStreamHandler, JupyterHandler): - def set_default_headers(self): """Undo the set_default_headers in JupyterHandler @@ -266,8 +273,8 @@ def pre_get(self): self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) - if self.get_argument('session_id', False): - self.session.session = cast_unicode(self.get_argument('session_id')) + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") @@ -285,4 +292,4 @@ def initialize(self): self.session = Session(config=self.config) def get_compression_options(self): - return self.settings.get('websocket_compression_options', None) + return self.settings.get("websocket_compression_options", None) diff --git a/jupyter_server/config_manager.py b/jupyter_server/config_manager.py index 584df88709..aa3c71f4f4 100644 --- a/jupyter_server/config_manager.py +++ b/jupyter_server/config_manager.py @@ -58,7 +58,7 @@ class BaseJSONConfigManager(LoggingConfigurable): default values in a {section_name}.d directory. 
""" - config_dir = Unicode('.') + config_dir = Unicode(".") read_directory = Bool(True) def ensure_config_dir_exists(self): @@ -71,11 +71,11 @@ def ensure_config_dir_exists(self): def file_name(self, section_name): """Returns the json filename for the section_name: {config_dir}/{section_name}.json""" - return os.path.join(self.config_dir, section_name+'.json') + return os.path.join(self.config_dir, section_name + ".json") def directory(self, section_name): """Returns the directory name for the section name: {config_dir}/{section_name}.d""" - return os.path.join(self.config_dir, section_name+'.d') + return os.path.join(self.config_dir, section_name + ".d") def get(self, section_name, include_root=True): """Retrieve the config data for the specified section. @@ -88,18 +88,22 @@ def get(self, section_name, include_root=True): """ paths = [self.file_name(section_name)] if include_root else [] if self.read_directory: - pattern = os.path.join(self.directory(section_name), '*.json') + pattern = os.path.join(self.directory(section_name), "*.json") # These json files should be processed first so that the # {section_name}.json take precedence. # The idea behind this is that installing a Python package may # put a json file somewhere in the a .d directory, while the # .json file is probably a user configuration. paths = sorted(glob.glob(pattern)) + paths - self.log.debug('Paths used for configuration of %s: \n\t%s', section_name, '\n\t'.join(paths)) + self.log.debug( + "Paths used for configuration of %s: \n\t%s", + section_name, + "\n\t".join(paths), + ) data = {} for path in paths: if os.path.isfile(path): - with io.open(path, encoding='utf-8') as f: + with io.open(path, encoding="utf-8") as f: recursive_update(data, json.load(f)) return data @@ -120,9 +124,9 @@ def set(self, section_name, data): json_content = json.dumps(data, indent=2) if PY3: - f = io.open(filename, 'w', encoding='utf-8') + f = io.open(filename, "w", encoding="utf-8") else: - f = open(filename, 'wb') + f = open(filename, "wb") with f: f.write(json_content) diff --git a/jupyter_server/edit/handlers.py b/jupyter_server/edit/handlers.py index f813fd1040..219f67f360 100644 --- a/jupyter_server/edit/handlers.py +++ b/jupyter_server/edit/handlers.py @@ -1,4 +1,4 @@ -#encoding: utf-8 +# encoding: utf-8 """Tornado handlers for the terminal emulator.""" # Copyright (c) Jupyter Development Team. 
@@ -14,18 +14,21 @@ class EditorHandler(JupyterHandler): @web.authenticated def get(self, path): - path = path.strip('/') + path = path.strip("/") if not self.contents_manager.file_exists(path): - raise web.HTTPError(404, u'File does not exist: %s' % path) + raise web.HTTPError(404, u"File does not exist: %s" % path) - basename = path.rsplit('/', 1)[-1] - self.write(self.render_template('edit.html', - file_path=url_escape(path), - basename=basename, - page_title=basename + " (editing)", + basename = path.rsplit("/", 1)[-1] + self.write( + self.render_template( + "edit.html", + file_path=url_escape(path), + basename=basename, + page_title=basename + " (editing)", ) ) + default_handlers = [ (r"/edit%s" % path_regex, EditorHandler), ] diff --git a/jupyter_server/extension/application.py b/jupyter_server/extension/application.py index dacd7998b8..5b3954fa1f 100644 --- a/jupyter_server/extension/application.py +++ b/jupyter_server/extension/application.py @@ -3,14 +3,7 @@ from jinja2 import Environment, FileSystemLoader -from traitlets import ( - Unicode, - List, - Dict, - Bool, - default, - validate -) +from traitlets import Unicode, List, Dict, Bool, default, validate from traitlets.config import Config from jupyter_core.application import JupyterApp @@ -23,13 +16,14 @@ # Remove alias for nested classes in ServerApp. # Nested classes are not allowed in ExtensionApp. try: - aliases.pop('transport') + aliases.pop("transport") except KeyError: pass -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Util functions and classes. -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + def _preparse_for_subcommand(Application, argv): """Preparse command line to look for subcommands. @@ -42,7 +36,7 @@ def _preparse_for_subcommand(Application, argv): if Application.subcommands and len(argv) > 0: # we have subcommands, and one may have been specified subc, subargv = argv[0], argv[1:] - if re.match(r'^\w(\-?\w)*$', subc) and subc in Application.subcommands: + if re.match(r"^\w(\-?\w)*$", subc) and subc in Application.subcommands: # it's a subcommand, and *not* a flag or class parameter app = Application() app.initialize_subcommand(subc, subargv) @@ -63,24 +57,24 @@ def _preparse_for_stopping_flags(Application, argv): # version), we want to only search the arguments up to the first # occurrence of '--', which we're calling interpreted_argv. try: - interpreted_argv = argv[:argv.index('--')] + interpreted_argv = argv[: argv.index("--")] except ValueError: interpreted_argv = argv # Catch any help calls. - if any(x in interpreted_argv for x in ('-h', '--help-all', '--help')): + if any(x in interpreted_argv for x in ("-h", "--help-all", "--help")): app = Application() - app.print_help('--help-all' in interpreted_argv) + app.print_help("--help-all" in interpreted_argv) app.exit(0) # Catch version commands - if '--version' in interpreted_argv or '-V' in interpreted_argv: + if "--version" in interpreted_argv or "-V" in interpreted_argv: app = Application() app.print_version() app.exit(0) # Catch generate-config commands. 
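    # Like the -h/--help-all/--help and -V/--version checks above, this
    # short-circuits: the app writes its default config file and exits
    # before any server is created.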
- if '--generate-config' in interpreted_argv: + if "--generate-config" in interpreted_argv: app = Application() app.write_default_config() app.exit(0) @@ -88,24 +82,26 @@ class ExtensionAppJinjaMixin: """Use Jinja templates for HTML templates on top of an ExtensionApp.""" + jinja2_options = Dict( - help=_("""Options to pass to the jinja2 environment for this + help=_( + """Options to pass to the jinja2 environment for this extension. - """) + """ + ) ).tag(config=True) def _prepare_templates(self): # Add templates to web app settings if extension has templates. if len(self.template_paths) > 0: - self.settings.update({ - "{}_template_paths".format(self.extension_name): self.template_paths - }) + self.settings.update( + {"{}_template_paths".format(self.extension_name): self.template_paths} + ) # Create a jinja environment for logging html templates. self.jinja2_env = Environment( - loader=FileSystemLoader(self.template_paths), - extensions=['jinja2.ext.i18n'], + loader=FileSystemLoader(self.template_paths), + extensions=["jinja2.ext.i18n"], autoescape=True, **self.jinja2_options ) @@ -115,23 +111,24 @@ def _prepare_templates(self): # Add the jinja2 environment for this extension to the tornado settings. self.settings.update( - { - "{}_jinja2_env".format(self.extension_name): self.jinja2_env - } + {"{}_jinja2_env".format(self.extension_name): self.jinja2_env} ) -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # Aliases and Flags -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + -flags['no-browser']=( - {'ExtensionApp' : {'open_browser' : True}}, - _("Prevent the opening of the default url in the browser.") +flags["no-browser"] = ( + {"ExtensionApp": {"open_browser": False}}, + _("Prevent the opening of the default url in the browser."), ) -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # ExtensionApp -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + class ExtensionApp(JupyterApp): """Base class for configurable Jupyter Server Extension Applications. @@ -144,15 +141,14 @@ class ExtensionApp(JupyterApp): class method. This method can be set as an entry_point in the extension's setup.py """ + # Subclasses should override this trait. Tells the server if # this extension allows other extensions to be loaded # side-by-side when launched directly. load_other_extensions = True # Name of the extension - extension_name = Unicode( - help="Name of extension." - ) + extension_name = Unicode(help="Name of extension.") def _extension_name_default(self): try: @@ -160,20 +156,26 @@ def _extension_name_default(self): except AttributeError: raise ValueError("The extension must be given a `name`.") - INVALID_EXTENSION_NAME_CHARS = [' ', '.', '+', '/'] + INVALID_EXTENSION_NAME_CHARS = [" ", ".", "+", "/"] - @validate('extension_name') + @validate("extension_name") def _validate_extension_name(self, value): - #value = self.extension_name + # value = self.extension_name if isinstance(value, str): # Validate that extension_name doesn't contain any invalid characters.
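            # e.g. "my extension", "my.ext", "my+ext" and "my/ext" would all be rejected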
for c in ExtensionApp.INVALID_EXTENSION_NAME_CHARS: if c in value: - raise ValueError("Extension name '{name}' cannot contain any of the following characters: " - "{invalid_chars}.". - format(name=value, invalid_chars=ExtensionApp.INVALID_EXTENSION_NAME_CHARS)) + raise ValueError( + "Extension name '{name}' cannot contain any of the following characters: " + "{invalid_chars}.".format( + name=value, + invalid_chars=ExtensionApp.INVALID_EXTENSION_NAME_CHARS, + ) + ) return value - raise ValueError("Extension name must be a string, found {type}.".format(type=type(value))) + raise ValueError( + "Extension name must be a string, found {type}.".format(type=type(value)) + ) # Extension can configure the ServerApp from the command-line classes = [ @@ -187,30 +189,31 @@ def _validate_extension_name(self, value): @property def static_url_prefix(self): - return "/static/{extension_name}/".format( - extension_name=self.extension_name) + return "/static/{extension_name}/".format(extension_name=self.extension_name) - static_paths = List(Unicode(), + static_paths = List( + Unicode(), help="""paths to search for serving static files. This allows adding javascript/css to be available from the notebook server machine, or overriding individual files in the IPython - """ + """, ).tag(config=True) - template_paths = List(Unicode(), - help=_("""Paths to search for serving jinja templates. + template_paths = List( + Unicode(), + help=_( + """Paths to search for serving jinja templates. - Can be used to override templates from notebook.templates.""") + Can be used to override templates from notebook.templates.""" + ), ).tag(config=True) - settings = Dict( - help=_("""Settings that will passed to the server.""") - ).tag(config=True) + settings = Dict(help=_("""Settings that will be passed to the server.""")).tag( + config=True + ) - handlers = List( - help=_("""Handlers appended to the server.""") - ).tag(config=True) + handlers = List(help=_("""Handlers appended to the server.""")).tag(config=True) def _config_dir_default(self): """Point the config directory at the server's config_dir by default.""" @@ -225,20 +228,20 @@ def _config_dir_default(self): def _config_file_name_default(self): """The default config file name.""" if not self.extension_name: - return '' - return 'jupyter_{}_config'.format(self.extension_name.replace('-','_')) + return "" + return "jupyter_{}_config".format(self.extension_name.replace("-", "_")) - default_url = Unicode('/', config=True, - help=_("The default URL to redirect to from `/`") + default_url = Unicode( + "/", config=True, help=_("The default URL to redirect to from `/`") ) - open_browser = Bool( - True, - help=_("Should the extension open a browser window?") - ) + open_browser = Bool(True, help=_("Should the extension open a browser window?")) - custom_display_url = Unicode(u'', config=True, - help=_("""Override URL shown to users. + custom_display_url = Unicode( + u"", + config=True, + help=_( + """Override URL shown to users. Replace actual URL, including protocol, address, port and base URL, with the given value when displaying URL to the users.
Do not change @@ -247,10 +250,11 @@ def _config_file_name_default(self): This option is intended to be used when the URL to display to the user cannot be determined reliably by the Jupyter server (proxified - or containerized setups for example).""") + or containerized setups for example).""" + ), ) - @default('custom_display_url') + @default("custom_display_url") def _default_custom_display_url(self): """URL to display to the user.""" # Get url from server. @@ -265,8 +269,8 @@ def _write_browser_open_file(self, url, fh): del url path = url_path_join(self.serverapp.base_url, self.default_url) url = self.serverapp.get_url(path=path, token=self.serverapp.token) - jinja2_env = self.serverapp.web_app.settings['jinja2_env'] - template = jinja2_env.get_template('browser-open.html') + jinja2_env = self.serverapp.web_app.settings["jinja2_env"] + template = jinja2_env.get_template("browser-open.html") fh.write(template.render(open_url=url)) def initialize_settings(self): @@ -287,7 +291,7 @@ def _prepare_config(self): """ traits = self.class_own_traits().keys() self.extension_config = Config({t: getattr(self, t) for t in traits}) - self.settings['{}_config'.format(self.extension_name)] = self.extension_config + self.settings["{}_config".format(self.extension_name)] = self.extension_config def _prepare_settings(self): # Make webapp settings accessible to initialize_settings method @@ -295,9 +299,9 @@ def _prepare_settings(self): self.settings.update(**webapp.settings) # Add static and template paths to settings. - self.settings.update({ - "{}_static_paths".format(self.extension_name): self.static_paths, - }) + self.settings.update( + {"{}_static_paths".format(self.extension_name): self.static_paths,} + ) # Get setting defined by subclass using initialize_settings method. self.initialize_settings() @@ -315,14 +319,14 @@ def _prepare_handlers(self): new_handlers = [] for handler_items in self.handlers: # Build url pattern including base_url - pattern = url_path_join(webapp.settings['base_url'], handler_items[0]) + pattern = url_path_join(webapp.settings["base_url"], handler_items[0]) handler = handler_items[1] - + # Get handler kwargs, if given kwargs = {} if issubclass(handler, ExtensionHandler): - kwargs['extension_name'] = self.extension_name - try: + kwargs["extension_name"] = self.extension_name + try: kwargs.update(handler_items[2]) except IndexError: pass @@ -334,23 +338,23 @@ def _prepare_handlers(self): if len(self.static_paths) > 0: # Append the extension's static directory to server handlers. static_url = url_path_join("/static", self.extension_name, "(.*)") - + # Construct handler. handler = ( - static_url, - webapp.settings['static_handler_class'], - {'path': self.static_paths} + static_url, + webapp.settings["static_handler_class"], + {"path": self.static_paths}, ) new_handlers.append(handler) - webapp.add_handlers('.*$', new_handlers) + webapp.add_handlers(".*$", new_handlers) def _prepare_templates(self): # Add templates to web app settings if extension has templates. if len(self.template_paths) > 0: - self.settings.update({ - "{}_template_paths".format(self.extension_name): self.template_paths - }) + self.settings.update( + {"{}_template_paths".format(self.extension_name): self.template_paths} + ) @staticmethod def initialize_server(argv=[], load_other_extensions=True, **kwargs): @@ -358,14 +362,14 @@ def initialize_server(argv=[], load_other_extensions=True, **kwargs): # Get a jupyter server instance serverapp = ServerApp.instance(**kwargs) # Initialize ServerApp config. 
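        # ServerApp.instance() above returns the process-wide singleton, so an
        # extension launched standalone still shares one server with any other
        # extensions that get loaded alongside it.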
- # Parses the command line looking for + # Parses the command line looking for # ServerApp configuration. serverapp.initialize(argv=argv, load_extensions=load_other_extensions) return serverapp def initialize(self, serverapp, argv=[]): """Initialize the extension app. - + This method: - Loads the extension's config from file - Updates the extension's config from argv @@ -375,7 +379,7 @@ def initialize(self, serverapp, argv=[]): """ # Initialize ServerApp. self.serverapp = serverapp - + # Initialize the extension application super(ExtensionApp, self).initialize(argv=argv) @@ -387,11 +391,11 @@ def initialize(self, serverapp, argv=[]): def start(self): """Start the underlying Jupyter server. - + Server should be started after extension is initialized. """ super(ExtensionApp, self).start() - # Override the browser open file to + # Override the browser open file to point at the extension's default URL. # Override the server's display url to show extension's display URL. self.serverapp.custom_display_url = self.custom_display_url # Override the server's default option and open a browser window. @@ -437,11 +441,10 @@ def launch_instance(cls, argv=None, **kwargs): # Check for help, version, and generate-config arguments # before initializing server to make sure these # arguments trigger actions from the extension not the server. - _preparse_for_stopping_flags(cls, args) + _preparse_for_stopping_flags(cls, args) # Get a jupyter server instance. serverapp = cls.initialize_server( - argv=args, - load_other_extensions=cls.load_other_extensions + argv=args, load_other_extensions=cls.load_other_extensions ) # Log if extension is blocking other extensions from loading. if not cls.load_other_extensions: @@ -450,6 +453,8 @@ def launch_instance(cls, argv=None, **kwargs): "other extensions.".format(ext_name=cls.extension_name) ) - extension = cls.load_jupyter_server_extension(serverapp, argv=args, **kwargs) + extension = cls.load_jupyter_server_extension( + serverapp, argv=args, **kwargs + ) # Start the ioloop. - extension.start() \ No newline at end of file + extension.start() diff --git a/jupyter_server/extension/handler.py b/jupyter_server/extension/handler.py index 316d1fd782..986382a8b1 100644 --- a/jupyter_server/extension/handler.py +++ b/jupyter_server/extension/handler.py @@ -6,9 +6,10 @@ class ExtensionHandlerJinjaMixin: """Mixin class for ExtensionApp handlers that use jinja templating for template rendering. """ + def get_template(self, name): """Return the jinja template object for a given name""" - env = '{}_jinja2_env'.format(self.extension_name) + env = "{}_jinja2_env".format(self.extension_name) return self.settings[env].get_template(name) @@ -22,6 +23,7 @@ class ExtensionHandler(JupyterHandler): their own namespace and avoid intercepting requests for other extensions. """ + def initialize(self, extension_name): self.extension_name = extension_name @@ -35,12 +37,11 @@ def server_config(self): @property def static_url_prefix(self): - return "/static/{extension_name}/".format( - extension_name=self.extension_name) + return "/static/{extension_name}/".format(extension_name=self.extension_name) @property def static_path(self): - return self.settings['{}_static_paths'.format(self.extension_name)] + return self.settings["{}_static_paths".format(self.extension_name)] def static_url(self, path, include_host=None, **kwargs): """Returns a static URL for the given relative static file path. @@ -87,7 +88,7 @@ def static_url(self, path, include_host=None, **kwargs): # static directory.
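        # e.g. an extension named "myext" (illustrative name) would serve files
        # from its static_path at URLs under /static/myext/, per the
        # static_url_prefix property above.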
settings = { "static_path": self.static_path, - "static_url_prefix": self.static_url_prefix + "static_url_prefix": self.static_url_prefix, } return base + get_url(settings, path, **kwargs) diff --git a/jupyter_server/extension/serverextension.py b/jupyter_server/extension/serverextension.py index de2b752a89..5542466958 100644 --- a/jupyter_server/extension/serverextension.py +++ b/jupyter_server/extension/serverextension.py @@ -13,10 +13,10 @@ from jupyter_core.application import JupyterApp from jupyter_core.paths import ( - jupyter_config_dir, - jupyter_config_path, - ENV_CONFIG_PATH, - SYSTEM_CONFIG_PATH + jupyter_config_dir, + jupyter_config_path, + ENV_CONFIG_PATH, + SYSTEM_CONFIG_PATH, ) from jupyter_server._version import __version__ from jupyter_server.config_manager import BaseJSONConfigManager @@ -25,34 +25,32 @@ class ArgumentConflict(ValueError): pass + _base_flags = {} _base_flags.update(JupyterApp.flags) _base_flags.pop("y", None) _base_flags.pop("generate-config", None) -_base_flags.update({ - "user" : ({ - "BaseExtensionApp" : { - "user" : True, - }}, "Apply the operation only for the given user" - ), - "system" : ({ - "BaseExtensionApp" : { - "user" : False, - "sys_prefix": False, - }}, "Apply the operation system-wide" - ), - "sys-prefix" : ({ - "BaseExtensionApp" : { - "sys_prefix" : True, - }}, "Use sys.prefix as the prefix for installing extensions (for environments, packaging)" - ), - "py" : ({ - "BaseExtensionApp" : { - "python" : True, - }}, "Install from a Python package" - ) -}) -_base_flags['python'] = _base_flags['py'] +_base_flags.update( + { + "user": ( + {"BaseExtensionApp": {"user": True,}}, + "Apply the operation only for the given user", + ), + "system": ( + {"BaseExtensionApp": {"user": False, "sys_prefix": False,}}, + "Apply the operation system-wide", + ), + "sys-prefix": ( + {"BaseExtensionApp": {"sys_prefix": True,}}, + "Use sys.prefix as the prefix for installing extensions (for environments, packaging)", + ), + "py": ( + {"BaseExtensionApp": {"python": True,}}, + "Install from a Python package", + ), + } +) +_base_flags["python"] = _base_flags["py"] _base_aliases = {} _base_aliases.update(JupyterApp.aliases) @@ -60,6 +58,7 @@ class ArgumentConflict(ValueError): class BaseExtensionApp(JupyterApp): """Base extension installer app""" + _log_formatter_cls = LogFormatter flags = _base_flags aliases = _base_aliases @@ -73,6 +72,7 @@ def _log_format_default(self): """A default format for messages""" return "%(message)s" + def _get_config_dir(user=False, sys_prefix=False): """Get the location of config files for the current context @@ -100,16 +100,18 @@ def _get_config_dir(user=False, sys_prefix=False): # Constants for pretty print extension listing function. 
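A hedged sketch of how the --user/--sys-prefix/--system flags defined above are expected to resolve to a config location. The body of _get_config_dir is elided by this hunk, so the mapping below is inferred from the jupyter_core imports rather than quoted from it; resolve_config_dir is an illustrative stand-in.

from jupyter_core.paths import jupyter_config_dir, ENV_CONFIG_PATH, SYSTEM_CONFIG_PATH


def resolve_config_dir(user=False, sys_prefix=False):
    if user and sys_prefix:
        # the module's ArgumentConflict(ValueError) would be the natural error here
        raise ValueError("Cannot specify --user and --sys-prefix together")
    if sys_prefix:
        return ENV_CONFIG_PATH[0]  # e.g. {sys.prefix}/etc/jupyter
    if user:
        return jupyter_config_dir()  # e.g. ~/.jupyter
    return SYSTEM_CONFIG_PATH[0]  # system-wide, e.g. /usr/local/etc/jupyter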
# Windows doesn't support coloring in the command line -GREEN_ENABLED = '\033[32menabled\033[0m' if os.name != 'nt' else 'enabled' -RED_DISABLED = '\033[31mdisabled\033[0m' if os.name != 'nt' else 'disabled' -GREEN_OK = '\033[32mOK\033[0m' if os.name != 'nt' else 'ok' -RED_X = '\033[31m X\033[0m' if os.name != 'nt' else ' X' +GREEN_ENABLED = "\033[32menabled\033[0m" if os.name != "nt" else "enabled" +RED_DISABLED = "\033[31mdisabled\033[0m" if os.name != "nt" else "disabled" +GREEN_OK = "\033[32mOK\033[0m" if os.name != "nt" else "ok" +RED_X = "\033[31m X\033[0m" if os.name != "nt" else " X" # ------------------------------------------------------------------------------ # Public API # ------------------------------------------------------------------------------ -class ExtensionValidationError(Exception): pass + +class ExtensionValidationError(Exception): + pass def validate_server_extension(import_name): @@ -118,18 +120,22 @@ def validate_server_extension(import_name): """ try: mod = importlib.import_module(import_name) - func = getattr(mod, 'load_jupyter_server_extension') - version = getattr(mod, '__version__', '') + func = getattr(mod, "load_jupyter_server_extension") + version = getattr(mod, "__version__", "") return mod, func, version # If the extension does not exist, raise an exception except ImportError: - raise ExtensionValidationError('{} is not importable.'.format(import_name)) + raise ExtensionValidationError("{} is not importable.".format(import_name)) # If the extension does not have a `load_jupyter_server_extension` function, raise exception. except AttributeError: - raise ExtensionValidationError('Found module "{}" but cannot load it.'.format(import_name)) + raise ExtensionValidationError( + 'Found module "{}" but cannot load it.'.format(import_name) + ) -def toggle_server_extension_python(import_name, enabled=None, parent=None, user=False, sys_prefix=True): +def toggle_server_extension_python( + import_name, enabled=None, parent=None, user=False, sys_prefix=True +): """Toggle the boolean setting for a given server extension in a Jupyter config file.
""" @@ -137,63 +143,64 @@ def toggle_server_extension_python(import_name, enabled=None, parent=None, user= config_dir = _get_config_dir(user=user, sys_prefix=sys_prefix) cm = BaseJSONConfigManager(parent=parent, config_dir=config_dir) cfg = cm.get("jupyter_server_config") - server_extensions = ( - cfg.setdefault("ServerApp", {}) - .setdefault("jpserver_extensions", {}) + server_extensions = cfg.setdefault("ServerApp", {}).setdefault( + "jpserver_extensions", {} ) old_enabled = server_extensions.get(import_name, None) new_enabled = enabled if enabled is not None else not old_enabled server_extensions[import_name] = new_enabled cm.update("jupyter_server_config", cfg) + # ---------------------------------------------------------------------- # Applications # ---------------------------------------------------------------------- + flags = {} flags.update(BaseExtensionApp.flags) flags.pop("y", None) flags.pop("generate-config", None) -flags.update({ - "user" : ({ - "ToggleServerExtensionApp" : { - "user" : True, - }}, "Perform the operation for the current user" - ), - "system" : ({ - "ToggleServerExtensionApp" : { - "user" : False, - "sys_prefix": False, - }}, "Perform the operation system-wide" - ), - "sys-prefix" : ({ - "ToggleServerExtensionApp" : { - "sys_prefix" : True, - }}, "Use sys.prefix as the prefix for installing server extensions" - ), - "py" : ({ - "ToggleServerExtensionApp" : { - "python" : True, - }}, "Install from a Python package" - ), -}) -flags['python'] = flags['py'] +flags.update( + { + "user": ( + {"ToggleServerExtensionApp": {"user": True,}}, + "Perform the operation for the current user", + ), + "system": ( + {"ToggleServerExtensionApp": {"user": False, "sys_prefix": False,}}, + "Perform the operation system-wide", + ), + "sys-prefix": ( + {"ToggleServerExtensionApp": {"sys_prefix": True,}}, + "Use sys.prefix as the prefix for installing server extensions", + ), + "py": ( + {"ToggleServerExtensionApp": {"python": True,}}, + "Install from a Python package", + ), + } +) +flags["python"] = flags["py"] class ToggleServerExtensionApp(BaseExtensionApp): """A base class for enabling/disabling extensions""" + name = "jupyter server extension enable/disable" - description = "Enable/disable a server extension using frontend configuration files." - + description = ( + "Enable/disable a server extension using frontend configuration files." + ) + flags = flags user = Bool(False, config=True, help="Whether to do a user install") sys_prefix = Bool(True, config=True, help="Use the sys.prefix as the prefix") python = Bool(False, config=True, help="Install from a Python package") _toggle_value = Bool() - _toggle_pre_message = '' - _toggle_post_message = '' - + _toggle_pre_message = "" + _toggle_post_message = "" + def toggle_server_extension(self, import_name): """Change the status of a named server extension. @@ -207,23 +214,27 @@ def toggle_server_extension(self, import_name): `load_jupyter_server_extension` function """ try: - self.log.info("{}: {}".format(self._toggle_pre_message.capitalize(), import_name)) + self.log.info( + "{}: {}".format(self._toggle_pre_message.capitalize(), import_name) + ) # Validate the server extension. self.log.info(" - Validating {}...".format(import_name)) _, __, version = validate_server_extension(import_name) # Toggle the server extension to active. 
toggle_server_extension_python( - import_name, - self._toggle_value, - parent=self, + import_name, + self._toggle_value, + parent=self, user=self.user, - sys_prefix=self.sys_prefix + sys_prefix=self.sys_prefix, ) self.log.info(" {} {} {}".format(import_name, version, GREEN_OK)) # If successful, let's log. - self.log.info(" - Extension successfully {}.".format(self._toggle_post_message)) + self.log.info( + " - Extension successfully {}.".format(self._toggle_post_message) + ) except ExtensionValidationError as err: self.log.info(" {} Validation failed: {}".format(RED_X, err)) @@ -241,13 +252,13 @@ def toggle_server_extension_python(self, package): """ _, server_exts = _get_server_extension_metadata(package) for server_ext in server_exts: - module = server_ext['module'] + module = server_ext["module"] self.toggle_server_extension(module) def start(self): """Perform the App's actions as configured""" if not self.extra_args: - sys.exit('Please specify a server extension/package to enable or disable') + sys.exit("Please specify a server extension/package to enable or disable") for arg in self.extra_args: if self.python: self.toggle_server_extension_python(arg) @@ -257,6 +268,7 @@ def start(self): class EnableServerExtensionApp(ToggleServerExtensionApp): """An App that enables (and validates) Server Extensions""" + name = "jupyter server extension enable" description = """ Enable a server extension in configuration. @@ -271,6 +283,7 @@ class EnableServerExtensionApp(ToggleServerExtensionApp): class DisableServerExtensionApp(ToggleServerExtensionApp): """An App that disables Server Extensions""" + name = "jupyter server extension disable" description = """ Disable a server extension in configuration. @@ -285,6 +298,7 @@ class DisableServerExtensionApp(ToggleServerExtensionApp): class ListServerExtensionsApp(BaseExtensionApp): """An App that lists (and validates) Server Extensions""" + name = "jupyter server extension list" version = __version__ description = "List all server extensions known by the configuration system" @@ -298,20 +312,23 @@ def list_server_extensions(self): for config_dir in config_dirs: cm = BaseJSONConfigManager(parent=self, config_dir=config_dir) data = cm.get("jupyter_server_config") - server_extensions = ( - data.setdefault("ServerApp", {}) - .setdefault("jpserver_extensions", {}) + server_extensions = data.setdefault("ServerApp", {}).setdefault( + "jpserver_extensions", {} ) if server_extensions: - self.log.info(u'config dir: {}'.format(config_dir)) + self.log.info(u"config dir: {}".format(config_dir)) for import_name, enabled in server_extensions.items(): - self.log.info(u' {} {}'.format( - import_name, - GREEN_ENABLED if enabled else RED_DISABLED)) + self.log.info( + u" {} {}".format( + import_name, GREEN_ENABLED if enabled else RED_DISABLED + ) + ) try: self.log.info(" - Validating {}...".format(import_name)) _, __, version = validate_server_extension(import_name) - self.log.info(" {} {} {}".format(import_name, version, GREEN_OK)) + self.log.info( + " {} {} {}".format(import_name, version, GREEN_OK) + ) except ExtensionValidationError as err: self.log.warn(" {} {}".format(RED_X, err)) @@ -330,6 +347,7 @@ def start(self): class ServerExtensionApp(BaseExtensionApp): """Root level server extension app""" + name = "jupyter server extension" version = __version__ description = "Work with Jupyter server extensions" @@ -338,7 +356,7 @@ class ServerExtensionApp(BaseExtensionApp): subcommands = dict( enable=(EnableServerExtensionApp, "Enable a server extension"),
disable=(DisableServerExtensionApp, "Disable a server extension"), - list=(ListServerExtensionsApp, "List server extensions") + list=(ListServerExtensionsApp, "List server extensions"), ) def start(self): @@ -357,6 +375,7 @@ def start(self): # Private API # ------------------------------------------------------------------------------ + def _get_server_extension_metadata(module): """Load server extension metadata from a module. @@ -377,9 +396,14 @@ def _get_server_extension_metadata(module): magic-named `_jupyter_server_extension_paths` function """ m = import_item(module) - if not hasattr(m, '_jupyter_server_extension_paths'): - raise KeyError(u'The Python module {} does not include any valid server extensions'.format(module)) + if not hasattr(m, "_jupyter_server_extension_paths"): + raise KeyError( + u"The Python module {} does not include any valid server extensions".format( + module + ) + ) return m, m._jupyter_server_extension_paths() -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/jupyter_server/files/handlers.py b/jupyter_server/files/handlers.py index 7ccdad5b68..a44fe2cf85 100644 --- a/jupyter_server/files/handlers.py +++ b/jupyter_server/files/handlers.py @@ -24,8 +24,10 @@ class FilesHandler(JupyterHandler): def content_security_policy(self): # In case we're serving HTML/SVG, confine any Javascript to a unique # origin so it can't interact with the notebook server. - return super(FilesHandler, self).content_security_policy + \ - "; sandbox allow-scripts" + return ( + super(FilesHandler, self).content_security_policy + + "; sandbox allow-scripts" + ) @web.authenticated def head(self, path): @@ -40,42 +42,41 @@ def get(self, path, include_body=True): self.log.info("Refusing to serve hidden file, via 404 Error") raise web.HTTPError(404) - path = path.strip('/') - if '/' in path: - _, name = path.rsplit('/', 1) + path = path.strip("/") + if "/" in path: + _, name = path.rsplit("/", 1) else: name = path - - model = yield maybe_future(cm.get(path, type='file', content=include_body)) - + + model = yield maybe_future(cm.get(path, type="file", content=include_body)) + if self.get_argument("download", False): self.set_attachment_header(name) # get mimetype from filename - if name.lower().endswith('.ipynb'): - self.set_header('Content-Type', 'application/x-ipynb+json') + if name.lower().endswith(".ipynb"): + self.set_header("Content-Type", "application/x-ipynb+json") else: cur_mime = mimetypes.guess_type(name)[0] - if cur_mime == 'text/plain': - self.set_header('Content-Type', 'text/plain; charset=UTF-8') + if cur_mime == "text/plain": + self.set_header("Content-Type", "text/plain; charset=UTF-8") elif cur_mime is not None: - self.set_header('Content-Type', cur_mime) + self.set_header("Content-Type", cur_mime) else: - if model['format'] == 'base64': - self.set_header('Content-Type', 'application/octet-stream') + if model["format"] == "base64": + self.set_header("Content-Type", "application/octet-stream") else: - self.set_header('Content-Type', 'text/plain; charset=UTF-8') + self.set_header("Content-Type", "text/plain; charset=UTF-8") if include_body: - if model['format'] == 'base64': - b64_bytes = model['content'].encode('ascii') + if model["format"] == "base64": + b64_bytes = model["content"].encode("ascii") self.write(decodebytes(b64_bytes)) - elif model['format'] == 'json': - self.write(json.dumps(model['content'])) + elif model["format"] == "json": + self.write(json.dumps(model["content"])) else: - self.write(model['content']) + 
self.write(model["content"]) self.flush() default_handlers = [] - diff --git a/jupyter_server/gateway/handlers.py b/jupyter_server/gateway/handlers.py index a1b76b5536..b16c296a3f 100644 --- a/jupyter_server/gateway/handlers.py +++ b/jupyter_server/gateway/handlers.py @@ -1,6 +1,8 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from ..services.kernelspecs.handlers import kernel_name_regex +from ..services.kernels.handlers import _kernel_id_regex import os import logging import mimetypes @@ -22,7 +24,7 @@ from .managers import GatewayClient # Keepalive ping interval (default: 30 seconds) -GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv('GATEWAY_WS_PING_INTERVAL_SECS', 30)) +GATEWAY_WS_PING_INTERVAL_SECS = int(os.getenv("GATEWAY_WS_PING_INTERVAL_SECS", 30)) class WebSocketChannelsHandler(WebSocketHandler, JupyterHandler): @@ -51,8 +53,8 @@ def authenticate(self): self.log.warning("Couldn't authenticate WebSocket connection") raise web.HTTPError(403) - if self.get_argument('session_id', False): - self.session.session = cast_unicode(self.get_argument('session_id')) + if self.get_argument("session_id", False): + self.session.session = cast_unicode(self.get_argument("session_id")) else: self.log.warning("No session ID specified") @@ -64,25 +66,29 @@ def initialize(self): @gen.coroutine def get(self, kernel_id, *args, **kwargs): self.authenticate() - self.kernel_id = cast_unicode(kernel_id, 'ascii') - yield super(WebSocketChannelsHandler, self).get(kernel_id=kernel_id, *args, **kwargs) + self.kernel_id = cast_unicode(kernel_id, "ascii") + yield super(WebSocketChannelsHandler, self).get( + kernel_id=kernel_id, *args, **kwargs + ) def send_ping(self): if self.ws_connection is None and self.ping_callback is not None: self.ping_callback.stop() return - self.ping(b'') + self.ping(b"") def open(self, kernel_id, *args, **kwargs): """Handle web socket connection open to notebook server and delegate to gateway web socket handler """ - self.ping_callback = PeriodicCallback(self.send_ping, GATEWAY_WS_PING_INTERVAL_SECS * 1000) + self.ping_callback = PeriodicCallback( + self.send_ping, GATEWAY_WS_PING_INTERVAL_SECS * 1000 + ) self.ping_callback.start() self.gateway.on_open( kernel_id=kernel_id, message_callback=self.write_message, - compression_options=self.get_compression_options() + compression_options=self.get_compression_options(), ) def on_message(self, message): @@ -96,8 +102,14 @@ def write_message(self, message, binary=False): binary = True super(WebSocketChannelsHandler, self).write_message(message, binary=binary) elif self.log.isEnabledFor(logging.DEBUG): - msg_summary = WebSocketChannelsHandler._get_message_summary(json_decode(utf8(message))) - self.log.debug("Notebook client closed websocket connection - message dropped: {}".format(msg_summary)) + msg_summary = WebSocketChannelsHandler._get_message_summary( + json_decode(utf8(message)) + ) + self.log.debug( + "Notebook client closed websocket connection - message dropped: {}".format( + msg_summary + ) + ) def on_close(self): self.log.debug("Closing websocket connection %s", self.request.path) @@ -107,19 +119,23 @@ def on_close(self): @staticmethod def _get_message_summary(message): summary = [] - message_type = message['msg_type'] - summary.append('type: {}'.format(message_type)) - - if message_type == 'status': - summary.append(', state: {}'.format(message['content']['execution_state'])) - elif message_type == 'error': - summary.append(', {}:{}:{}'.format(message['content']['ename'], - 
message['content']['evalue'], - message['content']['traceback'])) + message_type = message["msg_type"] + summary.append("type: {}".format(message_type)) + + if message_type == "status": + summary.append(", state: {}".format(message["content"]["execution_state"])) + elif message_type == "error": + summary.append( + ", {}:{}:{}".format( + message["content"]["ename"], + message["content"]["evalue"], + message["content"]["traceback"], + ) + ) else: - summary.append(', ...') # don't display potentially sensitive data + summary.append(", ...") # don't display potentially sensitive data - return ''.join(summary) + return "".join(summary) class GatewayWebSocketClient(LoggingConfigurable): @@ -139,9 +155,11 @@ def _connect(self, kernel_id): self.kernel_id = kernel_id ws_url = url_path_join( GatewayClient.instance().ws_url, - GatewayClient.instance().kernels_endpoint, url_escape(kernel_id), 'channels' + GatewayClient.instance().kernels_endpoint, + url_escape(kernel_id), + "channels", ) - self.log.info('Connecting to {}'.format(ws_url)) + self.log.info("Connecting to {}".format(ws_url)) kwargs = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) @@ -150,13 +168,18 @@ def _connect(self, kernel_id): self.ws_future.add_done_callback(self._connection_done) def _connection_done(self, fut): - if not self.disconnected and fut.exception() is None: # prevent concurrent.futures._base.CancelledError + if ( + not self.disconnected and fut.exception() is None + ): # prevent concurrent.futures._base.CancelledError self.ws = fut.result() self.log.debug("Connection is ready: ws: {}".format(self.ws)) else: - self.log.warning("Websocket connection has been closed via client disconnect or due to error. " - "Kernel with ID '{}' may not be terminated on GatewayClient: {}". - format(self.kernel_id, GatewayClient.instance().url)) + self.log.warning( + "Websocket connection has been closed via client disconnect or due to error. " + "Kernel with ID '{}' may not be terminated on GatewayClient: {}".format( + self.kernel_id, GatewayClient.instance().url + ) + ) def _disconnect(self): self.disconnected = True @@ -166,7 +189,11 @@ def _disconnect(self): elif not self.ws_future.done(): # Cancel pending connection. 
Since future.cancel() is a noop on tornado, we'll track cancellation locally self.ws_future.cancel() - self.log.debug("_disconnect: future cancelled, disconnected: {}".format(self.disconnected)) + self.log.debug( + "_disconnect: future cancelled, disconnected: {}".format( + self.disconnected + ) + ) @gen.coroutine def _read_messages(self, callback): @@ -177,38 +204,48 @@ def _read_messages(self, callback): try: message = yield self.ws.read_message() except Exception as e: - self.log.error("Exception reading message from websocket: {}".format(e)) # , exc_info=True) + # , exc_info=True) + self.log.error( + "Exception reading message from websocket: {}".format(e) + ) if message is None: if not self.disconnected: - self.log.warning("Lost connection to Gateway: {}".format(self.kernel_id)) + self.log.warning( + "Lost connection to Gateway: {}".format(self.kernel_id) + ) break - callback(message) # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) + # pass back to notebook client (see self.on_open and WebSocketChannelsHandler.open) + callback(message) else: # ws cancelled - stop reading break - if not self.disconnected: # if websocket is not disconnected by client, attept to reconnect to Gateway - self.log.info("Attempting to re-establish the connection to Gateway: {}".format(self.kernel_id)) + if ( + not self.disconnected + ): # if websocket is not disconnected by client, attempt to reconnect to Gateway + self.log.info( + "Attempting to re-establish the connection to Gateway: {}".format( + self.kernel_id + ) + ) self._connect(self.kernel_id) loop = IOLoop.current() - loop.add_future(self.ws_future, lambda future: self._read_messages(callback)) + loop.add_future( + self.ws_future, lambda future: self._read_messages(callback) + ) def on_open(self, kernel_id, message_callback, **kwargs): """Web socket connection open against gateway server.""" self._connect(kernel_id) loop = IOLoop.current() loop.add_future( self.ws_future, lambda future: self._read_messages(message_callback) ) def on_message(self, message): """Send message to gateway server.""" if self.ws is None: loop = IOLoop.current() loop.add_future(self.ws_future, lambda future: self._write_message(message)) else: self._write_message(message) @@ -218,7 +255,8 @@ def _write_message(self, message): if not self.disconnected and self.ws is not None: self.ws.write_message(message) except Exception as e: - self.log.error("Exception writing message to websocket: {}".format(e)) # , exc_info=True) + # , exc_info=True) + self.log.error("Exception writing message to websocket: {}".format(e)) def on_close(self): """Web socket closed event.""" @@ -234,16 +272,15 @@ def get(self, kernel_name, path, include_body=True): ksm = self.kernel_spec_manager kernel_spec_res = yield ksm.get_kernel_spec_resource(kernel_name, path) if kernel_spec_res is None: - self.log.warning("Kernelspec resource '{}' for '{}' not found. Gateway may not support" - " resource serving.".format(path, kernel_name)) + self.log.warning( + "Kernelspec resource '{}' for '{}' not found. 
Gateway may not support" + " resource serving.".format(path, kernel_name) + ) else: self.set_header("Content-Type", mimetypes.guess_type(path)[0]) self.finish(kernel_spec_res) -from ..services.kernels.handlers import _kernel_id_regex -from ..services.kernelspecs.handlers import kernel_name_regex - default_handlers = [ (r"/api/kernels/%s/channels" % _kernel_id_regex, WebSocketChannelsHandler), (r"/kernelspecs/%s/(?P.*)" % kernel_name_regex, GatewayResourceHandler), diff --git a/jupyter_server/gateway/managers.py b/jupyter_server/gateway/managers.py index e2f74ce43c..e03e48cb26 100644 --- a/jupyter_server/gateway/managers.py +++ b/jupyter_server/gateway/managers.py @@ -26,197 +26,268 @@ class GatewayClient(SingletonConfigurable): """ - url = Unicode(default_value=None, allow_none=True, config=True, + url = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The url of the Kernel or Enterprise Gateway server where kernel specifications are defined and kernel management takes place. If defined, this Notebook server acts as a proxy for all kernel management and kernel specification retrieval. (JUPYTER_GATEWAY_URL env var) - """ + """, ) - url_env = 'JUPYTER_GATEWAY_URL' + url_env = "JUPYTER_GATEWAY_URL" - @default('url') + @default("url") def _url_default(self): return os.environ.get(self.url_env) - @validate('url') + @validate("url") def _url_validate(self, proposal): - value = proposal['value'] + value = proposal["value"] # Ensure value, if present, starts with 'http' if value is not None and len(value) > 0: - if not str(value).lower().startswith('http'): - raise TraitError("GatewayClient url must start with 'http': '%r'" % value) + if not str(value).lower().startswith("http"): + raise TraitError( + "GatewayClient url must start with 'http': '%r'" % value + ) return value - ws_url = Unicode(default_value=None, allow_none=True, config=True, + ws_url = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The websocket url of the Kernel or Enterprise Gateway server. If not provided, this value will correspond to the value of the Gateway url with 'ws' in place of 'http'. 
(JUPYTER_GATEWAY_WS_URL env var) - """ + """, ) - ws_url_env = 'JUPYTER_GATEWAY_WS_URL' + ws_url_env = "JUPYTER_GATEWAY_WS_URL" - @default('ws_url') + @default("ws_url") def _ws_url_default(self): default_value = os.environ.get(self.ws_url_env) if default_value is None: if self.gateway_enabled: - default_value = self.url.lower().replace('http', 'ws') + default_value = self.url.lower().replace("http", "ws") return default_value - @validate('ws_url') + @validate("ws_url") def _ws_url_validate(self, proposal): - value = proposal['value'] + value = proposal["value"] # Ensure value, if present, starts with 'ws' if value is not None and len(value) > 0: - if not str(value).lower().startswith('ws'): - raise TraitError("GatewayClient ws_url must start with 'ws': '%r'" % value) + if not str(value).lower().startswith("ws"): + raise TraitError( + "GatewayClient ws_url must start with 'ws': '%r'" % value + ) return value - kernels_endpoint_default_value = '/api/kernels' - kernels_endpoint_env = 'JUPYTER_GATEWAY_KERNELS_ENDPOINT' - kernels_endpoint = Unicode(default_value=kernels_endpoint_default_value, config=True, - help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""") + kernels_endpoint_default_value = "/api/kernels" + kernels_endpoint_env = "JUPYTER_GATEWAY_KERNELS_ENDPOINT" + kernels_endpoint = Unicode( + default_value=kernels_endpoint_default_value, + config=True, + help="""The gateway API endpoint for accessing kernel resources (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)""", + ) - @default('kernels_endpoint') + @default("kernels_endpoint") def _kernels_endpoint_default(self): - return os.environ.get(self.kernels_endpoint_env, self.kernels_endpoint_default_value) - - kernelspecs_endpoint_default_value = '/api/kernelspecs' - kernelspecs_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT' - kernelspecs_endpoint = Unicode(default_value=kernelspecs_endpoint_default_value, config=True, - help="""The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""") + return os.environ.get( + self.kernels_endpoint_env, self.kernels_endpoint_default_value + ) + + kernelspecs_endpoint_default_value = "/api/kernelspecs" + kernelspecs_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT" + kernelspecs_endpoint = Unicode( + default_value=kernelspecs_endpoint_default_value, + config=True, + help="""The gateway API endpoint for accessing kernelspecs (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)""", + ) - @default('kernelspecs_endpoint') + @default("kernelspecs_endpoint") def _kernelspecs_endpoint_default(self): - return os.environ.get(self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value) - - kernelspecs_resource_endpoint_default_value = '/kernelspecs' - kernelspecs_resource_endpoint_env = 'JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT' - kernelspecs_resource_endpoint = Unicode(default_value=kernelspecs_resource_endpoint_default_value, config=True, + return os.environ.get( + self.kernelspecs_endpoint_env, self.kernelspecs_endpoint_default_value + ) + + kernelspecs_resource_endpoint_default_value = "/kernelspecs" + kernelspecs_resource_endpoint_env = "JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT" + kernelspecs_resource_endpoint = Unicode( + default_value=kernelspecs_resource_endpoint_default_value, + config=True, help="""The gateway endpoint for accessing kernelspecs resources - (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""") + (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)""", + ) - 
@default('kernelspecs_resource_endpoint') + @default("kernelspecs_resource_endpoint") def _kernelspecs_resource_endpoint_default(self): - return os.environ.get(self.kernelspecs_resource_endpoint_env, self.kernelspecs_resource_endpoint_default_value) + return os.environ.get( + self.kernelspecs_resource_endpoint_env, + self.kernelspecs_resource_endpoint_default_value, + ) connect_timeout_default_value = 60.0 - connect_timeout_env = 'JUPYTER_GATEWAY_CONNECT_TIMEOUT' - connect_timeout = Float(default_value=connect_timeout_default_value, config=True, + connect_timeout_env = "JUPYTER_GATEWAY_CONNECT_TIMEOUT" + connect_timeout = Float( + default_value=connect_timeout_default_value, + config=True, help="""The time allowed for HTTP connection establishment with the Gateway server. - (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""") + (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)""", + ) - @default('connect_timeout') + @default("connect_timeout") def connect_timeout_default(self): - return float(os.environ.get('JUPYTER_GATEWAY_CONNECT_TIMEOUT', self.connect_timeout_default_value)) + return float( + os.environ.get( + "JUPYTER_GATEWAY_CONNECT_TIMEOUT", self.connect_timeout_default_value + ) + ) request_timeout_default_value = 60.0 - request_timeout_env = 'JUPYTER_GATEWAY_REQUEST_TIMEOUT' - request_timeout = Float(default_value=request_timeout_default_value, config=True, - help="""The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""") + request_timeout_env = "JUPYTER_GATEWAY_REQUEST_TIMEOUT" + request_timeout = Float( + default_value=request_timeout_default_value, + config=True, + help="""The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT env var)""", + ) - @default('request_timeout') + @default("request_timeout") def request_timeout_default(self): - return float(os.environ.get('JUPYTER_GATEWAY_REQUEST_TIMEOUT', self.request_timeout_default_value)) - - client_key = Unicode(default_value=None, allow_none=True, config=True, + return float( + os.environ.get( + "JUPYTER_GATEWAY_REQUEST_TIMEOUT", self.request_timeout_default_value + ) + ) + + client_key = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) - """ + """, ) - client_key_env = 'JUPYTER_GATEWAY_CLIENT_KEY' + client_key_env = "JUPYTER_GATEWAY_CLIENT_KEY" - @default('client_key') + @default("client_key") def _client_key_default(self): return os.environ.get(self.client_key_env) - client_cert = Unicode(default_value=None, allow_none=True, config=True, + client_cert = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT env var) - """ + """, ) - client_cert_env = 'JUPYTER_GATEWAY_CLIENT_CERT' + client_cert_env = "JUPYTER_GATEWAY_CLIENT_CERT" - @default('client_cert') + @default("client_cert") def _client_cert_default(self): return os.environ.get(self.client_cert_env) - ca_certs = Unicode(default_value=None, allow_none=True, config=True, + ca_certs = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The filename of CA certificates or None to use defaults. 
(JUPYTER_GATEWAY_CA_CERTS env var) - """ + """, ) - ca_certs_env = 'JUPYTER_GATEWAY_CA_CERTS' + ca_certs_env = "JUPYTER_GATEWAY_CA_CERTS" - @default('ca_certs') + @default("ca_certs") def _ca_certs_default(self): return os.environ.get(self.ca_certs_env) - http_user = Unicode(default_value=None, allow_none=True, config=True, + http_user = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) - """ + """, ) - http_user_env = 'JUPYTER_GATEWAY_HTTP_USER' + http_user_env = "JUPYTER_GATEWAY_HTTP_USER" - @default('http_user') + @default("http_user") def _http_user_default(self): return os.environ.get(self.http_user_env) - http_pwd = Unicode(default_value=None, allow_none=True, config=True, + http_pwd = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) - """ + """, ) - http_pwd_env = 'JUPYTER_GATEWAY_HTTP_PWD' + http_pwd_env = "JUPYTER_GATEWAY_HTTP_PWD" - @default('http_pwd') + @default("http_pwd") def _http_pwd_default(self): return os.environ.get(self.http_pwd_env) - headers_default_value = '{}' - headers_env = 'JUPYTER_GATEWAY_HEADERS' - headers = Unicode(default_value=headers_default_value, allow_none=True, config=True, + headers_default_value = "{}" + headers_env = "JUPYTER_GATEWAY_HEADERS" + headers = Unicode( + default_value=headers_default_value, + allow_none=True, + config=True, help="""Additional HTTP headers to pass on the request. This value will be converted to a dict. (JUPYTER_GATEWAY_HEADERS env var) - """ + """, ) - @default('headers') + @default("headers") def _headers_default(self): return os.environ.get(self.headers_env, self.headers_default_value) - auth_token = Unicode(default_value=None, allow_none=True, config=True, + auth_token = Unicode( + default_value=None, + allow_none=True, + config=True, help="""The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN env var) - """ + """, ) - auth_token_env = 'JUPYTER_GATEWAY_AUTH_TOKEN' + auth_token_env = "JUPYTER_GATEWAY_AUTH_TOKEN" - @default('auth_token') + @default("auth_token") def _auth_token_default(self): - return os.environ.get(self.auth_token_env, '') + return os.environ.get(self.auth_token_env, "") validate_cert_default_value = True - validate_cert_env = 'JUPYTER_GATEWAY_VALIDATE_CERT' - validate_cert = Bool(default_value=validate_cert_default_value, config=True, + validate_cert_env = "JUPYTER_GATEWAY_VALIDATE_CERT" + validate_cert = Bool( + default_value=validate_cert_default_value, + config=True, help="""For HTTPS requests, determines if server's certificate should be validated or not. 
- (JUPYTER_GATEWAY_VALIDATE_CERT env var)""" + (JUPYTER_GATEWAY_VALIDATE_CERT env var)""", ) - @default('validate_cert') + @default("validate_cert") def validate_cert_default(self): - return bool(os.environ.get(self.validate_cert_env, str(self.validate_cert_default_value)) not in ['no', 'false']) + return bool( + os.environ.get( + self.validate_cert_env, str(self.validate_cert_default_value) + ) + not in ["no", "false"] + ) def __init__(self, **kwargs): super(GatewayClient, self).__init__(**kwargs) self._static_args = {} # initialized on first use - env_whitelist_default_value = '' - env_whitelist_env = 'JUPYTER_GATEWAY_ENV_WHITELIST' - env_whitelist = Unicode(default_value=env_whitelist_default_value, config=True, + env_whitelist_default_value = "" + env_whitelist_env = "JUPYTER_GATEWAY_ENV_WHITELIST" + env_whitelist = Unicode( + default_value=env_whitelist_default_value, + config=True, help="""A comma-separated list of environment variable names that will be included, along with their values, in the kernel startup request. The corresponding `env_whitelist` configuration value must also be set on the Gateway server - since that configuration value indicates which - environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""") + environmental values to make available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)""", + ) - @default('env_whitelist') + @default("env_whitelist") def _env_whitelist_default(self): return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) @@ -225,10 +296,10 @@ def gateway_enabled(self): return bool(self.url is not None and len(self.url) > 0) # Ensure KERNEL_LAUNCH_TIMEOUT has a default value. - KERNEL_LAUNCH_TIMEOUT = int(os.environ.get('KERNEL_LAUNCH_TIMEOUT', 40)) - os.environ['KERNEL_LAUNCH_TIMEOUT'] = str(KERNEL_LAUNCH_TIMEOUT) + KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40)) + os.environ["KERNEL_LAUNCH_TIMEOUT"] = str(KERNEL_LAUNCH_TIMEOUT) - LAUNCH_TIMEOUT_PAD = int(os.environ.get('LAUNCH_TIMEOUT_PAD', 2)) + LAUNCH_TIMEOUT_PAD = int(os.environ.get("LAUNCH_TIMEOUT_PAD", 2)) def init_static_args(self): """Initialize arguments used on every request. Since these are static values, we'll @@ -236,26 +307,30 @@ def init_static_args(self): """ # Ensure that request timeout is at least "pad" greater than launch timeout. 
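        # e.g. with the defaults above (KERNEL_LAUNCH_TIMEOUT=40,
        # LAUNCH_TIMEOUT_PAD=2), a configured request_timeout of 41.0 is raised
        # to 42.0, so a kernel-start request cannot time out before the kernel
        # launch itself does.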
- if self.request_timeout < float(GatewayClient.KERNEL_LAUNCH_TIMEOUT + GatewayClient.LAUNCH_TIMEOUT_PAD): - self.request_timeout = float(GatewayClient.KERNEL_LAUNCH_TIMEOUT + GatewayClient.LAUNCH_TIMEOUT_PAD) - - self._static_args['headers'] = json.loads(self.headers) - if 'Authorization' not in self._static_args['headers'].keys(): - self._static_args['headers'].update({ - 'Authorization': 'token {}'.format(self.auth_token) - }) - self._static_args['connect_timeout'] = self.connect_timeout - self._static_args['request_timeout'] = self.request_timeout - self._static_args['validate_cert'] = self.validate_cert + if self.request_timeout < float( + GatewayClient.KERNEL_LAUNCH_TIMEOUT + GatewayClient.LAUNCH_TIMEOUT_PAD + ): + self.request_timeout = float( + GatewayClient.KERNEL_LAUNCH_TIMEOUT + GatewayClient.LAUNCH_TIMEOUT_PAD + ) + + self._static_args["headers"] = json.loads(self.headers) + if "Authorization" not in self._static_args["headers"].keys(): + self._static_args["headers"].update( + {"Authorization": "token {}".format(self.auth_token)} + ) + self._static_args["connect_timeout"] = self.connect_timeout + self._static_args["request_timeout"] = self.request_timeout + self._static_args["validate_cert"] = self.validate_cert if self.client_cert: - self._static_args['client_cert'] = self.client_cert - self._static_args['client_key'] = self.client_key + self._static_args["client_cert"] = self.client_cert + self._static_args["client_key"] = self.client_key if self.ca_certs: - self._static_args['ca_certs'] = self.ca_certs + self._static_args["ca_certs"] = self.ca_certs if self.http_user: - self._static_args['auth_username'] = self.http_user + self._static_args["auth_username"] = self.http_user if self.http_pwd: - self._static_args['auth_password'] = self.http_pwd + self._static_args["auth_password"] = self.http_pwd def load_connection_args(self, **kwargs): """Merges the static args relative to the connection, with the given keyword arguments. If statics @@ -281,17 +356,30 @@ def gateway_request(endpoint, **kwargs): # NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes # of the tree view. except ConnectionRefusedError: - raise web.HTTPError(503, "Connection refused from Gateway server url '{}'. " - "Check to be sure the Gateway instance is running.".format(GatewayClient.instance().url)) + raise web.HTTPError( + 503, + "Connection refused from Gateway server url '{}'. " + "Check to be sure the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) except HTTPError as e: # This can occur if the host is valid (e.g., foo.com) but there's nothing there. - raise web.HTTPError(e.code, "Error attempting to connect to Gateway server url '{}'. " - "Ensure gateway url is valid and the Gateway instance is running.". - format(GatewayClient.instance().url)) + raise web.HTTPError( + e.code, + "Error attempting to connect to Gateway server url '{}'. " + "Ensure gateway url is valid and the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) except gaierror: - raise web.HTTPError(404, "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. " - "Ensure gateway url is valid and the Gateway instance is running.". - format(GatewayClient.instance().url)) + raise web.HTTPError( + 404, + "The Gateway server specified in the gateway_url '{}' doesn't appear to be valid. 
" + "Ensure gateway url is valid and the Gateway instance is running.".format( + GatewayClient.instance().url + ), + ) raise gen.Return(response) @@ -304,7 +392,9 @@ class GatewayKernelManager(MappingKernelManager): def __init__(self, **kwargs): super(GatewayKernelManager, self).__init__(**kwargs) - self.base_endpoint = url_path_join(GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint) + self.base_endpoint = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernels_endpoint + ) def __contains__(self, kernel_id): return kernel_id in self._kernels @@ -346,32 +436,39 @@ def start_kernel(self, kernel_id=None, path=None, **kwargs): if kernel_id is None: if path is not None: - kwargs['cwd'] = self.cwd_for_path(path) - kernel_name = kwargs.get('kernel_name', 'python3') + kwargs["cwd"] = self.cwd_for_path(path) + kernel_name = kwargs.get("kernel_name", "python3") kernel_url = self._get_kernel_endpoint_url() self.log.debug("Request new kernel at: %s" % kernel_url) # Let KERNEL_USERNAME take precedent over http_user config option. - if os.environ.get('KERNEL_USERNAME') is None and GatewayClient.instance().http_user: - os.environ['KERNEL_USERNAME'] = GatewayClient.instance().http_user - - kernel_env = {k: v for (k, v) in dict(os.environ).items() if k.startswith('KERNEL_') - or k in GatewayClient.instance().env_whitelist.split(",")} + if ( + os.environ.get("KERNEL_USERNAME") is None + and GatewayClient.instance().http_user + ): + os.environ["KERNEL_USERNAME"] = GatewayClient.instance().http_user + + kernel_env = { + k: v + for (k, v) in dict(os.environ).items() + if k.startswith("KERNEL_") + or k in GatewayClient.instance().env_whitelist.split(",") + } # Convey the full path to where this notebook file is located. - if path is not None and kernel_env.get('KERNEL_WORKING_DIR') is None: - kernel_env['KERNEL_WORKING_DIR'] = kwargs['cwd'] + if path is not None and kernel_env.get("KERNEL_WORKING_DIR") is None: + kernel_env["KERNEL_WORKING_DIR"] = kwargs["cwd"] - json_body = json_encode({'name': kernel_name, 'env': kernel_env}) + json_body = json_encode({"name": kernel_name, "env": kernel_env}) - response = yield gateway_request(kernel_url, method='POST', body=json_body) + response = yield gateway_request(kernel_url, method="POST", body=json_body) kernel = json_decode(response.body) - kernel_id = kernel['id'] + kernel_id = kernel["id"] self.log.info("Kernel started: %s" % kernel_id) self.log.debug("Kernel args: %r" % kwargs) else: kernel = yield self.get_kernel(kernel_id) - kernel_id = kernel['id'] + kernel_id = kernel["id"] self.log.info("Using existing kernel: %s" % kernel_id) self._kernels[kernel_id] = kernel @@ -389,7 +486,7 @@ def get_kernel(self, kernel_id=None, **kwargs): kernel_url = self._get_kernel_endpoint_url(kernel_id) self.log.debug("Request kernel at: %s" % kernel_url) try: - response = yield gateway_request(kernel_url, method='GET') + response = yield gateway_request(kernel_url, method="GET") except web.HTTPError as error: if error.status_code == 404: self.log.warn("Kernel not found at: %s" % kernel_url) @@ -422,9 +519,9 @@ def list_kernels(self, **kwargs): """Get a list of kernels.""" kernel_url = self._get_kernel_endpoint_url() self.log.debug("Request list kernels: %s", kernel_url) - response = yield gateway_request(kernel_url, method='GET') + response = yield gateway_request(kernel_url, method="GET") kernels = json_decode(response.body) - self._kernels = {x['id']: x for x in kernels} + self._kernels = {x["id"]: x for x in kernels} raise 
gen.Return(kernels) @gen.coroutine @@ -442,8 +539,10 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): """ kernel_url = self._get_kernel_endpoint_url(kernel_id) self.log.debug("Request shutdown kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='DELETE') - self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) + response = yield gateway_request(kernel_url, method="DELETE") + self.log.debug( + "Shutdown kernel response: %d %s", response.code, response.reason + ) self.remove_kernel(kernel_id) @gen.coroutine @@ -455,9 +554,11 @@ def restart_kernel(self, kernel_id, now=False, **kwargs): kernel_id : uuid The id of the kernel to restart. """ - kernel_url = self._get_kernel_endpoint_url(kernel_id) + '/restart' + kernel_url = self._get_kernel_endpoint_url(kernel_id) + "/restart" self.log.debug("Request restart kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='POST', body=json_encode({})) + response = yield gateway_request( + kernel_url, method="POST", body=json_encode({}) + ) self.log.debug("Restart kernel response: %d %s", response.code, response.reason) @gen.coroutine @@ -469,16 +570,20 @@ def interrupt_kernel(self, kernel_id, **kwargs): kernel_id : uuid The id of the kernel to interrupt. """ - kernel_url = self._get_kernel_endpoint_url(kernel_id) + '/interrupt' + kernel_url = self._get_kernel_endpoint_url(kernel_id) + "/interrupt" self.log.debug("Request interrupt kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='POST', body=json_encode({})) - self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) + response = yield gateway_request( + kernel_url, method="POST", body=json_encode({}) + ) + self.log.debug( + "Interrupt kernel response: %d %s", response.code, response.reason + ) def shutdown_all(self, now=False): """Shutdown all kernels.""" # Note: We have to make this sync because the NotebookApp does not wait for async. 
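Because shutdown_all cannot await anything (per the note above), the code that follows uses Tornado's blocking HTTPClient rather than AsyncHTTPClient. A minimal sketch of such a blocking DELETE, with an example URL; in the real code load_connection_args() would add auth headers and timeouts:

    from tornado.httpclient import HTTPClient, HTTPError

    client = HTTPClient()
    try:
        response = client.fetch(
            "http://gateway:8888/api/kernels/abc123", method="DELETE"
        )
    except HTTPError:
        pass  # kernel is already gone
    finally:
        client.close()

Note that the blocking HTTPClient cannot be used on a thread whose IOLoop is already running, which is exactly why it only appears in this synchronous teardown path.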
shutdown_kernels = [] - kwargs = {'method': 'DELETE'} + kwargs = {"method": "DELETE"} kwargs = GatewayClient.instance().load_connection_args(**kwargs) client = HTTPClient() for kernel_id in self._kernels.keys(): @@ -489,21 +594,26 @@ def shutdown_all(self, now=False): except HTTPError: pass else: - self.log.debug("Delete kernel response: %d %s", response.code, response.reason) - shutdown_kernels.append(kernel_id) # avoid changing dict size during iteration + self.log.debug( + "Delete kernel response: %d %s", response.code, response.reason + ) + # avoid changing dict size during iteration + shutdown_kernels.append(kernel_id) client.close() for kernel_id in shutdown_kernels: self.remove_kernel(kernel_id) class GatewayKernelSpecManager(KernelSpecManager): - def __init__(self, **kwargs): super(GatewayKernelSpecManager, self).__init__(**kwargs) - self.base_endpoint = url_path_join(GatewayClient.instance().url, - GatewayClient.instance().kernelspecs_endpoint) - self.base_resource_endpoint = url_path_join(GatewayClient.instance().url, - GatewayClient.instance().kernelspecs_resource_endpoint) + self.base_endpoint = url_path_join( + GatewayClient.instance().url, GatewayClient.instance().kernelspecs_endpoint + ) + self.base_resource_endpoint = url_path_join( + GatewayClient.instance().url, + GatewayClient.instance().kernelspecs_resource_endpoint, + ) def _get_kernelspecs_endpoint_url(self, kernel_name=None): """Builds a url for the kernels endpoint @@ -526,15 +636,18 @@ def get_all_specs(self): # caller of this method will still return this server's value until # the next fetch of kernelspecs - at which time they'll match. km = self.parent.kernel_manager - remote_default_kernel_name = fetched_kspecs.get('default') + remote_default_kernel_name = fetched_kspecs.get("default") if remote_default_kernel_name != km.default_kernel_name: - self.log.info("Default kernel name on Gateway server ({gateway_default}) differs from " - "Notebook server ({notebook_default}). Updating to Gateway server's value.". - format(gateway_default=remote_default_kernel_name, - notebook_default=km.default_kernel_name)) + self.log.info( + "Default kernel name on Gateway server ({gateway_default}) differs from " + "Notebook server ({notebook_default}). Updating to Gateway server's value.".format( + gateway_default=remote_default_kernel_name, + notebook_default=km.default_kernel_name, + ) + ) km.default_kernel_name = remote_default_kernel_name - remote_kspecs = fetched_kspecs.get('kernelspecs') + remote_kspecs = fetched_kspecs.get("kernelspecs") raise gen.Return(remote_kspecs) @gen.coroutine @@ -542,7 +655,7 @@ def list_kernel_specs(self): """Get a list of kernel specs.""" kernel_spec_url = self._get_kernelspecs_endpoint_url() self.log.debug("Request list kernel specs at: %s", kernel_spec_url) - response = yield gateway_request(kernel_spec_url, method='GET') + response = yield gateway_request(kernel_spec_url, method="GET") kernel_specs = json_decode(response.body) raise gen.Return(kernel_specs) @@ -555,16 +668,22 @@ def get_kernel_spec(self, kernel_name, **kwargs): kernel_name : str The name of the kernel. 
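The managers in this patch predate async/await: they use @gen.coroutine and hand values back with raise gen.Return(...), which Tornado treats as a normal return. A self-contained sketch, where the sleep stands in for a gateway HTTP call:

    from tornado import gen, ioloop

    @gen.coroutine
    def fetch_kernels():
        yield gen.sleep(0)  # stand-in for `yield gateway_request(...)`
        raise gen.Return([{"id": "abc123"}])  # how these coroutines "return"

    kernels = ioloop.IOLoop.current().run_sync(fetch_kernels)
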
""" - kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name)) + kernel_spec_url = self._get_kernelspecs_endpoint_url( + kernel_name=str(kernel_name) + ) self.log.debug("Request kernel spec at: %s" % kernel_spec_url) try: - response = yield gateway_request(kernel_spec_url, method='GET') + response = yield gateway_request(kernel_spec_url, method="GET") except web.HTTPError as error: if error.status_code == 404: # Convert not found to KeyError since that's what the Notebook handler expects # message is not used, but might as well make it useful for troubleshooting - raise KeyError('kernelspec {kernel_name} not found on Gateway server at: {gateway_url}'. - format(kernel_name=kernel_name, gateway_url=GatewayClient.instance().url)) + raise KeyError( + "kernelspec {kernel_name} not found on Gateway server at: {gateway_url}".format( + kernel_name=kernel_name, + gateway_url=GatewayClient.instance().url, + ) + ) else: raise else: @@ -583,10 +702,16 @@ def get_kernel_spec_resource(self, kernel_name, path): path : str The name of the desired resource """ - kernel_spec_resource_url = url_path_join(self.base_resource_endpoint, str(kernel_name), str(path)) - self.log.debug("Request kernel spec resource '{}' at: {}".format(path, kernel_spec_resource_url)) + kernel_spec_resource_url = url_path_join( + self.base_resource_endpoint, str(kernel_name), str(path) + ) + self.log.debug( + "Request kernel spec resource '{}' at: {}".format( + path, kernel_spec_resource_url + ) + ) try: - response = yield gateway_request(kernel_spec_resource_url, method='GET') + response = yield gateway_request(kernel_spec_resource_url, method="GET") except web.HTTPError as error: if error.status_code == 404: kernel_spec_resource = None @@ -598,7 +723,7 @@ def get_kernel_spec_resource(self, kernel_name, path): class GatewaySessionManager(SessionManager): - kernel_manager = Instance('jupyter_server.gateway.managers.GatewayKernelManager') + kernel_manager = Instance("jupyter_server.gateway.managers.GatewayKernelManager") @gen.coroutine def kernel_culled(self, kernel_id): diff --git a/jupyter_server/i18n/__init__.py b/jupyter_server/i18n/__init__.py index 63fde70f06..7b96d03e78 100644 --- a/jupyter_server/i18n/__init__.py +++ b/jupyter_server/i18n/__init__.py @@ -15,14 +15,18 @@ # ... # } # }} -TRANSLATIONS_CACHE = {'nbjs': {}} +TRANSLATIONS_CACHE = {"nbjs": {}} -_accept_lang_re = re.compile(r''' +_accept_lang_re = re.compile( + r""" (?P[a-zA-Z]{1,8}(-[a-zA-Z]{1,8})?) (\s*;\s*q\s*=\s* (?P[01](.\d+)?) -)?''', re.VERBOSE) +)?""", + re.VERBOSE, +) + def parse_accept_lang_header(accept_lang): """Parses the 'Accept-Language' HTTP header. @@ -31,15 +35,15 @@ def parse_accept_lang_header(accept_lang): (with the most preferred language last). """ by_q = defaultdict(list) - for part in accept_lang.split(','): + for part in accept_lang.split(","): m = _accept_lang_re.match(part.strip()) if not m: continue - lang, qvalue = m.group('lang', 'qvalue') + lang, qvalue = m.group("lang", "qvalue") # Browser header format is zh-CN, gettext uses zh_CN - lang = lang.replace('-', '_') + lang = lang.replace("-", "_") if qvalue is None: - qvalue = 1. 
+ qvalue = 1.0 else: qvalue = float(qvalue) if qvalue == 0: @@ -51,11 +55,13 @@ def parse_accept_lang_header(accept_lang): res.extend(sorted(langs)) return res -def load(language, domain='nbjs'): + +def load(language, domain="nbjs"): """Load translations from an nbjs.json file""" try: - f = io.open(pjoin(I18N_DIR, language, 'LC_MESSAGES', 'nbjs.json'), - encoding='utf-8') + f = io.open( + pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8" + ) except IOError as e: if e.errno != errno.ENOENT: raise @@ -65,7 +71,8 @@ def load(language, domain='nbjs'): data = json.load(f) return data["locale_data"][domain] -def cached_load(language, domain='nbjs'): + +def cached_load(language, domain="nbjs"): """Load translations for one language, using in-memory cache if available""" domain_cache = TRANSLATIONS_CACHE[domain] try: @@ -75,7 +82,8 @@ def cached_load(language, domain='nbjs'): domain_cache[language] = data return data -def combine_translations(accept_language, domain='nbjs'): + +def combine_translations(accept_language, domain="nbjs"): """Combine translations for multiple accepted languages. Returns data re-packaged in jed1.x format. @@ -83,17 +91,12 @@ def combine_translations(accept_language, domain='nbjs'): lang_codes = parse_accept_lang_header(accept_language) combined = {} for language in lang_codes: - if language == 'en': + if language == "en": # en is default, all translations are in frontend. combined.clear() else: combined.update(cached_load(language, domain)) - combined[''] = {"domain":"nbjs"} + combined[""] = {"domain": "nbjs"} - return { - "domain": domain, - "locale_data": { - domain: combined - } - } + return {"domain": domain, "locale_data": {domain: combined}} diff --git a/jupyter_server/kernelspecs/handlers.py b/jupyter_server/kernelspecs/handlers.py index 228694b8a5..853ccbb536 100644 --- a/jupyter_server/kernelspecs/handlers.py +++ b/jupyter_server/kernelspecs/handlers.py @@ -4,10 +4,10 @@ class KernelSpecResourceHandler(web.StaticFileHandler, JupyterHandler): - SUPPORTED_METHODS = ('GET', 'HEAD') + SUPPORTED_METHODS = ("GET", "HEAD") def initialize(self): - web.StaticFileHandler.initialize(self, path='') + web.StaticFileHandler.initialize(self, path="") @web.authenticated def get(self, kernel_name, path, include_body=True): @@ -15,7 +15,7 @@ def get(self, kernel_name, path, include_body=True): try: self.root = ksm.get_kernel_spec(kernel_name).resource_dir except KeyError: - raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name) + raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) self.log.debug("Serving kernel resource from: %s", self.root) return web.StaticFileHandler.get(self, path, include_body=include_body) @@ -23,7 +23,7 @@ def get(self, kernel_name, path, include_body=True): def head(self, kernel_name, path): return self.get(kernel_name, path, include_body=False) + default_handlers = [ (r"/kernelspecs/%s/(?P.*)" % kernel_name_regex, KernelSpecResourceHandler), ] - diff --git a/jupyter_server/log.py b/jupyter_server/log.py index 3621a70cae..e3ec81ff45 100644 --- a/jupyter_server/log.py +++ b/jupyter_server/log.py @@ -1,9 +1,9 @@ -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Copyright (c) Jupyter Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. 
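For reference, the Accept-Language parser above orders languages by q-value with the most preferred last and rewrites browser-style tags into gettext form. For a typical header:

    from jupyter_server.i18n import parse_accept_lang_header

    langs = parse_accept_lang_header("en-US,en;q=0.9,fr;q=0.8")
    # -> ['fr', 'en', 'en_US']   (zh-CN-style tags become zh_CN)
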
-#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- import json from tornado.log import access_log @@ -12,7 +12,7 @@ def log_request(handler): """log a bit more information about each request than tornado's default - + - move static file get success to debug-level (reduces noise) - get proxied IP instead of proxy IP - log referer for redirect and failed requests @@ -29,7 +29,7 @@ def log_request(handler): log_method = access_log.warning else: log_method = access_log.error - + request_time = 1000.0 * handler.request.request_time() ns = dict( status=status, @@ -41,8 +41,8 @@ def log_request(handler): msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms" if status >= 400: # log bad referers - ns['referer'] = request.headers.get('Referer', 'None') - msg = msg + ' referer={referer}' + ns["referer"] = request.headers.get("Referer", "None") + msg = msg + " referer={referer}" if status >= 500 and status != 502: # log all headers if it caused an error log_method(json.dumps(dict(request.headers), indent=2)) diff --git a/jupyter_server/nbconvert/handlers.py b/jupyter_server/nbconvert/handlers.py index fc8d9c961b..9867e430d3 100644 --- a/jupyter_server/nbconvert/handlers.py +++ b/jupyter_server/nbconvert/handlers.py @@ -11,7 +11,8 @@ from tornado.log import app_log from ..base.handlers import ( - JupyterHandler, FilesRedirectHandler, + JupyterHandler, + FilesRedirectHandler, path_regex, ) from nbformat import from_dict @@ -19,12 +20,14 @@ from ipython_genutils.py3compat import cast_bytes from ipython_genutils import text + def find_resource_files(output_files_dir): files = [] for dirpath, dirnames, filenames in os.walk(output_files_dir): files.extend([os.path.join(dirpath, f) for f in filenames]) return files + def respond_zip(handler, name, output, resources): """Zip up the output and resource files and respond with the zip file. @@ -32,21 +35,23 @@ def respond_zip(handler, name, output, resources): files, in which case we serve the plain output file. 
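The log_request changes above only touch quoting; the message itself is plain str.format interpolation over the collected fields. With illustrative values for a failed request:

    ns = dict(
        status=404,
        method="GET",
        ip="127.0.0.1",
        uri="/files/missing.txt",
        request_time=3.21,
        referer="http://localhost:8888/tree",
    )
    msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms referer={referer}"
    print(msg.format(**ns))
    # 404 GET /files/missing.txt (127.0.0.1) 3.21ms referer=http://localhost:8888/tree
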
""" # Check if we have resource files we need to zip - output_files = resources.get('outputs', None) + output_files = resources.get("outputs", None) if not output_files: return False # Headers - zip_filename = os.path.splitext(name)[0] + '.zip' + zip_filename = os.path.splitext(name)[0] + ".zip" handler.set_attachment_header(zip_filename) - handler.set_header('Content-Type', 'application/zip') - handler.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + handler.set_header("Content-Type", "application/zip") + handler.set_header( + "Cache-Control", "no-store, no-cache, must-revalidate, max-age=0" + ) # Prepare the zip file buffer = io.BytesIO() - zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED) - output_filename = os.path.splitext(name)[0] + resources['output_extension'] - zipf.writestr(output_filename, cast_bytes(output, 'utf-8')) + zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED) + output_filename = os.path.splitext(name)[0] + resources["output_extension"] + zipf.writestr(output_filename, cast_bytes(output, "utf-8")) for filename, data in output_files.items(): zipf.writestr(os.path.basename(filename), data) zipf.close() @@ -54,6 +59,7 @@ def respond_zip(handler, name, output, resources): handler.finish(buffer.getvalue()) return True + def get_exporter(format, **kwargs): """get an exporter, raising appropriate errors""" # if this fails, will raise 500 @@ -77,52 +83,46 @@ def get_exporter(format, **kwargs): class NbconvertFileHandler(JupyterHandler): - SUPPORTED_METHODS = ('GET',) + SUPPORTED_METHODS = ("GET",) @web.authenticated def get(self, format, path): exporter = get_exporter(format, config=self.config, log=self.log) - path = path.strip('/') + path = path.strip("/") # If the notebook relates to a real file (default contents manager), # give its path to nbconvert. 
- if hasattr(self.contents_manager, '_get_os_path'): + if hasattr(self.contents_manager, "_get_os_path"): os_path = self.contents_manager._get_os_path(path) ext_resources_dir, basename = os.path.split(os_path) else: ext_resources_dir = None model = self.contents_manager.get(path=path) - name = model['name'] - if model['type'] != 'notebook': + name = model["name"] + if model["type"] != "notebook": # not a notebook, redirect to files return FilesRedirectHandler.redirect_to_files(self, path) - nb = model['content'] + nb = model["content"] - self.set_header('Last-Modified', model['last_modified']) + self.set_header("Last-Modified", model["last_modified"]) # create resources dictionary - mod_date = model['last_modified'].strftime(text.date_format) + mod_date = model["last_modified"].strftime(text.date_format) nb_title = os.path.splitext(name)[0] resource_dict = { - "metadata": { - "name": nb_title, - "modified_date": mod_date - }, - "config_dir": self.application.settings['config_dir'] + "metadata": {"name": nb_title, "modified_date": mod_date}, + "config_dir": self.application.settings["config_dir"], } if ext_resources_dir: - resource_dict['metadata']['path'] = ext_resources_dir + resource_dict["metadata"]["path"] = ext_resources_dir try: - output, resources = exporter.from_notebook_node( - nb, - resources=resource_dict - ) + output, resources = exporter.from_notebook_node(nb, resources=resource_dict) except Exception as e: self.log.exception("nbconvert failed: %s", e) raise web.HTTPError(500, "nbconvert failed: %s" % e) @@ -131,36 +131,42 @@ def get(self, format, path): return # Force download if requested - if self.get_argument('download', 'false').lower() == 'true': - filename = os.path.splitext(name)[0] + resources['output_extension'] + if self.get_argument("download", "false").lower() == "true": + filename = os.path.splitext(name)[0] + resources["output_extension"] self.set_attachment_header(filename) # MIME type if exporter.output_mimetype: - self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + self.set_header( + "Content-Type", "%s; charset=utf-8" % exporter.output_mimetype + ) - self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0') + self.set_header( + "Cache-Control", "no-store, no-cache, must-revalidate, max-age=0" + ) self.finish(output) class NbconvertPostHandler(JupyterHandler): - SUPPORTED_METHODS = ('POST',) + SUPPORTED_METHODS = ("POST",) @web.authenticated def post(self, format): exporter = get_exporter(format, config=self.config) model = self.get_json_body() - name = model.get('name', 'notebook.ipynb') - nbnode = from_dict(model['content']) + name = model.get("name", "notebook.ipynb") + nbnode = from_dict(model["content"]) try: - output, resources = exporter.from_notebook_node(nbnode, resources={ - "metadata": {"name": name[:name.rfind('.')],}, - "config_dir": self.application.settings['config_dir'], - }) + output, resources = exporter.from_notebook_node( + nbnode, + resources={ + "metadata": {"name": name[: name.rfind(".")],}, + "config_dir": self.application.settings["config_dir"], + }, + ) except Exception as e: raise web.HTTPError(500, "nbconvert failed: %s" % e) @@ -169,21 +175,21 @@ def post(self, format): # MIME type if exporter.output_mimetype: - self.set_header('Content-Type', - '%s; charset=utf-8' % exporter.output_mimetype) + self.set_header( + "Content-Type", "%s; charset=utf-8" % exporter.output_mimetype + ) self.finish(output) 
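Both handlers above reduce to the same nbconvert call; a minimal sketch, assuming nbconvert and nbformat are installed and using a trivial notebook node:

    import nbformat
    from nbconvert import HTMLExporter

    nb = nbformat.v4.new_notebook(
        cells=[nbformat.v4.new_markdown_cell("# Example")]
    )
    output, resources = HTMLExporter().from_notebook_node(
        nb, resources={"metadata": {"name": "example"}}
    )
    # resources["output_extension"] is ".html" here
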
-#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _format_regex = r"(?P\w+)" default_handlers = [ (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler), - (r"/nbconvert/%s%s" % (_format_regex, path_regex), - NbconvertFileHandler), + (r"/nbconvert/%s%s" % (_format_regex, path_regex), NbconvertFileHandler), ] diff --git a/jupyter_server/prometheus/log_functions.py b/jupyter_server/prometheus/log_functions.py index 6f13fc6eb8..1f36ade3e3 100644 --- a/jupyter_server/prometheus/log_functions.py +++ b/jupyter_server/prometheus/log_functions.py @@ -19,6 +19,6 @@ def prometheus_log_method(handler): """ HTTP_REQUEST_DURATION_SECONDS.labels( method=handler.request.method, - handler='{}.{}'.format(handler.__class__.__module__, type(handler).__name__), - status_code=handler.get_status() + handler="{}.{}".format(handler.__class__.__module__, type(handler).__name__), + status_code=handler.get_status(), ).observe(handler.request.request_time()) diff --git a/jupyter_server/prometheus/metrics.py b/jupyter_server/prometheus/metrics.py index abc9d0e16b..2180dd6b67 100644 --- a/jupyter_server/prometheus/metrics.py +++ b/jupyter_server/prometheus/metrics.py @@ -10,18 +10,17 @@ HTTP_REQUEST_DURATION_SECONDS = Histogram( - 'http_request_duration_seconds', - 'duration in seconds for all HTTP requests', - ['method', 'handler', 'status_code'], + "http_request_duration_seconds", + "duration in seconds for all HTTP requests", + ["method", "handler", "status_code"], ) TERMINAL_CURRENTLY_RUNNING_TOTAL = Gauge( - 'terminal_currently_running_total', - 'counter for how many terminals are running', + "terminal_currently_running_total", "counter for how many terminals are running", ) KERNEL_CURRENTLY_RUNNING_TOTAL = Gauge( - 'kernel_currently_running_total', - 'counter for how many kernels are running labeled by type', - ['type'] + "kernel_currently_running_total", + "counter for how many kernels are running labeled by type", + ["type"], ) diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index 9ff6d280ca..896a64d5fb 100755 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -6,6 +6,70 @@ from __future__ import absolute_import, print_function +from jupyter_server.extension.serverextension import ServerExtensionApp +from .utils import url_path_join, check_pid, url_escape, urljoin, pathname2url +from ._tz import utcnow, utcfromtimestamp +from jupyter_server._sysinfo import get_sys_info +from jupyter_core.paths import jupyter_runtime_dir, jupyter_path +from ipython_genutils import py3compat +from traitlets import ( + Any, + Dict, + Unicode, + Integer, + List, + Bool, + Bytes, + Instance, + TraitError, + Type, + Float, + observe, + default, + validate, +) +from nbformat.sign import NotebookNotary +from jupyter_client.session import Session +from jupyter_client.kernelspec import ( + KernelSpecManager, + NoSuchKernel, + NATIVE_KERNEL_NAME, +) +from jupyter_client import KernelManager +from jupyter_core.paths import jupyter_config_path +from jupyter_core.application import ( + JupyterApp, + base_flags, + base_aliases, +) +from traitlets.config.application import catch_config_error, boolean_flag +from traitlets.config import Config +from .base.handlers import FileFindHandler +from .auth.logout 
import LogoutHandler +from .auth.login import LoginHandler +from .gateway.managers import ( + GatewayKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, +) +from .services.sessions.sessionmanager import SessionManager +from .services.contents.largefilemanager import LargeFileManager +from .services.contents.filemanager import FileContentsManager +from .services.contents.manager import ContentsManager +from .services.config import ConfigManager +from .services.kernels.kernelmanager import MappingKernelManager +from .log import log_request +from .base.handlers import MainHandler, RedirectWithParams, Template404 +from jupyter_server import ( + DEFAULT_STATIC_FILES_PATH, + DEFAULT_TEMPLATE_PATH_LIST, + __version__, +) +from tornado.log import LogFormatter, app_log, access_log, gen_log +from tornado.httputil import url_concat +from tornado import web +from tornado import httpserver import jupyter_server import binascii import datetime @@ -42,6 +106,7 @@ # Install the pyzmq ioloop. This has to be done before anything else from # tornado is imported. from zmq.eventloop import ioloop + ioloop.install() # check for tornado 3.1.0 @@ -52,61 +117,19 @@ try: version_info = tornado.version_info except AttributeError: - raise ImportError(_("The Jupyter Server requires tornado >= 4.0, but you have < 1.1.0")) -if version_info < (4,0): - raise ImportError(_("The Jupyter Server requires tornado >= 4.0, but you have %s") % tornado.version) - -from tornado import httpserver -from tornado import web -from tornado.httputil import url_concat -from tornado.log import LogFormatter, app_log, access_log, gen_log - -from jupyter_server import ( - DEFAULT_STATIC_FILES_PATH, - DEFAULT_TEMPLATE_PATH_LIST, - __version__, -) - -from .base.handlers import MainHandler, RedirectWithParams, Template404 -from .log import log_request -from .services.kernels.kernelmanager import MappingKernelManager -from .services.config import ConfigManager -from .services.contents.manager import ContentsManager -from .services.contents.filemanager import FileContentsManager -from .services.contents.largefilemanager import LargeFileManager -from .services.sessions.sessionmanager import SessionManager -from .gateway.managers import GatewayKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient - -from .auth.login import LoginHandler -from .auth.logout import LogoutHandler -from .base.handlers import FileFindHandler - -from traitlets.config import Config -from traitlets.config.application import catch_config_error, boolean_flag -from jupyter_core.application import ( - JupyterApp, base_flags, base_aliases, -) -from jupyter_core.paths import jupyter_config_path -from jupyter_client import KernelManager -from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, NATIVE_KERNEL_NAME -from jupyter_client.session import Session -from nbformat.sign import NotebookNotary -from traitlets import ( - Any, Dict, Unicode, Integer, List, Bool, Bytes, Instance, - TraitError, Type, Float, observe, default, validate -) -from ipython_genutils import py3compat -from jupyter_core.paths import jupyter_runtime_dir, jupyter_path -from jupyter_server._sysinfo import get_sys_info - -from ._tz import utcnow, utcfromtimestamp -from .utils import url_path_join, check_pid, url_escape, urljoin, pathname2url + raise ImportError( + _("The Jupyter Server requires tornado >= 4.0, but you have < 1.1.0") + ) +if version_info < (4, 0): + raise ImportError( + _("The Jupyter Server requires tornado >= 4.0, but you have 
%s") + % tornado.version + ) -from jupyter_server.extension.serverextension import ServerExtensionApp -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Module globals -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _examples = """ jupyter server # start the server @@ -116,27 +139,30 @@ JUPYTER_SERVICE_HANDLERS = dict( auth=None, - api=['jupyter_server.services.api.handlers'], - config=['jupyter_server.services.config.handlers'], - contents=['jupyter_server.services.contents.handlers'], - edit=['jupyter_server.edit.handlers'], - files=['jupyter_server.files.handlers'], - kernels=['jupyter_server.services.kernels.handlers'], + api=["jupyter_server.services.api.handlers"], + config=["jupyter_server.services.config.handlers"], + contents=["jupyter_server.services.contents.handlers"], + edit=["jupyter_server.edit.handlers"], + files=["jupyter_server.files.handlers"], + kernels=["jupyter_server.services.kernels.handlers"], kernelspecs=[ - 'jupyter_server.kernelspecs.handlers', - 'jupyter_server.services.kernelspecs.handlers'], + "jupyter_server.kernelspecs.handlers", + "jupyter_server.services.kernelspecs.handlers", + ], nbconvert=[ - 'jupyter_server.nbconvert.handlers', - 'jupyter_server.services.nbconvert.handlers'], - security=['jupyter_server.services.security.handlers'], - sessions=['jupyter_server.services.sessions.handlers'], - shutdown=['jupyter_server.services.shutdown'], - view=['jupyter_server.view.handlers'] + "jupyter_server.nbconvert.handlers", + "jupyter_server.services.nbconvert.handlers", + ], + security=["jupyter_server.services.security.handlers"], + sessions=["jupyter_server.services.sessions.handlers"], + shutdown=["jupyter_server.services.shutdown"], + view=["jupyter_server.view.handlers"], ) -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Helper functions -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + def random_ports(port, n): """Generate a list of n random ports near the given port. 
@@ -146,43 +172,75 @@ def random_ports(port, n): """ for i in range(min(5, n)): yield port + i - for i in range(n-5): - yield max(1, port + random.randint(-2*n, 2*n)) + for i in range(n - 5): + yield max(1, port + random.randint(-2 * n, 2 * n)) + def load_handlers(name): """Load the (URL pattern, handler) tuples for each component.""" - mod = __import__(name, fromlist=['default_handlers']) + mod = __import__(name, fromlist=["default_handlers"]) return mod.default_handlers -#----------------------------------------------------------------------------- + +# ----------------------------------------------------------------------------- # The Tornado web application -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- -class ServerWebApplication(web.Application): - def __init__(self, jupyter_app, default_services, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, - config_manager, extra_services, log, - base_url, default_url, settings_overrides, jinja_env_options): +class ServerWebApplication(web.Application): + def __init__( + self, + jupyter_app, + default_services, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + ): settings = self.init_settings( - jupyter_app, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, config_manager, - extra_services, log, base_url, - default_url, settings_overrides, jinja_env_options) + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options, + ) handlers = self.init_handlers(default_services, settings) super(ServerWebApplication, self).__init__(handlers, **settings) - def init_settings(self, jupyter_app, kernel_manager, contents_manager, - session_manager, kernel_spec_manager, - config_manager, extra_services, - log, base_url, default_url, settings_overrides, - jinja_env_options=None): + def init_settings( + self, + jupyter_app, + kernel_manager, + contents_manager, + session_manager, + kernel_spec_manager, + config_manager, + extra_services, + log, + base_url, + default_url, + settings_overrides, + jinja_env_options=None, + ): _template_path = settings_overrides.get( - "template_path", - jupyter_app.template_file_path, + "template_path", jupyter_app.template_file_path, ) if isinstance(_template_path, py3compat.string_types): _template_path = (_template_path,) @@ -191,20 +249,28 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, jenv_opt = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) - env = Environment(loader=FileSystemLoader(template_path), extensions=['jinja2.ext.i18n'], **jenv_opt) + env = Environment( + loader=FileSystemLoader(template_path), + extensions=["jinja2.ext.i18n"], + **jenv_opt + ) sys_info = get_sys_info() # If the user is running the server in a git directory, make the assumption # that this is a dev install and suggest to the developer `npm run build:watch`. 
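random_ports above yields five sequential candidates and then randomized ones; a caller might probe each until a bind succeeds. The probe helper here is illustrative, not part of the patch:

    import socket

    from jupyter_server.serverapp import random_ports

    def port_is_free(port):
        # Quick availability probe: can we bind to it right now?
        with socket.socket() as s:
            try:
                s.bind(("localhost", port))
            except OSError:
                return False
        return True

    chosen = next(p for p in random_ports(8888, 50) if port_is_free(p))
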
- base_dir = os.path.realpath(os.path.join(__file__, '..', '..')) - dev_mode = os.path.exists(os.path.join(base_dir, '.git')) + base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) + dev_mode = os.path.exists(os.path.join(base_dir, ".git")) - nbui = gettext.translation('nbui', localedir=os.path.join(base_dir, 'jupyter_server/i18n'), fallback=True) + nbui = gettext.translation( + "nbui", + localedir=os.path.join(base_dir, "jupyter_server/i18n"), + fallback=True, + ) env.install_gettext_translations(nbui, newstyle=False) - if sys_info['commit_source'] == 'repository': + if sys_info["commit_source"] == "repository": # don't cache (rely on 304) when working from master - version_hash = '' + version_hash = "" else: # reset the cache on server restart version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S") @@ -212,10 +278,12 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, now = utcnow() root_dir = contents_manager.root_dir - home = py3compat.str_to_unicode(os.path.expanduser('~'), encoding=sys.getfilesystemencoding()) + home = py3compat.str_to_unicode( + os.path.expanduser("~"), encoding=sys.getfilesystemencoding() + ) if root_dir.startswith(home + os.path.sep): # collapse $HOME to ~ - root_dir = '~' + root_dir[len(home):] + root_dir = "~" + root_dir[len(home) :] settings = dict( # basics @@ -225,22 +293,20 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, template_path=template_path, static_path=jupyter_app.static_file_path, static_custom_path=jupyter_app.static_custom_path, - static_handler_class = FileFindHandler, - static_url_prefix = url_path_join(base_url, '/static/'), - static_handler_args = { + static_handler_class=FileFindHandler, + static_url_prefix=url_path_join(base_url, "/static/"), + static_handler_args={ # don't cache custom.js - 'no_cache_paths': [url_path_join(base_url, 'static', 'custom')], + "no_cache_paths": [url_path_join(base_url, "static", "custom")], }, version_hash=version_hash, - # rate limits iopub_msg_rate_limit=jupyter_app.iopub_msg_rate_limit, iopub_data_rate_limit=jupyter_app.iopub_data_rate_limit, rate_limit_window=jupyter_app.rate_limit_window, - # authentication cookie_secret=jupyter_app.cookie_secret, - login_url=url_path_join(base_url, '/login'), + login_url=url_path_join(base_url, "/login"), login_handler_class=jupyter_app.login_handler_class, logout_handler_class=jupyter_app.logout_handler_class, password=jupyter_app.password, @@ -248,17 +314,14 @@ def init_settings(self, jupyter_app, kernel_manager, contents_manager, disable_check_xsrf=jupyter_app.disable_check_xsrf, allow_remote_access=jupyter_app.allow_remote_access, local_hostnames=jupyter_app.local_hostnames, - # managers kernel_manager=kernel_manager, contents_manager=contents_manager, session_manager=session_manager, kernel_spec_manager=kernel_spec_manager, config_manager=config_manager, - # handlers extra_services=extra_services, - # Jupyter stuff started=now, # place for extensions to register activity @@ -284,13 +347,13 @@ def init_handlers(self, default_services, settings): # Order matters. The first handler to match the URL will handle the request. handlers = [] # load extra services specified by users before default handlers - for service in settings['extra_services']: + for service in settings["extra_services"]: handlers.extend(load_handlers(service)) # Add auth services. 
- if 'auth' in default_services: - handlers.extend([(r"/login", settings['login_handler_class'])]) - handlers.extend([(r"/logout", settings['logout_handler_class'])]) + if "auth" in default_services: + handlers.extend([(r"/login", settings["login_handler_class"])]) + handlers.extend([(r"/logout", settings["logout_handler_class"])]) # Load default services. Raise exception if service not # found in JUPYTER_SERVICE_HANLDERS. @@ -301,19 +364,21 @@ def init_handlers(self, default_services, settings): for loc in locations: handlers.extend(load_handlers(loc)) else: - raise Exception("{} is not recognized as a jupyter_server " - "service. If this is a custom service, " - "try adding it to the " - "`extra_services` list.".format(service)) + raise Exception( + "{} is not recognized as a jupyter_server " + "service. If this is a custom service, " + "try adding it to the " + "`extra_services` list.".format(service) + ) # Add extra handlers from contents manager. - handlers.extend(settings['contents_manager'].get_extra_handlers()) + handlers.extend(settings["contents_manager"].get_extra_handlers()) # If gateway mode is enabled, replace appropriate handlers to perform redirection if GatewayClient.instance().gateway_enabled: # for each handler required for gateway, locate its pattern # in the current list and replace that entry... - gateway_handlers = load_handlers('jupyter_server.gateway.handlers') + gateway_handlers = load_handlers("jupyter_server.gateway.handlers") for i, gwh in enumerate(gateway_handlers): for j, h in enumerate(handlers): if gwh[0] == h[0]: @@ -321,34 +386,41 @@ def init_handlers(self, default_services, settings): break handlers.append( - (r"/custom/(.*)", FileFindHandler, { - 'path': settings['static_custom_path'], - 'no_cache_paths': ['/'], # don't cache anything in custom - }) + ( + r"/custom/(.*)", + FileFindHandler, + { + "path": settings["static_custom_path"], + "no_cache_paths": ["/"], # don't cache anything in custom + }, + ) ) # register base handlers last - handlers.extend(load_handlers('jupyter_server.base.handlers')) + handlers.extend(load_handlers("jupyter_server.base.handlers")) - if settings['default_url'] != '/': + if settings["default_url"] != "/": # set the URL that will be redirected from `/` handlers.append( - (r'/?', RedirectWithParams, { - 'url' : settings['default_url'], - 'permanent': False, # want 302, not 301 - }) + ( + r"/?", + RedirectWithParams, + { + "url": settings["default_url"], + "permanent": False, # want 302, not 301 + }, + ) ) else: - handlers.append( - (r"/", MainHandler)) + handlers.append((r"/", MainHandler)) # prepend base_url onto the patterns that we match new_handlers = [] for handler in handlers: - pattern = url_path_join(settings['base_url'], handler[0]) + pattern = url_path_join(settings["base_url"], handler[0]) new_handler = tuple([pattern] + list(handler[1:])) new_handlers.append(new_handler) # add 404 on the end, which will catch everything that falls through - new_handlers.append((r'(.*)', Template404)) + new_handlers.append((r"(.*)", Template404)) return new_handlers def last_activity(self): @@ -358,18 +430,18 @@ def last_activity(self): activity. 
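The pattern-prefixing loop above is worth seeing with concrete values: every handler tuple keeps its tail, and only the regex gains the base_url prefix (handler classes here are placeholder strings):

    from jupyter_server.utils import url_path_join

    base_url = "/jupyter/"
    handlers = [(r"/api/status", "StatusHandler"), (r"(.*)", "Template404")]
    new_handlers = [
        tuple([url_path_join(base_url, h[0])] + list(h[1:])) for h in handlers
    ]
    # -> [('/jupyter/api/status', 'StatusHandler'), ('/jupyter/(.*)', 'Template404')]
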
""" sources = [ - self.settings['started'], - self.settings['kernel_manager'].last_kernel_activity, + self.settings["started"], + self.settings["kernel_manager"].last_kernel_activity, ] try: - sources.append(self.settings['api_last_activity']) + sources.append(self.settings["api_last_activity"]) except KeyError: pass try: - sources.append(self.settings['terminal_last_activity']) + sources.append(self.settings["terminal_last_activity"]) except KeyError: pass - sources.extend(self.settings['last_activity_times'].values()) + sources.extend(self.settings["last_activity_times"].values()) return max(sources) @@ -383,13 +455,15 @@ class JupyterPasswordApp(JupyterApp): description = __doc__ def _config_file_default(self): - return os.path.join(self.config_dir, 'jupyter_server_config.json') + return os.path.join(self.config_dir, "jupyter_server_config.json") def start(self): from .auth.security import set_password + set_password(config_file=self.config_file) self.log.info("Wrote hashed password to %s" % self.config_file) + def shutdown_server(server_info, timeout=5, log=None): """Shutdown a notebook server in a separate process. @@ -403,35 +477,44 @@ def shutdown_server(server_info, timeout=5, log=None): failed (on Windows). """ from tornado.httpclient import HTTPClient, HTTPRequest - url = server_info['url'] - pid = server_info['pid'] - req = HTTPRequest(url + 'api/shutdown', method='POST', body=b'', headers={ - 'Authorization': 'token ' + server_info['token'] - }) - if log: log.debug("POST request to %sapi/shutdown", url) + + url = server_info["url"] + pid = server_info["pid"] + req = HTTPRequest( + url + "api/shutdown", + method="POST", + body=b"", + headers={"Authorization": "token " + server_info["token"]}, + ) + if log: + log.debug("POST request to %sapi/shutdown", url) HTTPClient().fetch(req) # Poll to see if it shut down. - for _ in range(timeout*10): + for _ in range(timeout * 10): if not check_pid(pid): - if log: log.debug("Server PID %s is gone", pid) + if log: + log.debug("Server PID %s is gone", pid) return True time.sleep(0.1) - if sys.platform.startswith('win'): + if sys.platform.startswith("win"): return False - if log: log.debug("SIGTERM to PID %s", pid) + if log: + log.debug("SIGTERM to PID %s", pid) os.kill(pid, signal.SIGTERM) # Poll to see if it shut down. for _ in range(timeout * 10): if not check_pid(pid): - if log: log.debug("Server PID %s is gone", pid) + if log: + log.debug("Server PID %s is gone", pid) return True time.sleep(0.1) - if log: log.debug("SIGKILL to PID %s", pid) + if log: + log.debug("SIGKILL to PID %s", pid) os.kill(pid, signal.SIGKILL) return True # SIGKILL cannot be caught @@ -439,15 +522,16 @@ def shutdown_server(server_info, timeout=5, log=None): class JupyterServerStopApp(JupyterApp): version = __version__ - description="Stop currently running Jupyter server for a given port" + description = "Stop currently running Jupyter server for a given port" - port = Integer(8888, config=True, - help="Port of the server to be killed. Default 8888") + port = Integer( + 8888, config=True, help="Port of the server to be killed. 
Default 8888" + ) def parse_command_line(self, argv=None): super(JupyterServerStopApp, self).parse_command_line(argv) if self.extra_args: - self.port=int(self.extra_args[0]) + self.port = int(self.extra_args[0]) def shutdown_server(self, server): return shutdown_server(server, log=self.log) @@ -457,38 +541,55 @@ def start(self): if not servers: self.exit("There are no running servers") for server in servers: - if server['port'] == self.port: + if server["port"] == self.port: print("Shutting down server on port", self.port, "...") if not self.shutdown_server(server): sys.exit("Could not stop server") return else: - print("There is currently no server running on port {}".format(self.port), file=sys.stderr) + print( + "There is currently no server running on port {}".format(self.port), + file=sys.stderr, + ) print("Ports currently in use:", file=sys.stderr) for server in servers: - print(" - {}".format(server['port']), file=sys.stderr) + print(" - {}".format(server["port"]), file=sys.stderr) self.exit(1) class JupyterServerListApp(JupyterApp): version = __version__ - description=_("List currently running notebook servers.") + description = _("List currently running notebook servers.") flags = dict( - jsonlist=({'JupyterServerListApp': {'jsonlist': True}}, - _("Produce machine-readable JSON list output.")), - json=({'JupyterServerListApp': {'json': True}}, - _("Produce machine-readable JSON object on each line of output.")), + jsonlist=( + {"JupyterServerListApp": {"jsonlist": True}}, + _("Produce machine-readable JSON list output."), + ), + json=( + {"JupyterServerListApp": {"json": True}}, + _("Produce machine-readable JSON object on each line of output."), + ), ) - jsonlist = Bool(False, config=True, - help=_("If True, the output will be a JSON list of objects, one per " - "active notebook server, each with the details from the " - "relevant server info file.")) - json = Bool(False, config=True, - help=_("If True, each line of output will be a JSON object with the " - "details from the server info file. For a JSON list output, " - "see the JupyterServerListApp.jsonlist configuration value")) + jsonlist = Bool( + False, + config=True, + help=_( + "If True, the output will be a JSON list of objects, one per " + "active notebook server, each with the details from the " + "relevant server info file." + ), + ) + json = Bool( + False, + config=True, + help=_( + "If True, each line of output will be a JSON object with the " + "details from the server info file. 
For a JSON list output, " + "see the JupyterServerListApp.jsonlist configuration value" + ), + ) def start(self): serverinfo_list = list(list_running_servers(self.runtime_dir)) @@ -500,20 +601,22 @@ def start(self): else: print("Currently running servers:") for serverinfo in serverinfo_list: - url = serverinfo['url'] - if serverinfo.get('token'): - url = url + '?token=%s' % serverinfo['token'] - print(url, "::", serverinfo['root_dir']) + url = serverinfo["url"] + if serverinfo.get("token"): + url = url + "?token=%s" % serverinfo["token"] + print(url, "::", serverinfo["root_dir"]) + -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Aliases and Flags -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + flags = dict(base_flags) -flags['allow-root']=( - {'ServerApp' : {'allow_root' : True}}, - _("Allow the server to be run from root user.") +flags["allow-root"] = ( + {"ServerApp": {"allow_root": True}}, + _("Allow the server to be run from root user."), ) flags["no-browser"] = ( {"ServerApp": {"open_browser": False}}, @@ -521,45 +624,63 @@ def start(self): ) # Add notebook manager flags -flags.update(boolean_flag('script', 'FileContentsManager.save_script', - 'DEPRECATED, IGNORED', - 'DEPRECATED, IGNORED')) +flags.update( + boolean_flag( + "script", + "FileContentsManager.save_script", + "DEPRECATED, IGNORED", + "DEPRECATED, IGNORED", + ) +) aliases = dict(base_aliases) -aliases.update({ - 'ip': 'ServerApp.ip', - 'port': 'ServerApp.port', - 'port-retries': 'ServerApp.port_retries', - 'transport': 'KernelManager.transport', - 'keyfile': 'ServerApp.keyfile', - 'certfile': 'ServerApp.certfile', - 'client-ca': 'ServerApp.client_ca', - 'notebook-dir': 'ServerApp.root_dir', - 'browser': 'ServerApp.browser', - 'pylab': 'ServerApp.pylab', - 'gateway-url': 'GatewayClient.url', -}) - -#----------------------------------------------------------------------------- +aliases.update( + { + "ip": "ServerApp.ip", + "port": "ServerApp.port", + "port-retries": "ServerApp.port_retries", + "transport": "KernelManager.transport", + "keyfile": "ServerApp.keyfile", + "certfile": "ServerApp.certfile", + "client-ca": "ServerApp.client_ca", + "notebook-dir": "ServerApp.root_dir", + "browser": "ServerApp.browser", + "pylab": "ServerApp.pylab", + "gateway-url": "GatewayClient.url", + } +) + +# ----------------------------------------------------------------------------- # ServerApp -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + class ServerApp(JupyterApp): - name = 'jupyter-server' + name = "jupyter-server" version = __version__ - description = _("""The Jupyter Server. + description = _( + """The Jupyter Server. 
- This launches a Tornado-based Jupyter Server.""") + This launches a Tornado-based Jupyter Server.""" + ) examples = _examples aliases = aliases flags = flags classes = [ - KernelManager, Session, MappingKernelManager, KernelSpecManager, - ContentsManager, FileContentsManager, NotebookNotary, - GatewayKernelManager, GatewayKernelSpecManager, GatewaySessionManager, GatewayClient, + KernelManager, + Session, + MappingKernelManager, + KernelSpecManager, + ContentsManager, + FileContentsManager, + NotebookNotary, + GatewayKernelManager, + GatewayKernelSpecManager, + GatewaySessionManager, + GatewayClient, ] flags = Dict(flags) aliases = Dict(aliases) @@ -575,52 +696,56 @@ class ServerApp(JupyterApp): # Subclasses can override this list to # expose a subset of these handlers. default_services = ( - 'api', - 'auth', - 'config', - 'contents', - 'edit', - 'files', - 'kernels', - 'kernelspecs', - 'nbconvert', - 'security', - 'sessions', - 'shutdown', - 'view' + "api", + "auth", + "config", + "contents", + "edit", + "files", + "kernels", + "kernelspecs", + "nbconvert", + "security", + "sessions", + "shutdown", + "view", ) _log_formatter_cls = LogFormatter - @default('log_level') + @default("log_level") def _default_log_level(self): return logging.INFO - @default('log_datefmt') + @default("log_datefmt") def _default_log_datefmt(self): """Exclude date from default date format""" return "%H:%M:%S" - @default('log_format') + @default("log_format") def _default_log_format(self): """override default log format to include time""" return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s" # file to be opened in the Jupyter server - file_to_run = Unicode('', config=True) + file_to_run = Unicode("", config=True) # Network related information - allow_origin = Unicode('', config=True, + allow_origin = Unicode( + "", + config=True, help="""Set the Access-Control-Allow-Origin header Use '*' to allow any origin to access your server. Takes precedence over allow_origin_pat. - """ + """, ) - allow_origin_pat = Unicode('', config=True, + allow_origin_pat = Unicode( + "", + config=True, help="""Use a regular expression for the Access-Control-Allow-Origin header Requests from an origin matching the expression will get replies with: @@ -630,26 +755,32 @@ def _default_log_format(self): where `origin` is the origin of the request. Ignored if allow_origin is set. - """ + """, ) - allow_credentials = Bool(False, config=True, - help=_("Set the Access-Control-Allow-Credentials: true header") + allow_credentials = Bool( + False, + config=True, + help=_("Set the Access-Control-Allow-Credentials: true header"), ) - allow_root = Bool(False, config=True, - help=_("Whether to allow the user to run the server as root.") + allow_root = Bool( + False, + config=True, + help=_("Whether to allow the user to run the server as root."), ) - default_url = Unicode('/', config=True, - help=_("The default URL to redirect to from `/`") + default_url = Unicode( + "/", config=True, help=_("The default URL to redirect to from `/`") ) - ip = Unicode('localhost', config=True, - help=_("The IP address the Jupyter server will listen on.") + ip = Unicode( + "localhost", + config=True, + help=_("The IP address the Jupyter server will listen on."), ) - @default('ip') + @default("ip") def _default_ip(self): """Return localhost if available, 127.0.0.1 otherwise. 
@@ -657,23 +788,28 @@ def _default_ip(self): """ s = socket.socket() try: - s.bind(('localhost', 0)) + s.bind(("localhost", 0)) except socket.error as e: - self.log.warning(_("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e) - return '127.0.0.1' + self.log.warning( + _("Cannot bind to localhost, using 127.0.0.1 as default ip\n%s"), e + ) + return "127.0.0.1" else: s.close() - return 'localhost' + return "localhost" - @validate('ip') + @validate("ip") def _valdate_ip(self, proposal): - value = proposal['value'] - if value == u'*': - value = u'' + value = proposal["value"] + if value == u"*": + value = u"" return value - custom_display_url = Unicode(u'', config=True, - help=_("""Override URL shown to users. + custom_display_url = Unicode( + u"", + config=True, + help=_( + """Override URL shown to users. Replace actual URL, including protocol, address, port and base URL, with the given value when displaying URL to the users. Do not change @@ -682,51 +818,64 @@ def _valdate_ip(self, proposal): This option is intended to be used when the URL to display to the user cannot be determined reliably by the Jupyter server (proxified - or containerized setups for example).""") + or containerized setups for example).""" + ), ) - port = Integer(8888, config=True, - help=_("The port the Jupyter server will listen on.") + port = Integer( + 8888, config=True, help=_("The port the Jupyter server will listen on.") ) - port_retries = Integer(50, config=True, - help=_("The number of additional ports to try if the specified port is not available.") + port_retries = Integer( + 50, + config=True, + help=_( + "The number of additional ports to try if the specified port is not available." + ), ) - certfile = Unicode(u'', config=True, - help=_("""The full path to an SSL/TLS certificate file.""") + certfile = Unicode( + u"", config=True, help=_("""The full path to an SSL/TLS certificate file.""") ) - keyfile = Unicode(u'', config=True, - help=_("""The full path to a private key file for usage with SSL/TLS.""") + keyfile = Unicode( + u"", + config=True, + help=_("""The full path to a private key file for usage with SSL/TLS."""), ) - client_ca = Unicode(u'', config=True, - help=_("""The full path to a certificate authority certificate for SSL/TLS client authentication.""") + client_ca = Unicode( + u"", + config=True, + help=_( + """The full path to a certificate authority certificate for SSL/TLS client authentication.""" + ), ) - cookie_secret_file = Unicode(config=True, - help=_("""The file where the cookie secret is stored.""") + cookie_secret_file = Unicode( + config=True, help=_("""The file where the cookie secret is stored.""") ) - @default('cookie_secret_file') + @default("cookie_secret_file") def _default_cookie_secret_file(self): - return os.path.join(self.runtime_dir, 'jupytr_cookie_secret') + return os.path.join(self.runtime_dir, "jupytr_cookie_secret") - cookie_secret = Bytes(b'', config=True, + cookie_secret = Bytes( + b"", + config=True, help="""The random bytes used to secure cookies. By default this is a new random number every time you start the server. Set it to a value in a config file to enable logins to persist across server sessions. Note: Cookie secrets should be kept private, do not share config files with cookie_secret stored in plaintext (you can read the value from a file). 
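[The _default_ip probe above reads well as a standalone function; a sketch, assuming Python 3 (where socket.error is just OSError):

    import socket

    def probe_default_ip():
        """Bind-and-close probe, as in ServerApp._default_ip above."""
        s = socket.socket()
        try:
            s.bind(("localhost", 0))  # port 0: let the OS pick any free port
        except OSError:
            return "127.0.0.1"  # 'localhost' will not resolve/bind; use the literal
        else:
            return "localhost"
        finally:
            s.close()

    print(probe_default_ip())
]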
- """ + """, ) - @default('cookie_secret') + @default("cookie_secret") def _default_cookie_secret(self): if os.path.exists(self.cookie_secret_file): - with io.open(self.cookie_secret_file, 'rb') as f: + with io.open(self.cookie_secret_file, "rb") as f: key = f.read() else: key = encodebytes(os.urandom(32)) @@ -737,96 +886,113 @@ def _default_cookie_secret(self): def _write_cookie_secret_file(self, secret): """write my secret to my secret_file""" - self.log.info(_("Writing notebook server cookie secret to %s"), self.cookie_secret_file) + self.log.info( + _("Writing notebook server cookie secret to %s"), self.cookie_secret_file + ) try: with secure_write(self.cookie_secret_file, True) as f: f.write(secret) except OSError as e: - self.log.error(_("Failed to write cookie secret to %s: %s"), - self.cookie_secret_file, e) + self.log.error( + _("Failed to write cookie secret to %s: %s"), self.cookie_secret_file, e + ) - token = Unicode('', - help=_("""Token used for authenticating first-time connections to the server. + token = Unicode( + "", + help=_( + """Token used for authenticating first-time connections to the server. When no password is enabled, the default is to generate a new, random token. Setting to an empty string disables authentication altogether, which is NOT RECOMMENDED. - """) + """ + ), ).tag(config=True) _token_generated = True - @default('token') + @default("token") def _token_default(self): - if os.getenv('JUPYTER_TOKEN'): + if os.getenv("JUPYTER_TOKEN"): self._token_generated = False - return os.getenv('JUPYTER_TOKEN') + return os.getenv("JUPYTER_TOKEN") if self.password: # no token if password is enabled self._token_generated = False - return u'' + return u"" else: self._token_generated = True - return binascii.hexlify(os.urandom(24)).decode('ascii') + return binascii.hexlify(os.urandom(24)).decode("ascii") - max_body_size = Integer(512 * 1024 * 1024, config=True, + max_body_size = Integer( + 512 * 1024 * 1024, + config=True, help=""" - Sets the maximum allowed size of the client request body, specified in - the Content-Length request header field. If the size in a request + Sets the maximum allowed size of the client request body, specified in + the Content-Length request header field. If the size in a request exceeds the configured value, a malformed HTTP message is returned to the client. Note: max_body_size is applied even in streaming mode. - """ + """, ) - max_buffer_size = Integer(512 * 1024 * 1024, config=True, + max_buffer_size = Integer( + 512 * 1024 * 1024, + config=True, help=""" - Gets or sets the maximum amount of memory, in bytes, that is allocated + Gets or sets the maximum amount of memory, in bytes, that is allocated for use by the buffer manager. - """ + """, ) - @observe('token') + @observe("token") def _token_changed(self, change): self._token_generated = False - password = Unicode(u'', config=True, - help="""Hashed password to use for web authentication. + password = Unicode( + u"", + config=True, + help="""Hashed password to use for web authentication. To generate, type in a python/IPython shell: from jupyter_server.auth import passwd; passwd() The string should be of the form type:salt:hashed-password. - """ + """, ) - password_required = Bool(False, config=True, - help="""Forces users to use a password for the Jupyter server. + password_required = Bool( + False, + config=True, + help="""Forces users to use a password for the Jupyter server. 
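[For reference, the two secrets handled above are derived like this — a sketch whose expressions are taken from _token_default and _default_cookie_secret:

    import binascii
    import os
    from base64 import encodebytes

    token = binascii.hexlify(os.urandom(24)).decode("ascii")  # auth token
    cookie_secret = encodebytes(os.urandom(32))               # cookie signing key
    print(token, cookie_secret)

Persisting the cookie secret (as _write_cookie_secret_file does via secure_write) is what lets logins survive a server restart.]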
This is useful in a multi user environment, for instance when everybody in the LAN can access each other's machine through ssh. In such a case, serving on localhost is not secure since any user can connect to the Jupyter server via ssh. - """ + """, ) - allow_password_change = Bool(True, config=True, - help="""Allow password to be changed at login for the Jupyter server. + allow_password_change = Bool( + True, + config=True, + help="""Allow password to be changed at login for the Jupyter server. While loggin in with a token, the Jupyter server UI will give the opportunity to the user to enter a new password at the same time that will replace the token login mechanism. This can be set to false to prevent changing password from the UI/API. - """ + """, ) - - disable_check_xsrf = Bool(False, config=True, + disable_check_xsrf = Bool( + False, + config=True, help="""Disable cross-site-request-forgery protection Jupyter notebook 4.3.1 introduces protection from cross-site request forgeries, @@ -839,11 +1005,12 @@ def _token_changed(self, change): completely without authentication. These services can disable all authentication and security checks, with the full knowledge of what that implies. - """ + """, ) - allow_remote_access = Bool(config=True, - help="""Allow requests where the Host header doesn't point to a local server + allow_remote_access = Bool( + config=True, + help="""Allow requests where the Host header doesn't point to a local server By default, requests get a 403 forbidden response if the 'Host' header shows that the browser thinks it's on a non-local domain. @@ -855,9 +1022,10 @@ def _token_changed(self, change): Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along with hostnames configured in local_hostnames. - """) + """, + ) - @default('allow_remote_access') + @default("allow_remote_access") def _default_allow_remote(self): """Disallow remote access if we're listening only on loopback addresses""" @@ -873,10 +1041,10 @@ def _default_allow_remote(self): for info in socket.getaddrinfo(self.ip, self.port, 0, socket.SOCK_STREAM): addr = info[4][0] if not py3compat.PY3: - addr = addr.decode('ascii') + addr = addr.decode("ascii") try: - parsed = ipaddress.ip_address(addr.split('%')[0]) + parsed = ipaddress.ip_address(addr.split("%")[0]) except ValueError: self.log.warning("Unrecognised IP address: %r", addr) continue @@ -884,39 +1052,50 @@ def _default_allow_remote(self): # Macs map localhost to 'fe80::1%lo0', a link local address # scoped to the loopback interface. For now, we'll assume that # any scoped link-local address is effectively local. - if not (parsed.is_loopback - or (('%' in addr) and parsed.is_link_local)): + if not (parsed.is_loopback or (("%" in addr) and parsed.is_link_local)): return True return False else: return not addr.is_loopback - local_hostnames = List(Unicode(), ['localhost'], config=True, - help="""Hostnames to allow as local when allow_remote_access is False. + local_hostnames = List( + Unicode(), + ["localhost"], + config=True, + help="""Hostnames to allow as local when allow_remote_access is False. Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as local as well. - """ + """, ) - open_browser = Bool(False, config=True, - help="""Whether to open in a browser after starting. + open_browser = Bool( + False, + config=True, + help="""Whether to open in a browser after starting. 
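[The loopback test inside _default_allow_remote above is self-contained enough to demo; a Python 3 sketch (the py3compat decode branch is dropped):

    import ipaddress
    import socket

    def addr_is_local(addr):
        try:
            parsed = ipaddress.ip_address(addr.split("%")[0])
        except ValueError:
            return False  # unrecognised address: treat as non-local
        # a scoped link-local address (e.g. fe80::1%lo0 on macOS) counts as local
        return parsed.is_loopback or ("%" in addr and parsed.is_link_local)

    for info in socket.getaddrinfo("localhost", 8888, 0, socket.SOCK_STREAM):
        print(info[4][0], addr_is_local(info[4][0]))
]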
The specific browser used is platform dependent and determined by the python standard library `webbrowser` module, unless it is overridden using the --browser (ServerApp.browser) configuration option. - """) + """, + ) - browser = Unicode(u'', config=True, - help="""Specify what command to use to invoke a web + browser = Unicode( + u"", + config=True, + help="""Specify what command to use to invoke a web browser when starting the server. If not specified, the default browser will be determined by the `webbrowser` standard library module, which allows setting of the BROWSER environment variable to override it. - """) + """, + ) - webbrowser_open_new = Integer(2, config=True, - help=_("""Specify where to open the server on startup. This is the + webbrowser_open_new = Integer( + 2, + config=True, + help=_( + """Specify where to open the server on startup. This is the `new` argument passed to the standard library method `webbrowser.open`. The behaviour is not guaranteed, but depends on browser support. Valid values are: @@ -926,14 +1105,23 @@ def _default_allow_remote(self): - 0 opens in an existing window. See the `webbrowser.open` documentation for details. - """)) + """ + ), + ) - tornado_settings = Dict(config=True, - help=_("Supply overrides for the tornado.web.Application that the " - "Jupyter server uses.")) + tornado_settings = Dict( + config=True, + help=_( + "Supply overrides for the tornado.web.Application that the " + "Jupyter server uses." + ), + ) - websocket_compression_options = Any(None, config=True, - help=_(""" + websocket_compression_options = Any( + None, + config=True, + help=_( + """ Set the tornado compression options for websocket connections. This value will be returned from :meth:`WebSocketHandler.get_compression_options`. @@ -941,54 +1129,75 @@ def _default_allow_remote(self): A dict (even an empty one) will enable compression. See the tornado docs for WebSocketHandler.get_compression_options for details. - """) + """ + ), + ) + terminado_settings = Dict( + config=True, + help=_( + 'Supply overrides for terminado. Currently only supports "shell_command".' + ), ) - terminado_settings = Dict(config=True, - help=_('Supply overrides for terminado. Currently only supports "shell_command".')) - cookie_options = Dict(config=True, - help=_("Extra keyword arguments to pass to `set_secure_cookie`." - " See tornado's set_secure_cookie docs for details.") + cookie_options = Dict( + config=True, + help=_( + "Extra keyword arguments to pass to `set_secure_cookie`." + " See tornado's set_secure_cookie docs for details." + ), ) - get_secure_cookie_kwargs = Dict(config=True, - help=_("Extra keyword arguments to pass to `get_secure_cookie`." - " See tornado's get_secure_cookie docs for details.") + get_secure_cookie_kwargs = Dict( + config=True, + help=_( + "Extra keyword arguments to pass to `get_secure_cookie`." + " See tornado's get_secure_cookie docs for details." + ), ) ssl_options = Dict( - allow_none=True, - config=True, - help=_("""Supply SSL options for the tornado HTTPServer. - See the tornado docs for details.""")) + allow_none=True, + config=True, + help=_( + """Supply SSL options for the tornado HTTPServer. 
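[For the webbrowser_open_new values documented above, the call boils down to the following (URL and token are placeholders):

    import webbrowser

    # new=2 -> new tab, new=1 -> new window, new=0 -> reuse an existing window
    webbrowser.open("http://localhost:8888/?token=...", new=2)
]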
+ See the tornado docs for details.""" + ), + ) - jinja_environment_options = Dict(config=True, - help=_("Supply extra arguments that will be passed to Jinja environment.")) + jinja_environment_options = Dict( + config=True, + help=_("Supply extra arguments that will be passed to Jinja environment."), + ) jinja_template_vars = Dict( config=True, help=_("Extra variables to supply to jinja templates when rendering."), ) - base_url = Unicode('/', config=True, - help='''The base URL for the Jupyter server. + base_url = Unicode( + "/", + config=True, + help="""The base URL for the Jupyter server. Leading and trailing slashes can be omitted, and will automatically be added. - ''') + """, + ) - @validate('base_url') + @validate("base_url") def _update_base_url(self, proposal): - value = proposal['value'] - if not value.startswith('/'): - value = '/' + value - if not value.endswith('/'): - value = value + '/' + value = proposal["value"] + if not value.startswith("/"): + value = "/" + value + if not value.endswith("/"): + value = value + "/" return value - extra_static_paths = List(Unicode(), config=True, + extra_static_paths = List( + Unicode(), + config=True, help="""Extra paths to search for serving static files. This allows adding javascript/css to be available from the Jupyter server machine, - or overriding individual files in the IPython""" + or overriding individual files in the IPython""", ) @property @@ -996,22 +1205,25 @@ def static_file_path(self): """return extra paths + the default location""" return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] - static_custom_path = List(Unicode(), - help=_("""Path to search for custom.js, css""") + static_custom_path = List( + Unicode(), help=_("""Path to search for custom.js, css""") ) - @default('static_custom_path') + @default("static_custom_path") def _default_static_custom_path(self): return [ - os.path.join(d, 'custom') for d in ( - self.config_dir, - DEFAULT_STATIC_FILES_PATH) + os.path.join(d, "custom") + for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH) ] - extra_template_paths = List(Unicode(), config=True, - help=_("""Extra paths to search for serving jinja templates. + extra_template_paths = List( + Unicode(), + config=True, + help=_( + """Extra paths to search for serving jinja templates. - Can be used to override templates from jupyter_server.templates.""") + Can be used to override templates from jupyter_server.templates.""" + ), ) @property @@ -1019,46 +1231,54 @@ def template_file_path(self): """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST - extra_services = List(Unicode(), config=True, - help=_("""handlers that should be loaded at higher priority than the default services""") + extra_services = List( + Unicode(), + config=True, + help=_( + """handlers that should be loaded at higher priority than the default services""" + ), ) - websocket_url = Unicode("", config=True, + websocket_url = Unicode( + "", + config=True, help="""The base URL for websockets, if it differs from the HTTP server (hint: it almost certainly doesn't). 
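[The base_url validator above guarantees surrounding slashes; the same normalization as a standalone sketch:

    def normalize_base_url(value):
        if not value.startswith("/"):
            value = "/" + value
        if not value.endswith("/"):
            value = value + "/"
        return value

    assert normalize_base_url("jupyter") == "/jupyter/"
    assert normalize_base_url("/") == "/"
]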
Should be in the form of an HTTP origin: ws[s]://hostname[:port] - """ + """, ) - quit_button = Bool(True, config=True, + quit_button = Bool( + True, + config=True, help="""If True, display a button in the dashboard to quit - (shutdown the Jupyter server).""" + (shutdown the Jupyter server).""", ) contents_manager_class = Type( default_value=LargeFileManager, klass=ContentsManager, config=True, - help=_('The content manager class to use.') + help=_("The content manager class to use."), ) kernel_manager_class = Type( default_value=MappingKernelManager, config=True, - help=_('The kernel manager class to use.') + help=_("The kernel manager class to use."), ) session_manager_class = Type( default_value=SessionManager, config=True, - help=_('The session manager class to use.') + help=_("The session manager class to use."), ) config_manager_class = Type( default_value=ConfigManager, - config = True, - help=_('The config manager class to use') + config=True, + help=_("The config manager class to use"), ) kernel_spec_manager = Instance(KernelSpecManager, allow_none=True) @@ -1072,85 +1292,96 @@ def template_file_path(self): The Api of KernelSpecManager is provisional and might change without warning between this version of Jupyter and the next stable one. - """ + """, ) login_handler_class = Type( default_value=LoginHandler, klass=web.RequestHandler, config=True, - help=_('The login handler class to use.'), + help=_("The login handler class to use."), ) logout_handler_class = Type( default_value=LogoutHandler, klass=web.RequestHandler, config=True, - help=_('The logout handler class to use.'), + help=_("The logout handler class to use."), ) - trust_xheaders = Bool(False, config=True, - help=(_("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" - "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")) + trust_xheaders = Bool( + False, + config=True, + help=( + _( + "Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers" + "sent by the upstream reverse proxy. Necessary if the proxy handles SSL" + ) + ), ) info_file = Unicode() - @default('info_file') + @default("info_file") def _default_info_file(self): info_file = "jpserver-%s.json" % os.getpid() return os.path.join(self.runtime_dir, info_file) browser_open_file = Unicode() - @default('browser_open_file') + @default("browser_open_file") def _default_browser_open_file(self): basename = "jpserver-%s-open.html" % os.getpid() return os.path.join(self.runtime_dir, basename) - - pylab = Unicode('disabled', config=True, - help=_(""" + + pylab = Unicode( + "disabled", + config=True, + help=_( + """ DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. - """) + """ + ), ) - @observe('pylab') + @observe("pylab") def _update_pylab(self, change): """when --pylab is specified, display a warning and exit""" - if change['new'] != 'warn': - backend = ' %s' % change['new'] + if change["new"] != "warn": + backend = " %s" % change["new"] else: - backend = '' - self.log.error(_("Support for specifying --pylab on the command line has been removed.")) + backend = "" + self.log.error( + _("Support for specifying --pylab on the command line has been removed.") + ) self.log.error( - _("Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.").format(backend) + _( + "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself." 
+ ).format(backend) ) self.exit(1) - notebook_dir = Unicode( - config=True, - help=_("DEPRECATED, use root_dir.") - ) + notebook_dir = Unicode(config=True, help=_("DEPRECATED, use root_dir.")) - @observe('notebook_dir') + @observe("notebook_dir") def _update_notebook_dir(self, change): self.log.warning(_("notebook_dir is deprecated, use root_dir")) - self.root_dir = change['new'] + self.root_dir = change["new"] - root_dir = Unicode(config=True, - help=_("The directory to use for notebooks and kernels.") + root_dir = Unicode( + config=True, help=_("The directory to use for notebooks and kernels.") ) - @default('root_dir') + @default("root_dir") def _default_root_dir(self): if self.file_to_run: return os.path.dirname(os.path.abspath(self.file_to_run)) else: return py3compat.getcwd() - @validate('root_dir') + @validate("root_dir") def _root_dir_validate(self, proposal): - value = proposal['value'] + value = proposal["value"] # Strip any trailing slashes # *except* if it's root _, path = os.path.splitdrive(value) @@ -1164,16 +1395,22 @@ def _root_dir_validate(self, proposal): raise TraitError(trans.gettext("No such notebook dir: '%r'") % value) return value - @observe('server_extensions') + @observe("server_extensions") def _update_server_extensions(self, change): self.log.warning(_("server_extensions is deprecated, use jpserver_extensions")) - self.server_extensions = change['new'] + self.server_extensions = change["new"] - jpserver_extensions = Dict({}, config=True, - help=(_("Dict of Python modules to load as notebook server extensions." - "Entry values can be used to enable and disable the loading of" - "the extensions. The extensions will be loaded in alphabetical " - "order.")) + jpserver_extensions = Dict( + {}, + config=True, + help=( + _( + "Dict of Python modules to load as notebook server extensions." + "Entry values can be used to enable and disable the loading of" + "the extensions. The extensions will be loaded in alphabetical " + "order." + ) + ), ) reraise_server_extension_failures = Bool( @@ -1182,36 +1419,63 @@ def _update_server_extensions(self, change): help=_("Reraise exceptions encountered loading server extensions?"), ) - iopub_msg_rate_limit = Float(1000, config=True, help=_("""(msgs/sec) + iopub_msg_rate_limit = Float( + 1000, + config=True, + help=_( + """(msgs/sec) Maximum rate at which messages can be sent on iopub before they are - limited.""")) + limited.""" + ), + ) - iopub_data_rate_limit = Float(1000000, config=True, help=_("""(bytes/sec) + iopub_data_rate_limit = Float( + 1000000, + config=True, + help=_( + """(bytes/sec) Maximum rate at which stream output can be sent on iopub before they are - limited.""")) + limited.""" + ), + ) - rate_limit_window = Float(3, config=True, help=_("""(sec) Time window used to - check the message and data rate limits.""")) + rate_limit_window = Float( + 3, + config=True, + help=_( + """(sec) Time window used to + check the message and data rate limits.""" + ), + ) - shutdown_no_activity_timeout = Integer(0, config=True, - help=("Shut down the server after N seconds with no kernels or " - "terminals running and no activity. " - "This can be used together with culling idle kernels " - "(MappingKernelManager.cull_idle_timeout) to " - "shutdown the Jupyter server when it's not in use. This is not " - "precisely timed: it may shut down up to a minute later. 
" - "0 (the default) disables this automatic shutdown.") + shutdown_no_activity_timeout = Integer( + 0, + config=True, + help=( + "Shut down the server after N seconds with no kernels or " + "terminals running and no activity. " + "This can be used together with culling idle kernels " + "(MappingKernelManager.cull_idle_timeout) to " + "shutdown the Jupyter server when it's not in use. This is not " + "precisely timed: it may shut down up to a minute later. " + "0 (the default) disables this automatic shutdown." + ), ) - terminals_enabled = Bool(True, config=True, - help=_("""Set to False to disable terminals. + terminals_enabled = Bool( + True, + config=True, + help=_( + """Set to False to disable terminals. This does *not* make the server more secure by itself. Anything the user can in a terminal, they can also do in a notebook. Terminals may also be automatically disabled if the terminado package is not available. - """)) + """ + ), + ) def parse_command_line(self, argv=None): @@ -1241,33 +1505,31 @@ def init_configurables(self): self.gateway_config = GatewayClient.instance(parent=self) if self.gateway_config.gateway_enabled: - self.kernel_manager_class = 'jupyter_server.gateway.managers.GatewayKernelManager' - self.session_manager_class = 'jupyter_server.gateway.managers.GatewaySessionManager' - self.kernel_spec_manager_class = 'jupyter_server.gateway.managers.GatewayKernelSpecManager' + self.kernel_manager_class = ( + "jupyter_server.gateway.managers.GatewayKernelManager" + ) + self.session_manager_class = ( + "jupyter_server.gateway.managers.GatewaySessionManager" + ) + self.kernel_spec_manager_class = ( + "jupyter_server.gateway.managers.GatewayKernelSpecManager" + ) - self.kernel_spec_manager = self.kernel_spec_manager_class( - parent=self, - ) + self.kernel_spec_manager = self.kernel_spec_manager_class(parent=self,) self.kernel_manager = self.kernel_manager_class( parent=self, log=self.log, connection_dir=self.runtime_dir, kernel_spec_manager=self.kernel_spec_manager, ) - self.contents_manager = self.contents_manager_class( - parent=self, - log=self.log, - ) + self.contents_manager = self.contents_manager_class(parent=self, log=self.log,) self.session_manager = self.session_manager_class( parent=self, log=self.log, kernel_manager=self.kernel_manager, contents_manager=self.contents_manager, ) - self.config_manager = self.config_manager_class( - parent=self, - log=self.log, - ) + self.config_manager = self.config_manager_class(parent=self, log=self.log,) def init_logging(self): # This prevents double log messages because tornado use a root logger that @@ -1279,60 +1541,76 @@ def init_logging(self): # consistent log output name (ServerApp instead of tornado.access, etc.) 
log.name = self.log.name # hook up tornado 3's loggers to our app handlers - logger = logging.getLogger('tornado') + logger = logging.getLogger("tornado") logger.propagate = True logger.parent = self.log logger.setLevel(self.log.level) def init_webapp(self): """initialize tornado webapp""" - self.tornado_settings['allow_origin'] = self.allow_origin - self.tornado_settings['websocket_compression_options'] = self.websocket_compression_options + self.tornado_settings["allow_origin"] = self.allow_origin + self.tornado_settings[ + "websocket_compression_options" + ] = self.websocket_compression_options if self.allow_origin_pat: - self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat) - self.tornado_settings['allow_credentials'] = self.allow_credentials - self.tornado_settings['cookie_options'] = self.cookie_options - self.tornado_settings['get_secure_cookie_kwargs'] = self.get_secure_cookie_kwargs - self.tornado_settings['token'] = self.token + self.tornado_settings["allow_origin_pat"] = re.compile( + self.allow_origin_pat + ) + self.tornado_settings["allow_credentials"] = self.allow_credentials + self.tornado_settings["cookie_options"] = self.cookie_options + self.tornado_settings[ + "get_secure_cookie_kwargs" + ] = self.get_secure_cookie_kwargs + self.tornado_settings["token"] = self.token # ensure default_url starts with base_url if not self.default_url.startswith(self.base_url): self.default_url = url_path_join(self.base_url, self.default_url) if self.password_required and (not self.password): - self.log.critical(_("Jupyter servers are configured to only be run with a password.")) + self.log.critical( + _("Jupyter servers are configured to only be run with a password.") + ) self.log.critical(_("Hint: run the following command to set a password")) self.log.critical(_("\t$ python -m jupyter_server.auth password")) sys.exit(1) self.web_app = ServerWebApplication( - self, self.default_services, self.kernel_manager, self.contents_manager, - self.session_manager, self.kernel_spec_manager, - self.config_manager, self.extra_services, - self.log, self.base_url, self.default_url, self.tornado_settings, + self, + self.default_services, + self.kernel_manager, + self.contents_manager, + self.session_manager, + self.kernel_spec_manager, + self.config_manager, + self.extra_services, + self.log, + self.base_url, + self.default_url, + self.tornado_settings, self.jinja_environment_options, ) if self.certfile: - self.ssl_options['certfile'] = self.certfile + self.ssl_options["certfile"] = self.certfile if self.keyfile: - self.ssl_options['keyfile'] = self.keyfile + self.ssl_options["keyfile"] = self.keyfile if self.client_ca: - self.ssl_options['ca_certs'] = self.client_ca + self.ssl_options["ca_certs"] = self.client_ca if len(self.ssl_options) == 0: # None indicates no SSL config self.ssl_options = None else: # SSL may be missing, so only import it if it's to be used import ssl + # PROTOCOL_TLS selects the highest ssl/tls protocol version that both the client and # server support. When PROTOCOL_TLS is not available use PROTOCOL_SSLv23. 
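[Tying these pieces together: a jupyter_server_config.py sketch for the SSL traits that init_webapp copies into tornado's ssl_options, plus the Gateway URL whose presence is what makes init_configurables swap in the Gateway* manager classes. All paths and hosts are placeholders:

    # jupyter_server_config.py fragment
    c.ServerApp.certfile = "/path/to/fullchain.pem"
    c.ServerApp.keyfile = "/path/to/privkey.pem"
    # c.ServerApp.client_ca = "/path/to/client-ca.pem"  # also sets cert_reqs=CERT_REQUIRED
    # c.GatewayClient.url = "http://my-gateway-host:8888"
]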
# PROTOCOL_TLS is new in version 2.7.13, 3.5.3 and 3.6 self.ssl_options.setdefault( - 'ssl_version', - getattr(ssl, 'PROTOCOL_TLS', ssl.PROTOCOL_SSLv23) + "ssl_version", getattr(ssl, "PROTOCOL_TLS", ssl.PROTOCOL_SSLv23) ) - if self.ssl_options.get('ca_certs', False): - self.ssl_options.setdefault('cert_reqs', ssl.CERT_REQUIRED) + if self.ssl_options.get("ca_certs", False): + self.ssl_options.setdefault("cert_reqs", ssl.CERT_REQUIRED) self.login_handler_class.validate_security(self, ssl_options=self.ssl_options) @@ -1344,7 +1622,7 @@ def display_url(self): ip = parts.hostname else: path = None - if self.ip in ('', '0.0.0.0'): + if self.ip in ("", "0.0.0.0"): ip = "%s" % socket.gethostname() else: ip = self.ip @@ -1352,37 +1630,37 @@ def display_url(self): token = None if self.token: # Don't log full token if it came from config - token = self.token if self._token_generated else '...' + token = self.token if self._token_generated else "..." url = ( self.get_url(ip=ip, path=path, token=token) - + '\n or ' - + self.get_url(ip='127.0.0.1', path=path, token=token) - ) + + "\n or " + + self.get_url(ip="127.0.0.1", path=path, token=token) + ) return url @property def connection_url(self): - ip = self.ip if self.ip else 'localhost' + ip = self.ip if self.ip else "localhost" return self.get_url(ip=ip) def get_url(self, ip=None, path=None, token=None): """Build a url for the application with reasonable defaults.""" if not ip: - ip = self.ip if self.ip else 'localhost' + ip = self.ip if self.ip else "localhost" if not path: path = url_path_join(self.base_url, self.default_url) # Build query string. if token: - token = urllib.parse.urlencode({'token': token}) + token = urllib.parse.urlencode({"token": token}) # Build the URL Parts to dump. urlparts = urllib.parse.ParseResult( - scheme='https' if self.certfile else 'http', + scheme="https" if self.certfile else "http", netloc="{ip}:{port}".format(ip=ip, port=self.port), path=path, params=None, query=token, - fragment=None + fragment=None, ) return urlparts.geturl() @@ -1392,19 +1670,25 @@ def init_terminals(self): try: from .terminal import initialize - initialize(self.web_app, self.root_dir, self.connection_url, self.terminado_settings) - self.web_app.settings['terminals_available'] = True + + initialize( + self.web_app, + self.root_dir, + self.connection_url, + self.terminado_settings, + ) + self.web_app.settings["terminals_available"] = True except ImportError as e: self.log.warning(_("Terminals not available (error was %s)"), e) def init_signal(self): - if not sys.platform.startswith('win') and sys.stdin and sys.stdin.isatty(): + if not sys.platform.startswith("win") and sys.stdin and sys.stdin.isatty(): signal.signal(signal.SIGINT, self._handle_sigint) signal.signal(signal.SIGTERM, self._signal_stop) - if hasattr(signal, 'SIGUSR1'): + if hasattr(signal, "SIGUSR1"): # Windows doesn't support SIGUSR1 signal.signal(signal.SIGUSR1, self._signal_info) - if hasattr(signal, 'SIGINFO'): + if hasattr(signal, "SIGINFO"): # only on BSD-based systems signal.signal(signal.SIGINFO, self._signal_info) @@ -1431,13 +1715,13 @@ def _confirm_exit(self): This doesn't work on Windows. """ info = self.log.info - info(_('interrupted')) + info(_("interrupted")) print(self.running_server_info()) - yes = _('y') - no = _('n') + yes = _("y") + no = _("n") sys.stdout.write(_("Shutdown this Jupyter server (%s/[%s])? 
") % (yes, no)) sys.stdout.flush() - r,w,x = select.select([sys.stdin], [], [], 5) + r, w, x = select.select([sys.stdin], [], [], 5) if r: line = sys.stdin.readline() if line.lower().startswith(yes) and no not in line.lower(): @@ -1447,7 +1731,7 @@ def _confirm_exit(self): self.io_loop.add_callback_from_signal(self.io_loop.stop) return else: - print(_("No answer for 5s:"), end=' ') + print(_("No answer for 5s:"), end=" ") print(_("resuming operation...")) # no answer, or answer is no: # set it back to original SIGINT handler @@ -1471,7 +1755,7 @@ def init_server_extension_config(self): """Consolidate server extensions specified by all configs. The resulting list is stored on self.jpserver_extensions and updates config object. - + The extension API is experimental, and may change in future releases. """ # Load server extensions with ConfigManager. @@ -1484,7 +1768,7 @@ def init_server_extension_config(self): config_path.insert(0, self.config_dir) manager = ConfigManager(read_config_path=config_path) section = manager.get(self.config_file_name) - extensions = section.get('ServerApp', {}).get('jpserver_extensions', {}) + extensions = section.get("ServerApp", {}).get("jpserver_extensions", {}) for modulename, enabled in sorted(extensions.items()): if modulename not in self.jpserver_extensions: @@ -1496,7 +1780,7 @@ def init_server_extensions(self): Import the module, then call the load_jupyter_server_extension function, if one exists. - + The extension API is experimental, and may change in future releases. """ # Initialize extensions @@ -1504,18 +1788,24 @@ def init_server_extensions(self): if enabled: try: mod = importlib.import_module(modulename) - func = getattr(mod, 'load_jupyter_server_extension', None) + func = getattr(mod, "load_jupyter_server_extension", None) if func is not None: func(self) # Add debug log for loaded extensions. self.log.debug("%s is enabled and loaded." % modulename) else: - self.log.warning("%s is enabled but no `load_jupyter_server_extension` function was found" % modulename) + self.log.warning( + "%s is enabled but no `load_jupyter_server_extension` function was found" + % modulename + ) except Exception: if self.reraise_server_extension_failures: raise - self.log.warning(_("Error loading server extension %s"), modulename, - exc_info=True) + self.log.warning( + _("Error loading server extension %s"), + modulename, + exc_info=True, + ) def init_mime_overrides(self): # On some Windows machines, an application has registered incorrect @@ -1524,40 +1814,42 @@ def init_mime_overrides(self): # reject these files. 
We know the mimetype always needs to be text/css for css # and application/javascript for JS, so we override it here # and explicitly tell the mimetypes to not trust the Windows registry - if os.name == 'nt': + if os.name == "nt": # do not trust windows registry, which regularly has bad info mimetypes.init(files=[]) # ensure css, js are correct, which are required for pages to function - mimetypes.add_type('text/css', '.css') - mimetypes.add_type('application/javascript', '.js') + mimetypes.add_type("text/css", ".css") + mimetypes.add_type("application/javascript", ".js") def shutdown_no_activity(self): """Shutdown server on timeout when there are no kernels or terminals.""" km = self.kernel_manager if len(km) != 0: - return # Kernels still running + return # Kernels still running try: - term_mgr = self.web_app.settings['terminal_manager'] + term_mgr = self.web_app.settings["terminal_manager"] except KeyError: pass # Terminals not enabled else: if term_mgr.terminals: - return # Terminals still running + return # Terminals still running - seconds_since_active = \ - (utcnow() - self.web_app.last_activity()).total_seconds() - self.log.debug("No activity for %d seconds.", - seconds_since_active) + seconds_since_active = (utcnow() - self.web_app.last_activity()).total_seconds() + self.log.debug("No activity for %d seconds.", seconds_since_active) if seconds_since_active > self.shutdown_no_activity_timeout: - self.log.info("No kernels or terminals for %d seconds; shutting down.", - seconds_since_active) + self.log.info( + "No kernels or terminals for %d seconds; shutting down.", + seconds_since_active, + ) self.stop() def init_shutdown_no_activity(self): if self.shutdown_no_activity_timeout > 0: - self.log.info("Will shut down after %d seconds with no kernels or terminals.", - self.shutdown_no_activity_timeout) + self.log.info( + "Will shut down after %d seconds with no kernels or terminals.", + self.shutdown_no_activity_timeout, + ) pc = ioloop.PeriodicCallback(self.shutdown_no_activity, 60000) pc.start() @@ -1568,37 +1860,44 @@ def http_server(self): return self._http_server except AttributeError: raise AttributeError( - 'An HTTPServer instance has not been created for the ' - 'Server Web Application. To create an HTTPServer for this ' - 'application, call `.init_httpserver()`.' - ) + "An HTTPServer instance has not been created for the " + "Server Web Application. To create an HTTPServer for this " + "application, call `.init_httpserver()`." + ) def init_httpserver(self): """Creates an instance of a Tornado HTTPServer for the Server Web Application - and sets the http_server attribute. + and sets the http_server attribute. """ # Check that a web_app has been initialized before starting a server. - if not hasattr(self, 'web_app'): - raise AttributeError('A tornado web application has not be initialized. ' - 'Try calling `.init_webapp()` first.') - + if not hasattr(self, "web_app"): + raise AttributeError( + "A tornado web application has not be initialized. " + "Try calling `.init_webapp()` first." + ) + # Create an instance of the server. 
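[Two of the mechanisms above are easy to illustrate in isolation. First, the mimetype override: rebuild the table without consulting external maps (passing files=[] is what skips the Windows registry), then pin the two types pages require:

    import mimetypes

    mimetypes.init(files=[])
    mimetypes.add_type("text/css", ".css")
    mimetypes.add_type("application/javascript", ".js")
    print(mimetypes.guess_type("style.css"))  # ('text/css', None)

Second, the hook called by init_server_extensions: a minimal, hypothetical extension module (my_extension is a made-up name):

    # my_extension.py
    def load_jupyter_server_extension(serverapp):
        """Called once at startup with the running ServerApp instance."""
        serverapp.log.info("my_extension loaded")
        # handlers could be registered via serverapp.web_app.add_handlers(...)

    # enabled in config with:
    #   c.ServerApp.jpserver_extensions = {"my_extension": True}
]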
self._http_server = httpserver.HTTPServer( - self.web_app, + self.web_app, ssl_options=self.ssl_options, xheaders=self.trust_xheaders, max_body_size=self.max_body_size, - max_buffer_size=self.max_buffer_size + max_buffer_size=self.max_buffer_size, ) success = None - for port in random_ports(self.port, self.port_retries+1): + for port in random_ports(self.port, self.port_retries + 1): try: self.http_server.listen(port, self.ip) except socket.error as e: if e.errno == errno.EADDRINUSE: - self.log.info(_('The port %i is already in use, trying another port.') % port) + self.log.info( + _("The port %i is already in use, trying another port.") % port + ) continue - elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)): + elif e.errno in ( + errno.EACCES, + getattr(errno, "WSAEACCES", errno.EACCES), + ): self.log.warning(_("Permission to listen on port %i denied") % port) continue else: @@ -1608,8 +1907,12 @@ def init_httpserver(self): success = True break if not success: - self.log.critical(_('ERROR: the Jupyter server could not be started because ' - 'no available port could be found.')) + self.log.critical( + _( + "ERROR: the Jupyter server could not be started because " + "no available port could be found." + ) + ) self.exit(1) @staticmethod @@ -1626,6 +1929,7 @@ def _init_asyncio_patch(): """ if sys.platform.startswith("win") and sys.version_info >= (3, 8): import asyncio + try: from asyncio import ( WindowsProactorEventLoopPolicy, @@ -1635,7 +1939,10 @@ def _init_asyncio_patch(): pass # not affected else: - if type(asyncio.get_event_loop_policy()) is WindowsProactorEventLoopPolicy: + if ( + type(asyncio.get_event_loop_policy()) + is WindowsProactorEventLoopPolicy + ): # WindowsProactorEventLoopPolicy is not compatible with tornado 6 # fallback to the pre-3.8 default of Selector asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) @@ -1648,11 +1955,11 @@ def initialize(self, argv=None, load_extensions=True, new_httpserver=True): ---------- argv: list or None CLI arguments to parse. - + load_extensions: bool If True, the server will load server extensions listed in the jpserver_extension trait. Otherwise, no server extensions will be loaded. - + new_httpserver: bool If True, a tornado HTTPServer instance will be created and configured for the Server Web Application. This will set the http_server attribute of this class. @@ -1683,7 +1990,9 @@ def cleanup_kernels(self): but explicit shutdown allows the KernelManagers to cleanup the connection files. """ n_kernels = len(self.kernel_manager.list_kernel_ids()) - kernel_msg = trans.ngettext('Shutting down %d kernel', 'Shutting down %d kernels', n_kernels) + kernel_msg = trans.ngettext( + "Shutting down %d kernel", "Shutting down %d kernels", n_kernels + ) self.log.info(kernel_msg % n_kernels) self.kernel_manager.shutdown_all() @@ -1692,29 +2001,38 @@ def running_server_info(self, kernel_count=True): info = self.contents_manager.info_string() + "\n" if kernel_count: n_kernels = len(self.kernel_manager.list_kernel_ids()) - kernel_msg = trans.ngettext("%d active kernel", "%d active kernels", n_kernels) + kernel_msg = trans.ngettext( + "%d active kernel", "%d active kernels", n_kernels + ) info += kernel_msg % n_kernels info += "\n" # Format the info so that the URL fits on a single line in 80 char display - info += _("Jupyter Server {version} is running at:\n{url}". 
- format(version=ServerApp.version, url=self.display_url)) + info += _( + "Jupyter Server {version} is running at:\n{url}".format( + version=ServerApp.version, url=self.display_url + ) + ) if self.gateway_config.gateway_enabled: - info += _("\nKernels will be managed by the Gateway server running at:\n%s") % self.gateway_config.url + info += ( + _("\nKernels will be managed by the Gateway server running at:\n%s") + % self.gateway_config.url + ) return info def server_info(self): """Return a JSONable dict of information about this server.""" - return {'url': self.connection_url, - 'hostname': self.ip if self.ip else 'localhost', - 'port': self.port, - 'secure': bool(self.certfile), - 'base_url': self.base_url, - 'token': self.token, - 'root_dir': os.path.abspath(self.root_dir), - 'password': bool(self.password), - 'pid': os.getpid(), - 'version': ServerApp.version, - } + return { + "url": self.connection_url, + "hostname": self.ip if self.ip else "localhost", + "port": self.port, + "secure": bool(self.certfile), + "base_url": self.base_url, + "token": self.token, + "root_dir": os.path.abspath(self.root_dir), + "password": bool(self.password), + "pid": os.getpid(), + "version": ServerApp.version, + } def write_server_info_file(self): """Write the result of server_info() to the JSON file info_file.""" @@ -1722,8 +2040,9 @@ def write_server_info_file(self): with secure_write(self.info_file) as f: json.dump(self.server_info(), f, indent=2, sort_keys=True) except OSError as e: - self.log.error(_("Failed to write server-info to %s: %s"), - self.info_file, e) + self.log.error( + _("Failed to write server-info to %s: %s"), self.info_file, e + ) def remove_server_info_file(self): """Remove the jpserver-.json file created for this server. @@ -1742,18 +2061,18 @@ def write_browser_open_file(self): This can be used to open the notebook in a browser """ # default_url contains base_url, but so does connection_url - open_url = self.default_url[len(self.base_url):] + open_url = self.default_url[len(self.base_url) :] - with open(self.browser_open_file, 'w', encoding='utf-8') as f: + with open(self.browser_open_file, "w", encoding="utf-8") as f: self._write_browser_open_file(open_url, f) def _write_browser_open_file(self, url, fh): if self.token: - url = url_concat(url, {'token': self.token}) + url = url_concat(url, {"token": self.token}) url = url_path_join(self.connection_url, url) - jinja2_env = self.web_app.settings['jinja2_env'] - template = jinja2_env.get_template('browser-open.html') + jinja2_env = self.web_app.settings["jinja2_env"] + template = jinja2_env.get_template("browser-open.html") fh.write(template.render(open_url=url, base_url=self.base_url)) def remove_browser_open_file(self): @@ -1771,7 +2090,7 @@ def launch_browser(self): try: browser = webbrowser.get(self.browser or None) except webbrowser.Error as e: - self.log.warning(_('No web browser found: %s.') % e) + self.log.warning(_("No web browser found: %s.") % e) browser = None if not browser: @@ -1783,18 +2102,20 @@ def launch_browser(self): self.exit(1) relpath = os.path.relpath(self.file_to_run, self.root_dir) - uri = url_escape(url_path_join('notebooks', *relpath.split(os.sep))) + uri = url_escape(url_path_join("notebooks", *relpath.split(os.sep))) # Write a temporary file to open in the browser - fd, open_file = tempfile.mkstemp(suffix='.html') - with open(fd, 'w', encoding='utf-8') as fh: + fd, open_file = tempfile.mkstemp(suffix=".html") + with open(fd, "w", encoding="utf-8") as fh: self._write_browser_open_file(uri, fh) else: 
open_file = self.browser_open_file - b = lambda: browser.open( - urljoin('file:', pathname2url(open_file)), - new=self.webbrowser_open_new) + def b(): + return browser.open( + urljoin("file:", pathname2url(open_file)), new=self.webbrowser_open_new + ) + threading.Thread(target=b).start() def start_app(self): @@ -1805,20 +2126,32 @@ def start_app(self): try: uid = os.geteuid() except AttributeError: - uid = -1 # anything nonzero here, since we can't check UID assume non-root + uid = ( + -1 + ) # anything nonzero here, since we can't check UID assume non-root if uid == 0: - self.log.critical(_("Running as root is not recommended. Use --allow-root to bypass.")) + self.log.critical( + _("Running as root is not recommended. Use --allow-root to bypass.") + ) self.exit(1) info = self.log.info for line in self.running_server_info(kernel_count=False).split("\n"): info(line) - info(_("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")) - if 'dev' in jupyter_server.__version__: - info(_("Welcome to Project Jupyter! Explore the various tools available" - " and their corresponding documentation. If you are interested" - " in contributing to the platform, please visit the community" - "resources section at https://jupyter.org/community.html.")) + info( + _( + "Use Control-C to stop this server and shut down all kernels (twice to skip confirmation)." + ) + ) + if "dev" in jupyter_server.__version__: + info( + _( + "Welcome to Project Jupyter! Explore the various tools available" + " and their corresponding documentation. If you are interested" + " in contributing to the platform, please visit the community" + "resources section at https://jupyter.org/community.html." + ) + ) self.write_server_info_file() self.write_browser_open_file() @@ -1829,21 +2162,26 @@ def start_app(self): if self.token and self._token_generated: # log full URL with generated token, so there's a copy/pasteable link # with auth info. - self.log.critical('\n'.join([ - '\n', - 'To access the server, open this file in a browser:', - ' %s' % urljoin('file:', pathname2url(self.browser_open_file)), - 'Or copy and paste one of these URLs:', - ' %s' % self.display_url, - ])) + self.log.critical( + "\n".join( + [ + "\n", + "To access the server, open this file in a browser:", + " %s" + % urljoin("file:", pathname2url(self.browser_open_file)), + "Or copy and paste one of these URLs:", + " %s" % self.display_url, + ] + ) + ) def start_ioloop(self): """Start the IO Loop.""" self.io_loop = ioloop.IOLoop.current() - if sys.platform.startswith('win'): + if sys.platform.startswith("win"): # add no-op to wake every 5s # to handle signals that may be ignored by the inner loop - pc = ioloop.PeriodicCallback(lambda : None, 5000) + pc = ioloop.PeriodicCallback(lambda: None, 5000) pc.start() try: self.io_loop.start() @@ -1865,9 +2203,10 @@ def start(self): def stop(self): def _stop(): # Stop a server if its set. 
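[The root check at the top of start_app above, as a standalone sketch — os.geteuid is POSIX-only, hence the AttributeError fallback:

    import os

    try:
        uid = os.geteuid()
    except AttributeError:
        uid = -1  # anything nonzero: cannot check the UID, assume non-root
    if uid == 0:
        raise SystemExit(
            "Running as root is not recommended. Use --allow-root to bypass."
        )
]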
- if hasattr(self, '_http_server'): + if hasattr(self, "_http_server"): self.http_server.stop() self.io_loop.stop() + self.io_loop.add_callback(_stop) @@ -1886,13 +2225,13 @@ def list_running_servers(runtime_dir=None): return for file_name in os.listdir(runtime_dir): - if re.match('jpserver-(.+).json', file_name): - with io.open(os.path.join(runtime_dir, file_name), encoding='utf-8') as f: + if re.match("jpserver-(.+).json", file_name): + with io.open(os.path.join(runtime_dir, file_name), encoding="utf-8") as f: info = json.load(f) # Simple check whether that process is really still running # Also remove leftover files from IPython 2.x without a pid field - if ('pid' in info) and check_pid(info['pid']): + if ("pid" in info) and check_pid(info["pid"]): yield info else: # If the process has died, try to delete its info file @@ -1900,8 +2239,11 @@ def list_running_servers(runtime_dir=None): os.unlink(os.path.join(runtime_dir, file_name)) except OSError: pass # TODO: This should warn or log or something -#----------------------------------------------------------------------------- + + +# ----------------------------------------------------------------------------- # Main entry point -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- + main = launch_new_instance = ServerApp.launch_instance diff --git a/jupyter_server/services/api/handlers.py b/jupyter_server/services/api/handlers.py index c5859ab527..a2785e7298 100644 --- a/jupyter_server/services/api/handlers.py +++ b/jupyter_server/services/api/handlers.py @@ -14,17 +14,16 @@ class APISpecHandler(web.StaticFileHandler, JupyterHandler): - def initialize(self): web.StaticFileHandler.initialize(self, path=os.path.dirname(__file__)) @web.authenticated def get(self): self.log.warning("Serving api spec (experimental, incomplete)") - return web.StaticFileHandler.get(self, 'api.yaml') + return web.StaticFileHandler.get(self, "api.yaml") def get_content_type(self): - return 'text/x-yaml' + return "text/x-yaml" class APIStatusHandler(APIHandler): @@ -35,17 +34,17 @@ class APIStatusHandler(APIHandler): @gen.coroutine def get(self): # if started was missing, use unix epoch - started = self.settings.get('started', utcfromtimestamp(0)) + started = self.settings.get("started", utcfromtimestamp(0)) started = isoformat(started) kernels = yield maybe_future(self.kernel_manager.list_kernels()) - total_connections = sum(k['connections'] for k in kernels) + total_connections = sum(k["connections"] for k in kernels) last_activity = isoformat(self.application.last_activity()) model = { - 'started': started, - 'last_activity': last_activity, - 'kernels': len(kernels), - 'connections': total_connections, + "started": started, + "last_activity": last_activity, + "kernels": len(kernels), + "connections": total_connections, } self.finish(json.dumps(model, sort_keys=True)) diff --git a/jupyter_server/services/config/handlers.py b/jupyter_server/services/config/handlers.py index 76c1bd3e56..9d3bfc090e 100644 --- a/jupyter_server/services/config/handlers.py +++ b/jupyter_server/services/config/handlers.py @@ -11,11 +11,11 @@ from ipython_genutils.py3compat import PY3 from ...base.handlers import APIHandler -class ConfigHandler(APIHandler): +class ConfigHandler(APIHandler): @web.authenticated def get(self, section_name): - self.set_header("Content-Type", 'application/json') + self.set_header("Content-Type", "application/json") 
self.finish(json.dumps(self.config_manager.get(section_name))) @web.authenticated diff --git a/jupyter_server/services/config/manager.py b/jupyter_server/services/config/manager.py index fe2b40c6a6..2e6ddb7aa4 100644 --- a/jupyter_server/services/config/manager.py +++ b/jupyter_server/services/config/manager.py @@ -37,22 +37,24 @@ def update(self, section_name, new_data): read_config_path = List(Unicode()) - @default('read_config_path') + @default("read_config_path") def _default_read_config_path(self): - return [os.path.join(p, 'serverconfig') for p in jupyter_config_path()] + return [os.path.join(p, "serverconfig") for p in jupyter_config_path()] write_config_dir = Unicode() - @default('write_config_dir') + @default("write_config_dir") def _default_write_config_dir(self): - return os.path.join(jupyter_config_dir(), 'serverconfig') + return os.path.join(jupyter_config_dir(), "serverconfig") write_config_manager = Instance(BaseJSONConfigManager) - @default('write_config_manager') + @default("write_config_manager") def _default_write_config_manager(self): return BaseJSONConfigManager(config_dir=self.write_config_dir) - @observe('write_config_dir') + @observe("write_config_dir") def _update_write_config_dir(self, change): - self.write_config_manager = BaseJSONConfigManager(config_dir=self.write_config_dir) + self.write_config_manager = BaseJSONConfigManager( + config_dir=self.write_config_dir + ) diff --git a/jupyter_server/services/contents/checkpoints.py b/jupyter_server/services/contents/checkpoints.py index c29a669c22..cde7e4bfac 100644 --- a/jupyter_server/services/contents/checkpoints.py +++ b/jupyter_server/services/contents/checkpoints.py @@ -22,6 +22,7 @@ class Checkpoints(LoggingConfigurable): delete_checkpoint(self, checkpoint_id, path) list_checkpoints(self, path) """ + def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" raise NotImplementedError("must be implemented in a subclass") @@ -45,12 +46,12 @@ def list_checkpoints(self, path): def rename_all_checkpoints(self, old_path, new_path): """Rename all checkpoints for old_path to new_path.""" for cp in self.list_checkpoints(old_path): - self.rename_checkpoint(cp['id'], old_path, new_path) + self.rename_checkpoint(cp["id"], old_path, new_path) def delete_all_checkpoints(self, path): """Delete all checkpoints for the given path.""" for checkpoint in self.list_checkpoints(path): - self.delete_checkpoint(checkpoint['id'], path) + self.delete_checkpoint(checkpoint["id"], path) class GenericCheckpointsMixin(object): @@ -77,30 +78,23 @@ class GenericCheckpointsMixin(object): def create_checkpoint(self, contents_mgr, path): model = contents_mgr.get(path, content=True) - type = model['type'] - if type == 'notebook': - return self.create_notebook_checkpoint( - model['content'], - path, - ) - elif type == 'file': - return self.create_file_checkpoint( - model['content'], - model['format'], - path, - ) + type = model["type"] + if type == "notebook": + return self.create_notebook_checkpoint(model["content"], path,) + elif type == "file": + return self.create_file_checkpoint(model["content"], model["format"], path,) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) def restore_checkpoint(self, contents_mgr, checkpoint_id, path): """Restore a checkpoint.""" - type = contents_mgr.get(path, content=False)['type'] - if type == 'notebook': + type = contents_mgr.get(path, content=False)["type"] + if type == "notebook": model = 
self.get_notebook_checkpoint(checkpoint_id, path) - elif type == 'file': + elif type == "file": model = self.get_file_checkpoint(checkpoint_id, path) else: - raise HTTPError(500, u'Unexpected type %s' % type) + raise HTTPError(500, u"Unexpected type %s" % type) contents_mgr.save(model, path) # Required Methods diff --git a/jupyter_server/services/contents/filecheckpoints.py b/jupyter_server/services/contents/filecheckpoints.py index a8a795da65..623a01f52f 100644 --- a/jupyter_server/services/contents/filecheckpoints.py +++ b/jupyter_server/services/contents/filecheckpoints.py @@ -29,7 +29,7 @@ class FileCheckpoints(FileManagerMixin, Checkpoints): """ checkpoint_dir = Unicode( - '.ipynb_checkpoints', + ".ipynb_checkpoints", config=True, help="""The directory name in which to keep file checkpoints @@ -50,7 +50,7 @@ def _root_dir_default(self): # ContentsManager-dependent checkpoint API def create_checkpoint(self, contents_mgr, path): """Create a checkpoint.""" - checkpoint_id = u'checkpoint' + checkpoint_id = u"checkpoint" src_path = contents_mgr._get_os_path(path) dest_path = self.checkpoint_path(checkpoint_id, path) self._copy(src_path, dest_path) @@ -69,16 +69,14 @@ def rename_checkpoint(self, checkpoint_id, old_path, new_path): new_cp_path = self.checkpoint_path(checkpoint_id, new_path) if os.path.isfile(old_cp_path): self.log.debug( - "Renaming checkpoint %s -> %s", - old_cp_path, - new_cp_path, + "Renaming checkpoint %s -> %s", old_cp_path, new_cp_path, ) with self.perm_to_403(): shutil.move(old_cp_path, new_cp_path) def delete_checkpoint(self, checkpoint_id, path): """delete a file's checkpoint""" - path = path.strip('/') + path = path.strip("/") cp_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(cp_path): self.no_such_checkpoint(path, checkpoint_id) @@ -92,7 +90,7 @@ def list_checkpoints(self, path): This contents manager currently only supports one checkpoint per file. """ - path = path.strip('/') + path = path.strip("/") checkpoint_id = "checkpoint" os_path = self.checkpoint_path(checkpoint_id, path) if not os.path.isfile(os_path): @@ -103,14 +101,12 @@ def list_checkpoints(self, path): # Checkpoint-related utilities def checkpoint_path(self, checkpoint_id, path): """find the path to a checkpoint""" - path = path.strip('/') - parent, name = ('/' + path).rsplit('/', 1) - parent = parent.strip('/') + path = path.strip("/") + parent, name = ("/" + path).rsplit("/", 1) + parent = parent.strip("/") basename, ext = os.path.splitext(name) filename = u"{name}-{checkpoint_id}{ext}".format( - name=basename, - checkpoint_id=checkpoint_id, - ext=ext, + name=basename, checkpoint_id=checkpoint_id, ext=ext, ) os_path = self._get_os_path(path=parent) cp_dir = os.path.join(os_path, self.checkpoint_dir) @@ -123,17 +119,13 @@ def checkpoint_model(self, checkpoint_id, os_path): """construct the info dict for a given checkpoint""" stats = os.stat(os_path) last_modified = tz.utcfromtimestamp(stats.st_mtime) - info = dict( - id=checkpoint_id, - last_modified=last_modified, - ) + info = dict(id=checkpoint_id, last_modified=last_modified,) return info # Error Handling def no_such_checkpoint(self, path, checkpoint_id): raise HTTPError( - 404, - u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) + 404, u"Checkpoint does not exist: %s@%s" % (path, checkpoint_id) ) @@ -142,9 +134,10 @@ class GenericFileCheckpoints(GenericCheckpointsMixin, FileCheckpoints): Local filesystem Checkpoints that works with any conforming ContentsManager. 
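[The naming scheme used by checkpoint_path above, as a small sketch (the notebook name is a placeholder):

    import os

    def checkpoint_filename(name, checkpoint_id="checkpoint"):
        """<basename>-<checkpoint_id><ext>, stored under .ipynb_checkpoints"""
        basename, ext = os.path.splitext(name)
        return "{name}-{checkpoint_id}{ext}".format(
            name=basename, checkpoint_id=checkpoint_id, ext=ext
        )

    assert checkpoint_filename("analysis.ipynb") == "analysis-checkpoint.ipynb"
]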
""" + def create_file_checkpoint(self, content, format, path): """Create a checkpoint from the current content of a file.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -157,7 +150,7 @@ def create_file_checkpoint(self, content, format, path): def create_notebook_checkpoint(self, nb, path): """Create a checkpoint from the current content of a notebook.""" - path = path.strip('/') + path = path.strip("/") # only the one checkpoint ID: checkpoint_id = u"checkpoint" os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -170,7 +163,7 @@ def create_notebook_checkpoint(self, nb, path): def get_notebook_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a notebook.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -178,16 +171,13 @@ def get_notebook_checkpoint(self, checkpoint_id, path): self.no_such_checkpoint(path, checkpoint_id) return { - 'type': 'notebook', - 'content': self._read_notebook( - os_checkpoint_path, - as_version=4, - ), + "type": "notebook", + "content": self._read_notebook(os_checkpoint_path, as_version=4,), } def get_file_checkpoint(self, checkpoint_id, path): """Get a checkpoint for a file.""" - path = path.strip('/') + path = path.strip("/") self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) @@ -196,7 +186,7 @@ def get_file_checkpoint(self, checkpoint_id, path): content, format = self._read_file(os_checkpoint_path, format=None) return { - 'type': 'file', - 'content': content, - 'format': format, + "type": "file", + "content": content, + "format": format, } diff --git a/jupyter_server/services/contents/fileio.py b/jupyter_server/services/contents/fileio.py index 7e92497773..959a1deea4 100644 --- a/jupyter_server/services/contents/fileio.py +++ b/jupyter_server/services/contents/fileio.py @@ -32,6 +32,7 @@ def replace_file(src, dst): """ os.replace(src, dst) + def copy2_safe(src, dst, log=None): """copy src to dst @@ -44,20 +45,23 @@ def copy2_safe(src, dst, log=None): if log: log.debug("copystat on %s failed", dst, exc_info=True) + def path_to_intermediate(path): - '''Name of the intermediate file used in atomic writes. + """Name of the intermediate file used in atomic writes. - The .~ prefix will make Dropbox ignore the temporary file.''' + The .~ prefix will make Dropbox ignore the temporary file.""" dirname, basename = os.path.split(path) - return os.path.join(dirname, '.~'+basename) + return os.path.join(dirname, ".~" + basename) + def path_to_invalid(path): - '''Name of invalid file after a failed atomic write and subsequent read.''' + """Name of invalid file after a failed atomic write and subsequent read.""" dirname, basename = os.path.split(path) - return os.path.join(dirname, basename+'.invalid') + return os.path.join(dirname, basename + ".invalid") + @contextmanager -def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): +def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write to a file only if the entire write is successful. 
This works by copying the previous file contents to a temporary file in the @@ -93,10 +97,10 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default - kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + kwargs.setdefault("newline", "\n") + fileobj = io.open(path, "w", encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = io.open(path, "wb", **kwargs) try: yield fileobj @@ -116,9 +120,8 @@ def atomic_writing(path, text=True, encoding='utf-8', log=None, **kwargs): os.remove(tmp_path) - @contextmanager -def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): +def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): """Context manager to write a file without doing atomic writing (for weird filesystems, e.g. NFS). @@ -145,10 +148,10 @@ def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default - kwargs.setdefault('newline', '\n') - fileobj = io.open(path, 'w', encoding=encoding, **kwargs) + kwargs.setdefault("newline", "\n") + fileobj = io.open(path, "w", encoding=encoding, **kwargs) else: - fileobj = io.open(path, 'wb', **kwargs) + fileobj = io.open(path, "wb", **kwargs) try: yield fileobj @@ -159,8 +162,6 @@ def _simple_writing(path, text=True, encoding='utf-8', log=None, **kwargs): fileobj.close() - - class FileManagerMixin(Configurable): """ Mixin for ContentsAPI classes that interact with the filesystem. @@ -179,10 +180,13 @@ class FileManagerMixin(Configurable): log : logging.Logger """ - use_atomic_writing = Bool(True, config=True, help= """By default notebooks are saved on disk in a temporary file and then, if successfully written, it replaces the old one. + use_atomic_writing = Bool( + True, + config=True, + help="""By default notebooks are saved on disk in a temporary file and then, if successfully written, it replaces the old one. This procedure, namely 'atomic_writing', causes some bugs on file systems without operation order enforcement (like some networked filesystems). - If set to False, the new notebook is written directly over the old one, which could fail (e.g. full filesystem or quota)""") + If set to False, the new notebook is written directly over the old one, which could fail (e.g. full filesystem or quota)""", + ) @contextmanager def open(self, os_path, *args, **kwargs): @@ -205,7 +209,7 @@ def atomic_writing(self, os_path, *args, **kwargs): yield f @contextmanager - def perm_to_403(self, os_path=''): + def perm_to_403(self, os_path=""): """context manager for turning permission errors into 403.""" try: yield @@ -215,9 +219,9 @@ def perm_to_403(self, os_path=''): # this may not work perfectly on unicode paths on Python 2, # but nobody should be doing that anyway.
if not os_path: - os_path = str_to_unicode(e.filename or 'unknown file') + os_path = str_to_unicode(e.filename or "unknown file") path = to_api_path(os_path, root=self.root_dir) - raise HTTPError(403, u'Permission denied: %s' % path) + raise HTTPError(403, u"Permission denied: %s" % path) else: raise @@ -253,7 +257,7 @@ def _get_os_path(self, path): def _read_notebook(self, os_path, as_version=4): """Read a notebook from an os path.""" - with self.open(os_path, 'r', encoding='utf-8') as f: + with self.open(os_path, "r", encoding="utf-8") as f: try: return nbformat.read(f, as_version=as_version) except Exception as e: @@ -266,8 +270,7 @@ def _read_notebook(self, os_path, as_version=4): if not self.use_atomic_writing or not os.path.exists(tmp_path): raise HTTPError( - 400, - u"Unreadable Notebook: %s %r" % (os_path, e_orig), + 400, u"Unreadable Notebook: %s %r" % (os_path, e_orig), ) # Move the bad file aside, restore the intermediate, and try again. @@ -278,7 +281,7 @@ def _read_notebook(self, os_path, as_version=4): def _save_notebook(self, os_path, nb): """Save a notebook to an os_path.""" - with self.atomic_writing(os_path, encoding='utf-8') as f: + with self.atomic_writing(os_path, encoding="utf-8") as f: nbformat.write(nb, f, version=nbformat.NO_CONVERT) def _read_file(self, os_path, format): @@ -293,40 +296,35 @@ def _read_file(self, os_path, format): if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) - with self.open(os_path, 'rb') as f: + with self.open(os_path, "rb") as f: bcontent = f.read() - if format is None or format == 'text': + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. try: - return bcontent.decode('utf8'), 'text' + return bcontent.decode("utf8"), "text" except UnicodeError: - if format == 'text': + if format == "text": raise HTTPError( - 400, - "%s is not UTF-8 encoded" % os_path, - reason='bad format', + 400, "%s is not UTF-8 encoded" % os_path, reason="bad format", ) - return encodebytes(bcontent).decode('ascii'), 'base64' + return encodebytes(bcontent).decode("ascii"), "base64" def _save_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise HTTPError( - 400, - "Must specify format of file contents as 'text' or 'base64'", + 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = decodebytes(b64_bytes) except Exception as e: - raise HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) + raise HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) with self.atomic_writing(os_path, text=False) as f: f.write(bcontent) diff --git a/jupyter_server/services/contents/filemanager.py b/jupyter_server/services/contents/filemanager.py index 0a62682f92..3aa771995e 100644 --- a/jupyter_server/services/contents/filemanager.py +++ b/jupyter_server/services/contents/filemanager.py @@ -28,7 +28,8 @@ from jupyter_server import _tz as tz from jupyter_server.utils import ( - is_hidden, is_file_hidden, + is_hidden, + is_file_hidden, to_api_path, ) from jupyter_server.base.handlers import AuthenticatedFileHandler @@ -47,14 +48,17 @@ class FileContentsManager(FileManagerMixin, ContentsManager): root_dir = 
Unicode(config=True) - @default('root_dir') + @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return getcwd() - post_save_hook = Any(None, config=True, allow_none=True, + post_save_hook = Any( + None, + config=True, + allow_none=True, help="""Python callable or importstring thereof to be called on the path of a file just saved. @@ -69,12 +73,12 @@ def _default_root_dir(self): - path: the filesystem path to the file just written - model: the model representing the file - contents_manager: this ContentsManager instance - """ + """, ) - @validate('post_save_hook') + @validate("post_save_hook") def _validate_post_save_hook(self, proposal): - value = proposal['value'] + value = proposal["value"] if isinstance(value, string_types): value = import_item(value) if not callable(value): @@ -89,12 +93,14 @@ def run_post_save_hook(self, model, os_path): self.post_save_hook(os_path=os_path, model=model, contents_manager=self) except Exception as e: self.log.error("Post-save hook failed on %s", os_path, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while running post-save hook: %s' % e) + raise web.HTTPError( + 500, u"Unexpected error while running post-save hook: %s" % e + ) - @validate('root_dir') + @validate("root_dir") def _validate_root_dir(self, proposal): """Do a bit of validation of the root_dir.""" - value = proposal['value'] + value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) @@ -102,22 +108,25 @@ def _validate_root_dir(self, proposal): raise TraitError("%r is not a directory" % value) return value - @default('checkpoints_class') + @default("checkpoints_class") def _checkpoints_class_default(self): return FileCheckpoints - delete_to_trash = Bool(True, config=True, + delete_to_trash = Bool( + True, + config=True, help="""If True (default), deleting files will send them to the platform's trash/recycle bin, where they can be recovered. If False, - deleting files really deletes them.""") + deleting files really deletes them.""", + ) - @default('files_handler_class') + @default("files_handler_class") def _files_handler_class_default(self): return AuthenticatedFileHandler - @default('files_handler_params') + @default("files_handler_params") def _files_handler_params_default(self): - return {'path': self.root_dir} + return {"path": self.root_dir} def is_hidden(self, path): """Does the API style path correspond to a hidden directory or file? @@ -133,7 +142,7 @@ def is_hidden(self, path): hidden : bool Whether the path exists and is hidden. """ - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path=path) return is_hidden(os_path, self.root_dir) @@ -152,7 +161,7 @@ def file_exists(self, path): exists : bool Whether the file exists. """ - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path) return os.path.isfile(os_path) @@ -172,7 +181,7 @@ def dir_exists(self, path): exists : bool Whether the path is indeed a directory. """ - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path=path) return os.path.isdir(os_path) @@ -191,7 +200,7 @@ def exists(self, path): exists : bool Whether the target exists.
""" - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path=path) return exists(os_path) @@ -204,7 +213,7 @@ def _base_model(self, path): # size of file size = info.st_size except (ValueError, OSError): - self.log.warning('Unable to get size.') + self.log.warning("Unable to get size.") size = None try: @@ -214,31 +223,31 @@ def _base_model(self, path): # https://github.com/jupyter/notebook/issues/2539 # https://github.com/jupyter/notebook/issues/2757 # Use the Unix epoch as a fallback so we don't crash. - self.log.warning('Invalid mtime %s for %s', info.st_mtime, os_path) + self.log.warning("Invalid mtime %s for %s", info.st_mtime, os_path) last_modified = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) try: created = tz.utcfromtimestamp(info.st_ctime) except (ValueError, OSError): # See above - self.log.warning('Invalid ctime %s for %s', info.st_ctime, os_path) + self.log.warning("Invalid ctime %s for %s", info.st_ctime, os_path) created = datetime(1970, 1, 1, 0, 0, tzinfo=tz.UTC) # Create the base model. model = {} - model['name'] = path.rsplit('/', 1)[-1] - model['path'] = path - model['last_modified'] = last_modified - model['created'] = created - model['content'] = None - model['format'] = None - model['mimetype'] = None - model['size'] = size + model["name"] = path.rsplit("/", 1)[-1] + model["path"] = path + model["last_modified"] = last_modified + model["created"] = created + model["content"] = None + model["format"] = None + model["mimetype"] = None + model["size"] = size try: - model['writable'] = os.access(os_path, os.W_OK) + model["writable"] = os.access(os_path, os.W_OK) except OSError: self.log.error("Failed to check write permissions on %s", os_path) - model['writable'] = False + model["writable"] = False return model def _dir_model(self, path, content=True): @@ -248,28 +257,27 @@ def _dir_model(self, path, content=True): """ os_path = self._get_os_path(path) - four_o_four = u'directory does not exist: %r' % path + four_o_four = u"directory does not exist: %r" % path if not os.path.isdir(os_path): raise web.HTTPError(404, four_o_four) elif is_hidden(os_path, self.root_dir) and not self.allow_hidden: - self.log.info("Refusing to serve hidden directory %r, via 404 Error", - os_path + self.log.info( + "Refusing to serve hidden directory %r, via 404 Error", os_path ) raise web.HTTPError(404, four_o_four) model = self._base_model(path) - model['type'] = 'directory' - model['size'] = None + model["type"] = "directory" + model["size"] = None if content: - model['content'] = contents = [] + model["content"] = contents = [] os_dir = self._get_os_path(path) for name in os.listdir(os_dir): try: os_path = os.path.join(os_dir, name) except UnicodeDecodeError as e: - self.log.warning( - "failed to decode filename '%s': %s", name, e) + self.log.warning("failed to decode filename '%s': %s", name, e) continue try: @@ -282,23 +290,24 @@ def _dir_model(self, path, content=True): self.log.warning("Error stat-ing %s: %s", os_path, e) continue - if (not stat.S_ISLNK(st.st_mode) - and not stat.S_ISREG(st.st_mode) - and not stat.S_ISDIR(st.st_mode)): + if ( + not stat.S_ISLNK(st.st_mode) + and not stat.S_ISREG(st.st_mode) + and not stat.S_ISDIR(st.st_mode) + ): self.log.debug("%s not a regular file", os_path) continue if self.should_list(name): if self.allow_hidden or not is_file_hidden(os_path, stat_res=st): contents.append( - self.get(path='%s/%s' % (path, name), content=False) + self.get(path="%s/%s" % (path, name), content=False) ) - model['format'] = 'json' + 
model["format"] = "json" return model - def _file_model(self, path, content=True, format=None): """Build a model for a file @@ -310,23 +319,22 @@ def _file_model(self, path, content=True, format=None): If not specified, try to decode as UTF-8, and fall back to base64 """ model = self._base_model(path) - model['type'] = 'file' + model["type"] = "file" os_path = self._get_os_path(path) - model['mimetype'] = mimetypes.guess_type(os_path)[0] + model["mimetype"] = mimetypes.guess_type(os_path)[0] if content: content, format = self._read_file(os_path, format) - if model['mimetype'] is None: + if model["mimetype"] is None: default_mime = { - 'text': 'text/plain', - 'base64': 'application/octet-stream' + "text": "text/plain", + "base64": "application/octet-stream", }[format] - model['mimetype'] = default_mime + model["mimetype"] = default_mime model.update( - content=content, - format=format, + content=content, format=format, ) return model @@ -338,14 +346,14 @@ def _notebook_model(self, path, content=True): as a JSON structure (not double-serialized) """ model = self._base_model(path) - model['type'] = 'notebook' + model["type"] = "notebook" os_path = self._get_os_path(path) if content: nb = self._read_notebook(os_path, as_version=4) self.mark_trusted_cells(nb, path) - model['content'] = nb - model['format'] = 'json' + model["content"] = nb + model["format"] = "json" self.validate_notebook_model(model) return model @@ -372,46 +380,50 @@ def get(self, path, content=True, type=None, format=None): the contents model. If content=True, returns the contents of the file or directory as well. """ - path = path.strip('/') + path = path.strip("/") if not self.exists(path): - raise web.HTTPError(404, u'No such file or directory: %s' % path) + raise web.HTTPError(404, u"No such file or directory: %s" % path) os_path = self._get_os_path(path) if os.path.isdir(os_path): - if type not in (None, 'directory'): - raise web.HTTPError(400, - u'%s is a directory, not a %s' % (path, type), reason='bad type') + if type not in (None, "directory"): + raise web.HTTPError( + 400, + u"%s is a directory, not a %s" % (path, type), + reason="bad type", + ) model = self._dir_model(path, content=content) - elif type == 'notebook' or (type is None and path.endswith('.ipynb')): + elif type == "notebook" or (type is None and path.endswith(".ipynb")): model = self._notebook_model(path, content=content) else: - if type == 'directory': - raise web.HTTPError(400, - u'%s is not a directory' % path, reason='bad type') + if type == "directory": + raise web.HTTPError( + 400, u"%s is not a directory" % path, reason="bad type" + ) model = self._file_model(path, content=content, format=format) return model - def _save_directory(self, os_path, model, path=''): + def _save_directory(self, os_path, model, path=""): """create a directory""" if is_hidden(os_path, self.root_dir) and not self.allow_hidden: - raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) + raise web.HTTPError(400, u"Cannot create hidden directory %r" % os_path) if not os.path.exists(os_path): with self.perm_to_403(): os.mkdir(os_path) elif not os.path.isdir(os_path): - raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) + raise web.HTTPError(400, u"Not a directory: %s" % (os_path)) else: self.log.debug("Directory %r already exists", os_path) - def save(self, model, path=''): + def save(self, model, path=""): """Save the file model and return the model with no content.""" - path = path.strip('/') + path = path.strip("/") - if 'type' not in model: - 
raise web.HTTPError(400, u'No file type provided') - if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + if "type" not in model: + raise web.HTTPError(400, u"No file type provided") + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) @@ -419,34 +431,36 @@ def save(self, model, path=''): self.run_pre_save_hook(model=model, path=path) try: - if model['type'] == 'notebook': - nb = nbformat.from_dict(model['content']) + if model["type"] == "notebook": + nb = nbformat.from_dict(model["content"]) self.check_and_sign(nb, path) self._save_notebook(os_path, nb) # One checkpoint should always exist for notebooks. if not self.checkpoints.list_checkpoints(path): self.create_checkpoint(path) - elif model['type'] == 'file': + elif model["type"] == "file": # Missing format will be handled internally by _save_file. - self._save_file(os_path, model['content'], model.get('format')) - elif model['type'] == 'directory': + self._save_file(os_path, model["content"], model.get("format")) + elif model["type"] == "directory": self._save_directory(os_path, model, path) else: - raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) + raise web.HTTPError(400, "Unhandled contents type: %s" % model["type"]) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) + self.log.error(u"Error while saving file: %s %s", path, e, exc_info=True) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) validation_message = None - if model['type'] == 'notebook': + if model["type"] == "notebook": self.validate_notebook_model(model) - validation_message = model.get('message', None) + validation_message = model.get("message", None) model = self.get(path, content=False) if validation_message: - model['message'] = validation_message + model["message"] = validation_message self.run_post_save_hook(model=model, os_path=os_path) @@ -454,38 +468,38 @@ def save(self, model, path=''): def delete_file(self, path): """Delete file at path.""" - path = path.strip('/') + path = path.strip("/") os_path = self._get_os_path(path) rm = os.unlink if not os.path.exists(os_path): - raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) + raise web.HTTPError(404, u"File or directory does not exist: %s" % os_path) def _check_trash(os_path): - if sys.platform in {'win32', 'darwin'}: + if sys.platform in {"win32", "darwin"}: return True # It's a bit more nuanced than this, but until we can better # distinguish errors from send2trash, assume that we can only trash # files on the same partition as the home directory. file_dev = os.stat(os_path).st_dev - home_dev = os.stat(os.path.expanduser('~')).st_dev + home_dev = os.stat(os.path.expanduser("~")).st_dev return file_dev == home_dev def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is # considered empty. 
- cp_dir = getattr(self.checkpoints, 'checkpoint_dir', None) + cp_dir = getattr(self.checkpoints, "checkpoint_dir", None) if set(os.listdir(os_path)) - {cp_dir}: return True return False if self.delete_to_trash: - if sys.platform == 'win32' and is_non_empty_dir(os_path): + if sys.platform == "win32" and is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) if _check_trash(os_path): self.log.debug("Sending %s to trash", os_path) # Looking at the code in send2trash, I don't think the errors it @@ -494,13 +508,15 @@ def is_non_empty_dir(os_path): send2trash(os_path) return else: - self.log.warning("Skipping trash for %s, on different device " - "to home directory", os_path) + self.log.warning( + "Skipping trash for %s, on different device " "to home directory", + os_path, + ) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. if is_non_empty_dir(os_path): - raise web.HTTPError(400, u'Directory %s not empty' % os_path) + raise web.HTTPError(400, u"Directory %s not empty" % os_path) self.log.debug("Removing directory %s", os_path) with self.perm_to_403(): shutil.rmtree(os_path) @@ -511,8 +527,8 @@ def is_non_empty_dir(os_path): def rename_file(self, old_path, new_path): """Rename a file.""" - old_path = old_path.strip('/') - new_path = new_path.strip('/') + old_path = old_path.strip("/") + new_path = new_path.strip("/") if new_path == old_path: return @@ -521,7 +537,7 @@ def rename_file(self, old_path, new_path): # Should we proceed with the move? if os.path.exists(new_os_path) and not samefile(old_os_path, new_os_path): - raise web.HTTPError(409, u'File already exists: %s' % new_path) + raise web.HTTPError(409, u"File already exists: %s" % new_path) # Move the file try: @@ -530,7 +546,9 @@ def rename_file(self, old_path, new_path): except web.HTTPError: raise except Exception as e: - raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e)) + raise web.HTTPError( + 500, u"Unknown error renaming file: %s %s" % (old_path, e) + ) def info_string(self): return _("Serving notebooks from local directory: %s") % self.root_dir @@ -539,9 +557,8 @@ def get_kernel_path(self, path, model=None): """Return the initial API path of a kernel associated with a given notebook""" if self.dir_exists(path): return path - if '/' in path: - parent_dir = path.rsplit('/', 1)[0] + if "/" in path: + parent_dir = path.rsplit("/", 1)[0] else: - parent_dir = '' + parent_dir = "" return parent_dir - diff --git a/jupyter_server/services/contents/handlers.py b/jupyter_server/services/contents/handlers.py index 943ba8638d..ce17ab1445 100644 --- a/jupyter_server/services/contents/handlers.py +++ b/jupyter_server/services/contents/handlers.py @@ -14,7 +14,9 @@ from jupyter_client.jsonutil import date_default from jupyter_server.base.handlers import ( - JupyterHandler, APIHandler, path_regex, + JupyterHandler, + APIHandler, + path_regex, ) @@ -39,33 +41,25 @@ def validate_model(model, expect_content): missing = required_keys - set(model.keys()) if missing: raise web.HTTPError( - 500, - u"Missing Model Keys: {missing}".format(missing=missing), + 500, u"Missing Model Keys: {missing}".format(missing=missing), ) - maybe_none_keys = ['content', 'format'] + maybe_none_keys = ["content", "format"] if expect_content: errors = [key for key in maybe_none_keys if model[key] is None] if 
errors: raise web.HTTPError( - 500, - u"Keys unexpectedly None: {keys}".format(keys=errors), + 500, u"Keys unexpectedly None: {keys}".format(keys=errors), ) else: - errors = { - key: model[key] - for key in maybe_none_keys - if model[key] is not None - } + errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} if errors: raise web.HTTPError( - 500, - u"Keys unexpectedly not None: {keys}".format(keys=errors), + 500, u"Keys unexpectedly not None: {keys}".format(keys=errors), ) class ContentsHandler(APIHandler): - def location_url(self, path): """Return the full URL location of a file. @@ -74,54 +68,54 @@ def location_url(self, path): path : unicode The API path of the file, such as "foo/bar.txt". """ - return url_path_join( - self.base_url, 'api', 'contents', url_escape(path) - ) + return url_path_join(self.base_url, "api", "contents", url_escape(path)) def _finish_model(self, model, location=True): """Finish a JSON request with a model, setting relevant headers, etc.""" if location: - location = self.location_url(model['path']) - self.set_header('Location', location) - self.set_header('Last-Modified', model['last_modified']) - self.set_header('Content-Type', 'application/json') + location = self.location_url(model["path"]) + self.set_header("Location", location) + self.set_header("Last-Modified", model["last_modified"]) + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model, default=date_default)) @web.authenticated @gen.coroutine - def get(self, path=''): + def get(self, path=""): """Return a model for a file or directory. A directory model contains a list of models (without content) of the files and directories it contains. """ - path = path or '' - type = self.get_query_argument('type', default=None) - if type not in {None, 'directory', 'file', 'notebook'}: - raise web.HTTPError(400, u'Type %r is invalid' % type) - - format = self.get_query_argument('format', default=None) - if format not in {None, 'text', 'base64'}: - raise web.HTTPError(400, u'Format %r is invalid' % format) - content = self.get_query_argument('content', default='1') - if content not in {'0', '1'}: - raise web.HTTPError(400, u'Content %r is invalid' % content) + path = path or "" + type = self.get_query_argument("type", default=None) + if type not in {None, "directory", "file", "notebook"}: + raise web.HTTPError(400, u"Type %r is invalid" % type) + + format = self.get_query_argument("format", default=None) + if format not in {None, "text", "base64"}: + raise web.HTTPError(400, u"Format %r is invalid" % format) + content = self.get_query_argument("content", default="1") + if content not in {"0", "1"}: + raise web.HTTPError(400, u"Content %r is invalid" % content) content = int(content) - - model = yield maybe_future(self.contents_manager.get( - path=path, type=type, format=format, content=content, - )) + + model = yield maybe_future( + self.contents_manager.get( + path=path, type=type, format=format, content=content, + ) + ) validate_model(model, expect_content=content) self._finish_model(model, location=False) @web.authenticated @gen.coroutine - def patch(self, path=''): + def patch(self, path=""): """PATCH renames a file or directory without re-uploading content.""" cm = self.contents_manager model = self.get_json_body() if model is None: - raise web.HTTPError(400, u'JSON body missing') + raise web.HTTPError(400, u"JSON body missing") model = yield maybe_future(cm.update(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @@ -129,10 
+123,11 @@ def patch(self, path=''): @gen.coroutine def _copy(self, copy_from, copy_to=None): """Copy a file, optionally specifying a target directory.""" - self.log.info(u"Copying {copy_from} to {copy_to}".format( - copy_from=copy_from, - copy_to=copy_to or '', - )) + self.log.info( + u"Copying {copy_from} to {copy_to}".format( + copy_from=copy_from, copy_to=copy_to or "", + ) + ) model = yield maybe_future(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) validate_model(model, expect_content=False) @@ -148,10 +143,12 @@ def _upload(self, model, path): self._finish_model(model) @gen.coroutine - def _new_untitled(self, path, type='', ext=''): + def _new_untitled(self, path, type="", ext=""): """Create a new, empty untitled entity""" - self.log.info(u"Creating new %s in %s", type or 'file', path) - model = yield maybe_future(self.contents_manager.new_untitled(path=path, type=type, ext=ext)) + self.log.info(u"Creating new %s in %s", type or "file", path) + model = yield maybe_future( + self.contents_manager.new_untitled(path=path, type=type, ext=ext) + ) self.set_status(201) validate_model(model, expect_content=False) self._finish_model(model) @@ -161,14 +158,14 @@ def _save(self, model, path): """Save an existing file.""" chunk = model.get("chunk", None) if not chunk or chunk == -1: # Avoid tedious log information - self.log.info(u"Saving file at %s", path) + self.log.info(u"Saving file at %s", path) model = yield maybe_future(self.contents_manager.save(model, path)) validate_model(model, expect_content=False) self._finish_model(model) @web.authenticated @gen.coroutine - def post(self, path=''): + def post(self, path=""): """Create a new file in the specified path. POST creates new files. The server always decides on the name. @@ -193,9 +190,9 @@ def post(self, path=''): model = self.get_json_body() if model is not None: - copy_from = model.get('copy_from') - ext = model.get('ext', '') - type = model.get('type', '') + copy_from = model.get("copy_from") + ext = model.get("ext", "") + type = model.get("type", "") if copy_from: yield self._copy(copy_from, path) else: @@ -205,7 +202,7 @@ def post(self, path=''): @web.authenticated @gen.coroutine - def put(self, path=''): + def put(self, path=""): """Saves the file in the location specified by name and path. 
PUT is very similar to POST, but the requester specifies the name, @@ -218,7 +215,7 @@ def put(self, path=''): """ model = self.get_json_body() if model: - if model.get('copy_from'): + if model.get("copy_from"): raise web.HTTPError(400, "Cannot copy with PUT, only POST") exists = yield maybe_future(self.contents_manager.file_exists(path)) if exists: @@ -230,20 +227,19 @@ def put(self, path=''): @web.authenticated @gen.coroutine - def delete(self, path=''): + def delete(self, path=""): """delete a file in the given path""" cm = self.contents_manager - self.log.warning('delete %s', path) + self.log.warning("delete %s", path) yield maybe_future(cm.delete(path)) self.set_status(204) self.finish() class CheckpointsHandler(APIHandler): - @web.authenticated @gen.coroutine - def get(self, path=''): + def get(self, path=""): """get lists checkpoints for a file""" cm = self.contents_manager checkpoints = yield maybe_future(cm.list_checkpoints(path)) @@ -252,20 +248,24 @@ def get(self, path=''): @web.authenticated @gen.coroutine - def post(self, path=''): + def post(self, path=""): """post creates a new checkpoint""" cm = self.contents_manager checkpoint = yield maybe_future(cm.create_checkpoint(path)) data = json.dumps(checkpoint, default=date_default) - location = url_path_join(self.base_url, 'api/contents', - url_escape(path), 'checkpoints', url_escape(checkpoint['id'])) - self.set_header('Location', location) + location = url_path_join( + self.base_url, + "api/contents", + url_escape(path), + "checkpoints", + url_escape(checkpoint["id"]), + ) + self.set_header("Location", location) self.set_status(201) self.finish(data) class ModifyCheckpointsHandler(APIHandler): - @web.authenticated @gen.coroutine def post(self, path, checkpoint_id): @@ -287,15 +287,12 @@ def delete(self, path, checkpoint_id): class NotebooksRedirectHandler(JupyterHandler): """Redirect /api/notebooks to /api/contents""" - SUPPORTED_METHODS = ('GET', 'PUT', 'PATCH', 'POST', 'DELETE') + + SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") def get(self, path): self.log.warning("/api/notebooks is deprecated, use /api/contents") - self.redirect(url_path_join( - self.base_url, - 'api/contents', - path - )) + self.redirect(url_path_join(self.base_url, "api/contents", path)) put = patch = post = delete = get @@ -305,24 +302,27 @@ class TrustNotebooksHandler(JupyterHandler): @web.authenticated @gen.coroutine - def post(self,path=''): + def post(self, path=""): cm = self.contents_manager yield maybe_future(cm.trust_notebook(path)) self.set_status(201) self.finish() -#----------------------------------------------------------------------------- + + +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _checkpoint_id_regex = r"(?P[\w-]+)" default_handlers = [ (r"/api/contents%s/checkpoints" % path_regex, CheckpointsHandler), - (r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex), - ModifyCheckpointsHandler), + ( + r"/api/contents%s/checkpoints/%s" % (path_regex, _checkpoint_id_regex), + ModifyCheckpointsHandler, + ), (r"/api/contents%s/trust" % path_regex, TrustNotebooksHandler), (r"/api/contents%s" % path_regex, ContentsHandler), (r"/api/notebooks/?(.*)", NotebooksRedirectHandler), ] - diff --git a/jupyter_server/services/contents/largefilemanager.py 
b/jupyter_server/services/contents/largefilemanager.py index 5487047474..5d047a7db0 100644 --- a/jupyter_server/services/contents/largefilemanager.py +++ b/jupyter_server/services/contents/largefilemanager.py @@ -3,24 +3,30 @@ from tornado import web import nbformat import base64 -import os, io +import os +import io class LargeFileManager(FileContentsManager): """Handle large file upload.""" - def save(self, model, path=''): + def save(self, model, path=""): """Save the file model and return the model with no content.""" - chunk = model.get('chunk', None) + chunk = model.get("chunk", None) if chunk is not None: - path = path.strip('/') + path = path.strip("/") - if 'type' not in model: - raise web.HTTPError(400, u'No file type provided') - if model['type'] != 'file': - raise web.HTTPError(400, u'File type "{}" is not supported for large file transfer'.format(model['type'])) - if 'content' not in model and model['type'] != 'directory': - raise web.HTTPError(400, u'No file content provided') + if "type" not in model: + raise web.HTTPError(400, u"No file type provided") + if model["type"] != "file": + raise web.HTTPError( + 400, + u'File type "{}" is not supported for large file transfer'.format( + model["type"] + ), + ) + if "content" not in model and model["type"] != "directory": + raise web.HTTPError(400, u"No file content provided") os_path = self._get_os_path(path) @@ -28,14 +34,22 @@ def save(self, model, path=''): if chunk == 1: self.log.debug("Saving %s", os_path) self.run_pre_save_hook(model=model, path=path) - super(LargeFileManager, self)._save_file(os_path, model['content'], model.get('format')) + super(LargeFileManager, self)._save_file( + os_path, model["content"], model.get("format") + ) else: - self._save_large_file(os_path, model['content'], model.get('format')) + self._save_large_file( + os_path, model["content"], model.get("format") + ) except web.HTTPError: raise except Exception as e: - self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) - raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) + self.log.error( + u"Error while saving file: %s %s", path, e, exc_info=True + ) + raise web.HTTPError( + 500, u"Unexpected error while saving file: %s %s" % (path, e) + ) model = self.get(path, content=False) @@ -48,25 +62,21 @@ def save(self, model, path=''): def _save_large_file(self, os_path, content, format): """Save content of a generic file.""" - if format not in {'text', 'base64'}: + if format not in {"text", "base64"}: raise web.HTTPError( - 400, - "Must specify format of file contents as 'text' or 'base64'", + 400, "Must specify format of file contents as 'text' or 'base64'", ) try: - if format == 'text': - bcontent = content.encode('utf8') + if format == "text": + bcontent = content.encode("utf8") else: - b64_bytes = content.encode('ascii') + b64_bytes = content.encode("ascii") bcontent = base64.b64decode(b64_bytes) except Exception as e: - raise web.HTTPError( - 400, u'Encoding error saving %s: %s' % (os_path, e) - ) + raise web.HTTPError(400, u"Encoding error saving %s: %s" % (os_path, e)) with self.perm_to_403(os_path): if os.path.islink(os_path): os_path = os.path.join(os.path.dirname(os_path), os.readlink(os_path)) - with io.open(os_path, 'ab') as f: + with io.open(os_path, "ab") as f: f.write(bcontent) - diff --git a/jupyter_server/services/contents/manager.py b/jupyter_server/services/contents/manager.py index 28ac63fcce..8bbab7e833 100644 --- a/jupyter_server/services/contents/manager.py +++ 
b/jupyter_server/services/contents/manager.py @@ -34,7 +34,7 @@ from jupyter_server.transutils import _ -copy_pat = re.compile(r'\-Copy\d*\.') +copy_pat = re.compile(r"\-Copy\d*\.") class ContentsManager(LoggingConfigurable): @@ -56,34 +56,44 @@ class ContentsManager(LoggingConfigurable): """ - root_dir = Unicode('/', config=True) + root_dir = Unicode("/", config=True) allow_hidden = Bool(False, config=True, help="Allow access to hidden files") notary = Instance(sign.NotebookNotary) + def _notary_default(self): return sign.NotebookNotary(parent=self) - hide_globs = List(Unicode(), [ - u'__pycache__', '*.pyc', '*.pyo', - '.DS_Store', '*.so', '*.dylib', '*~', - ], config=True, help=""" + hide_globs = List( + Unicode(), + [u"__pycache__", "*.pyc", "*.pyo", ".DS_Store", "*.so", "*.dylib", "*~",], + config=True, + help=""" Glob patterns to hide in file and directory listings. - """) + """, + ) - untitled_notebook = Unicode(_("Untitled"), config=True, - help="The base name used when creating untitled notebooks." + untitled_notebook = Unicode( + _("Untitled"), + config=True, + help="The base name used when creating untitled notebooks.", ) - untitled_file = Unicode("untitled", config=True, - help="The base name used when creating untitled files." + untitled_file = Unicode( + "untitled", config=True, help="The base name used when creating untitled files." ) - untitled_directory = Unicode("Untitled Folder", config=True, - help="The base name used when creating untitled directories." + untitled_directory = Unicode( + "Untitled Folder", + config=True, + help="The base name used when creating untitled directories.", ) - pre_save_hook = Any(None, config=True, allow_none=True, + pre_save_hook = Any( + None, + config=True, + allow_none=True, help="""Python callable or importstring thereof To be called on a contents model prior to save. @@ -100,12 +110,12 @@ def _notary_default(self): Modifying this dict will affect the file that is stored. - path: the API path of the save destination - contents_manager: this ContentsManager instance - """ + """, ) - @validate('pre_save_hook') + @validate("pre_save_hook") def _validate_pre_save_hook(self, proposal): - value = proposal['value'] + value = proposal["value"] if isinstance(value, string_types): value = import_item(self.pre_save_hook) if not callable(value): @@ -117,7 +127,9 @@ def run_pre_save_hook(self, model, path, **kwargs): if self.pre_save_hook: try: self.log.debug("Running pre-save hook on %s", path) - self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs) + self.pre_save_hook( + model=model, path=path, contents_manager=self, **kwargs + ) except Exception: self.log.error("Pre-save hook failed on %s", path, exc_info=True) @@ -125,19 +137,19 @@ def run_pre_save_hook(self, model, path, **kwargs): checkpoints = Instance(Checkpoints, config=True) checkpoints_kwargs = Dict(config=True) - @default('checkpoints') + @default("checkpoints") def _default_checkpoints(self): return self.checkpoints_class(**self.checkpoints_kwargs) - @default('checkpoints_kwargs') + @default("checkpoints_kwargs") def _default_checkpoints_kwargs(self): - return dict( - parent=self, - log=self.log, - ) + return dict(parent=self, log=self.log,) files_handler_class = Type( - FilesHandler, klass=RequestHandler, allow_none=True, config=True, + FilesHandler, + klass=RequestHandler, + allow_none=True, + config=True, help="""handler class to use when serving raw file requests. 
Default is a fallback that talks to the ContentsManager API, @@ -147,7 +159,7 @@ def _default_checkpoints_kwargs(self): which will be much more efficient. Access to these files should be Authenticated. - """ + """, ) files_handler_params = Dict( @@ -156,7 +168,7 @@ def _default_checkpoints_kwargs(self): For example, StaticFileHandlers generally expect a `path` argument specifying the root directory from which to serve files. - """ + """, ) def get_extra_handlers(self): @@ -210,7 +222,7 @@ def is_hidden(self, path): """ raise NotImplementedError - def file_exists(self, path=''): + def file_exists(self, path=""): """Does a file exist at the given path? Like os.path.isfile @@ -227,7 +239,7 @@ def file_exists(self, path=''): exists : bool Whether the file exists. """ - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def exists(self, path): """Does a file or directory exist at the given path? @@ -248,7 +260,7 @@ def exists(self, path): def get(self, path, content=True, type=None, format=None): """Get a file or directory model.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def save(self, model, path): """ @@ -258,22 +270,22 @@ def save(self, model, path): should call self.run_pre_save_hook(model=model, path=path) prior to writing any data. """ - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def delete_file(self, path): """Delete the file or directory at path.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") def rename_file(self, old_path, new_path): """Rename a file or directory.""" - raise NotImplementedError('must be implemented in a subclass') + raise NotImplementedError("must be implemented in a subclass") # ContentsManager API part 2: methods that have useable default # implementations, but can be overridden in subclasses. def delete(self, path): """Delete a file/directory and any associated checkpoints.""" - path = path.strip('/') + path = path.strip("/") if not path: raise HTTPError(400, "Can't delete root") self.delete_file(path) @@ -290,8 +302,8 @@ def update(self, model, path): For use in PATCH requests, to enable renaming a file without re-uploading its contents. Only used for renaming at the moment. """ - path = path.strip('/') - new_path = model.get('path', path).strip('/') + path = path.strip("/") + new_path = model.get("path", path).strip("/") if path != new_path: self.rename(path, new_path) model = self.get(new_path, content=False) @@ -310,9 +322,9 @@ def get_kernel_path(self, path, model=None): notebook server. FileContentsManager overrides this to use the directory containing the notebook. """ - return '' + return "" - def increment_filename(self, filename, path='', insert=''): + def increment_filename(self, filename, path="", insert=""): """Increment a filename until it is unique. Parameters @@ -330,96 +342,98 @@ def increment_filename(self, filename, path='', insert=''): A filename that is unique, based on the input filename. """ # Extract the full suffix from the filename (e.g. 
.tar.gz) - path = path.strip('/') - basename, dot, ext = filename.rpartition('.') - if ext != 'ipynb': - basename, dot, ext = filename.partition('.') - + path = path.strip("/") + basename, dot, ext = filename.rpartition(".") + if ext != "ipynb": + basename, dot, ext = filename.partition(".") + suffix = dot + ext for i in itertools.count(): if i: - insert_i = '{}{}'.format(insert, i) + insert_i = "{}{}".format(insert, i) else: - insert_i = '' - name = u'{basename}{insert}{suffix}'.format(basename=basename, - insert=insert_i, suffix=suffix) - if not self.exists(u'{}/{}'.format(path, name)): + insert_i = "" + name = u"{basename}{insert}{suffix}".format( + basename=basename, insert=insert_i, suffix=suffix + ) + if not self.exists(u"{}/{}".format(path, name)): break return name def validate_notebook_model(self, model): """Add failed-validation message to model""" try: - validate_nb(model['content']) + validate_nb(model["content"]) except ValidationError as e: - model['message'] = u'Notebook validation failed: {}:\n{}'.format( - e.message, json.dumps(e.instance, indent=1, default=lambda obj: ''), + model["message"] = u"Notebook validation failed: {}:\n{}".format( + e.message, + json.dumps(e.instance, indent=1, default=lambda obj: ""), ) return model - - def new_untitled(self, path='', type='', ext=''): + + def new_untitled(self, path="", type="", ext=""): """Create a new untitled file or directory in path - + path must be a directory - + File extension can be specified. - + Use `new` to create files with a fully specified path (including filename). """ - path = path.strip('/') + path = path.strip("/") if not self.dir_exists(path): - raise HTTPError(404, 'No such directory: %s' % path) - + raise HTTPError(404, "No such directory: %s" % path) + model = {} if type: - model['type'] = type - - if ext == '.ipynb': - model.setdefault('type', 'notebook') + model["type"] = type + + if ext == ".ipynb": + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') - - insert = '' - if model['type'] == 'directory': + model.setdefault("type", "file") + + insert = "" + if model["type"] == "directory": untitled = self.untitled_directory - insert = ' ' - elif model['type'] == 'notebook': + insert = " " + elif model["type"] == "notebook": untitled = self.untitled_notebook - ext = '.ipynb' - elif model['type'] == 'file': + ext = ".ipynb" + elif model["type"] == "file": untitled = self.untitled_file else: - raise HTTPError(400, "Unexpected model type: %r" % model['type']) - + raise HTTPError(400, "Unexpected model type: %r" % model["type"]) + name = self.increment_filename(untitled + ext, path, insert=insert) - path = u'{0}/{1}'.format(path, name) + path = u"{0}/{1}".format(path, name) return self.new(model, path) - - def new(self, model=None, path=''): + + def new(self, model=None, path=""): """Create a new file or directory and return its model with no content. - + To create a new untitled entity in a directory, use `new_untitled`. 
""" - path = path.strip('/') + path = path.strip("/") if model is None: model = {} - - if path.endswith('.ipynb'): - model.setdefault('type', 'notebook') + + if path.endswith(".ipynb"): + model.setdefault("type", "notebook") else: - model.setdefault('type', 'file') - + model.setdefault("type", "file") + # no content, not a directory, so fill out new-file model - if 'content' not in model and model['type'] != 'directory': - if model['type'] == 'notebook': - model['content'] = new_notebook() - model['format'] = 'json' + if "content" not in model and model["type"] != "directory": + if model["type"] == "notebook": + model["content"] = new_notebook() + model["format"] = "json" else: - model['content'] = '' - model['type'] = 'file' - model['format'] = 'text' - + model["content"] = "" + model["type"] = "file" + model["format"] = "text" + model = self.save(model, path) return model @@ -433,29 +447,29 @@ def copy(self, from_path, to_path=None): from_path must be a full path to a file. """ - path = from_path.strip('/') + path = from_path.strip("/") if to_path is not None: - to_path = to_path.strip('/') + to_path = to_path.strip("/") - if '/' in path: - from_dir, from_name = path.rsplit('/', 1) + if "/" in path: + from_dir, from_name = path.rsplit("/", 1) else: - from_dir = '' + from_dir = "" from_name = path - + model = self.get(path) - model.pop('path', None) - model.pop('name', None) - if model['type'] == 'directory': + model.pop("path", None) + model.pop("name", None) + if model["type"] == "directory": raise HTTPError(400, "Can't copy directories") - + if to_path is None: to_path = from_dir if self.dir_exists(to_path): - name = copy_pat.sub(u'.', from_name) - to_name = self.increment_filename(name, to_path, insert='-Copy') - to_path = u'{0}/{1}'.format(to_path, to_name) - + name = copy_pat.sub(u".", from_name) + to_name = self.increment_filename(name, to_path, insert="-Copy") + to_path = u"{0}/{1}".format(to_path, to_name) + model = self.save(model, to_path) return model @@ -471,12 +485,12 @@ def trust_notebook(self, path): The path of a notebook """ model = self.get(path) - nb = model['content'] + nb = model["content"] self.log.warning("Trusting notebook %s", path) self.notary.mark_cells(nb, True) self.check_and_sign(nb, path) - def check_and_sign(self, nb, path=''): + def check_and_sign(self, nb, path=""): """Check for trusted cells, and sign the notebook. Called as a part of saving notebooks. @@ -493,7 +507,7 @@ def check_and_sign(self, nb, path=''): else: self.log.warning("Notebook %s is not trusted", path) - def mark_trusted_cells(self, nb, path=''): + def mark_trusted_cells(self, nb, path=""): """Mark cells as trusted if the notebook signature matches. Called as a part of loading notebooks. 
diff --git a/jupyter_server/services/kernels/handlers.py b/jupyter_server/services/kernels/handlers.py index 358798408c..1b3aa11709 100644 --- a/jupyter_server/services/kernels/handlers.py +++ b/jupyter_server/services/kernels/handlers.py @@ -20,12 +20,13 @@ from jupyter_server.utils import url_path_join, url_escape, maybe_future from ...base.handlers import APIHandler -from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message - +from ...base.zmqhandlers import ( + AuthenticatedZMQStreamHandler, + deserialize_binary_message, +) class MainKernelHandler(APIHandler): - @web.authenticated @gen.coroutine def get(self): @@ -39,22 +40,19 @@ def post(self): km = self.kernel_manager model = self.get_json_body() if model is None: - model = { - 'name': km.default_kernel_name - } + model = {"name": km.default_kernel_name} else: - model.setdefault('name', km.default_kernel_name) + model.setdefault("name", km.default_kernel_name) - kernel_id = yield maybe_future(km.start_kernel(kernel_name=model['name'])) + kernel_id = yield maybe_future(km.start_kernel(kernel_name=model["name"])) model = yield maybe_future(km.kernel_model(kernel_id)) - location = url_path_join(self.base_url, 'api', 'kernels', url_escape(kernel_id)) - self.set_header('Location', location) + location = url_path_join(self.base_url, "api", "kernels", url_escape(kernel_id)) + self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=date_default)) class KernelHandler(APIHandler): - @web.authenticated def get(self, kernel_id): km = self.kernel_manager @@ -71,15 +69,14 @@ def delete(self, kernel_id): class KernelActionHandler(APIHandler): - @web.authenticated @gen.coroutine def post(self, kernel_id, action): km = self.kernel_manager - if action == 'interrupt': + if action == "interrupt": km.interrupt_kernel(kernel_id) self.set_status(204) - if action == 'restart': + if action == "restart": try: yield maybe_future(km.restart_kernel(kernel_id)) @@ -93,9 +90,9 @@ def post(self, kernel_id, action): class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): - '''There is one ZMQChannelsHandler per running kernel and it oversees all + """There is one ZMQChannelsHandler per running kernel and it oversees all the sessions. 
- ''' + """ # class-level registry of open sessions # allows checking for conflict on session-id, @@ -105,28 +102,31 @@ class ZMQChannelsHandler(AuthenticatedZMQStreamHandler): @property def kernel_info_timeout(self): km_default = self.kernel_manager.kernel_info_timeout - return self.settings.get('kernel_info_timeout', km_default) + return self.settings.get("kernel_info_timeout", km_default) @property def iopub_msg_rate_limit(self): - return self.settings.get('iopub_msg_rate_limit', 0) + return self.settings.get("iopub_msg_rate_limit", 0) @property def iopub_data_rate_limit(self): - return self.settings.get('iopub_data_rate_limit', 0) + return self.settings.get("iopub_data_rate_limit", 0) @property def rate_limit_window(self): - return self.settings.get('rate_limit_window', 1.0) + return self.settings.get("rate_limit_window", 1.0) def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized')) + return "%s(%s)" % ( + self.__class__.__name__, + getattr(self, "kernel_id", "uninitialized"), + ) def create_stream(self): km = self.kernel_manager identity = self.session.bsession - for channel in ('shell', 'control', 'iopub', 'stdin'): - meth = getattr(km, 'connect_' + channel) + for channel in ("shell", "control", "iopub", "stdin"): + meth = getattr(km, "connect_" + channel) self.channels[channel] = stream = meth(self.kernel_id, identity=identity) stream.channel = channel @@ -158,7 +158,7 @@ def _handle_kernel_info_reply(self, msg): enabling msg spec adaptation, if necessary """ - idents,msg = self.session.feed_identities(msg) + idents, msg = self.session.feed_identities(msg) try: msg = self.session.deserialize(msg) except: @@ -166,9 +166,9 @@ def _handle_kernel_info_reply(self, msg): self._kernel_info_future.set_result({}) return else: - info = msg['content'] + info = msg["content"] self.log.debug("Received kernel info: %s", info) - if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info: + if msg["msg_type"] != "kernel_info_reply" or "protocol_version" not in info: self.log.error("Kernel info request failed, assuming current %s", info) info = {} self._finish_kernel_info(info) @@ -184,10 +184,16 @@ def _finish_kernel_info(self, info): Set up protocol adaptation, if needed, and signal that connection can continue. 
""" - protocol_version = info.get('protocol_version', client_protocol_version) + protocol_version = info.get("protocol_version", client_protocol_version) if protocol_version != client_protocol_version: - self.session.adapt_version = int(protocol_version.split('.')[0]) - self.log.info("Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format(protocol_version=protocol_version, kernel_id=self.kernel_id, client_protocol_version=client_protocol_version)) + self.session.adapt_version = int(protocol_version.split(".")[0]) + self.log.info( + "Adapting from protocol version {protocol_version} (kernel {kernel_id}) to {client_protocol_version} (client).".format( + protocol_version=protocol_version, + kernel_id=self.kernel_id, + client_protocol_version=client_protocol_version, + ) + ) if not self._kernel_info_future.done(): self._kernel_info_future.set_result(info) @@ -199,7 +205,7 @@ def initialize(self): self.kernel_info_channel = None self._kernel_info_future = Future() self._close_future = Future() - self.session_key = '' + self.session_key = "" # Rate limiting code self._iopub_window_msg_count = 0 @@ -228,8 +234,11 @@ def give_up(): """Don't wait forever for the kernel to reply""" if future.done(): return - self.log.warning("Timeout waiting for kernel_info reply from %s", self.kernel_id) + self.log.warning( + "Timeout waiting for kernel_info reply from %s", self.kernel_id + ) future.set_result({}) + loop = IOLoop.current() loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up) # actually wait for it @@ -237,7 +246,7 @@ def give_up(): @gen.coroutine def get(self, kernel_id): - self.kernel_id = cast_unicode(kernel_id, 'ascii') + self.kernel_id = cast_unicode(kernel_id, "ascii") yield super(ZMQChannelsHandler, self).get(kernel_id=kernel_id) @gen.coroutine @@ -248,7 +257,7 @@ def _register_session(self): This is likely due to a client reconnecting from a lost network connection, where the socket on our side has not been cleaned up yet. 
""" - self.session_key = '%s:%s' % (self.kernel_id, self.session.session) + self.session_key = "%s:%s" % (self.kernel_id, self.session.session) stale_handler = self._open_sessions.get(self.session_key) if stale_handler: self.log.warning("Replacing stale connection: %s", self.session_key) @@ -262,10 +271,10 @@ def open(self, kernel_id): # on new connections, flush the message buffer buffer_info = km.get_buffer(kernel_id, self.session_key) - if buffer_info and buffer_info['session_key'] == self.session_key: + if buffer_info and buffer_info["session_key"] == self.session_key: self.log.info("Restoring connection for %s", self.session_key) - self.channels = buffer_info['channels'] - replay_buffer = buffer_info['buffer'] + self.channels = buffer_info["channels"] + replay_buffer = buffer_info["buffer"] if replay_buffer: self.log.info("Replaying %s buffered messages", len(replay_buffer)) for channel, msg_list in replay_buffer: @@ -285,7 +294,7 @@ def open(self, kernel_id): return km.add_restart_callback(self.kernel_id, self.on_kernel_restarted) - km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead') + km.add_restart_callback(self.kernel_id, self.on_restart_failed, "dead") for channel, stream in self.channels.items(): stream.on_recv_stream(self._on_zmq_reply) @@ -299,17 +308,19 @@ def on_message(self, msg): msg = deserialize_binary_message(msg) else: msg = json.loads(msg) - channel = msg.pop('channel', None) + channel = msg.pop("channel", None) if channel is None: self.log.warning("No channel specified, assuming shell: %s", msg) - channel = 'shell' + channel = "shell" if channel not in self.channels: self.log.warning("No such channel: %r", channel) return am = self.kernel_manager.allowed_message_types - mt = msg['header']['msg_type'] + mt = msg["header"]["msg_type"] if am and mt not in am: - self.log.warning('Received message of type "%s", which is not allowed. Ignoring.' % mt) + self.log.warning( + 'Received message of type "%s", which is not allowed. Ignoring.' % mt + ) else: stream = self.channels[channel] self.session.send(stream, msg) @@ -317,19 +328,26 @@ def on_message(self, msg): def _on_zmq_reply(self, stream, msg_list): idents, fed_msg_list = self.session.feed_identities(msg_list) msg = self.session.deserialize(fed_msg_list) - parent = msg['parent_header'] + parent = msg["parent_header"] + def write_stderr(error_message): self.log.warning(error_message) - msg = self.session.msg("stream", - content={"text": error_message + '\n', "name": "stderr"}, - parent=parent + msg = self.session.msg( + "stream", + content={"text": error_message + "\n", "name": "stderr"}, + parent=parent, ) - msg['channel'] = 'iopub' + msg["channel"] = "iopub" self.write_message(json.dumps(msg, default=date_default)) - channel = getattr(stream, 'channel', None) - msg_type = msg['header']['msg_type'] - if channel == 'iopub' and msg_type == 'status' and msg['content'].get('execution_state') == 'idle': + channel = getattr(stream, "channel", None) + msg_type = msg["header"]["msg_type"] + + if ( + channel == "iopub" + and msg_type == "status" + and msg["content"].get("execution_state") == "idle" + ): # reset rate limit counter on status=idle, # to avoid 'Run All' hitting limits prematurely. 
self._iopub_window_byte_queue = [] @@ -338,13 +356,17 @@ def write_stderr(error_message): self._iopub_msgs_exceeded = False self._iopub_data_exceeded = False - if channel == 'iopub' and msg_type not in {'status', 'comm_open', 'execute_input'}: + if channel == "iopub" and msg_type not in { + "status", + "comm_open", + "execute_input", + }: # Remove the counts queued for removal. now = IOLoop.current().time() while len(self._iopub_window_byte_queue) > 0: queued = self._iopub_window_byte_queue[0] - if (now >= queued[0]): + if now >= queued[0]: self._iopub_window_byte_count -= queued[1] self._iopub_window_msg_count -= 1 del self._iopub_window_byte_queue[0] @@ -355,7 +377,7 @@ def write_stderr(error_message): # Increment the bytes and message count self._iopub_window_msg_count += 1 - if msg_type == 'stream': + if msg_type == "stream": byte_count = sum([len(x) for x in msg_list]) else: byte_count = 0 @@ -363,7 +385,9 @@ def write_stderr(error_message): # Queue a removal of the byte and message count for a time in the # future, when we are no longer interested in it. - self._iopub_window_byte_queue.append((now + self.rate_limit_window, byte_count)) + self._iopub_window_byte_queue.append( + (now + self.rate_limit_window, byte_count) + ) # Check the limits, set the limit flags, and reset the # message and data counts. @@ -374,7 +398,9 @@ def write_stderr(error_message): if self.iopub_msg_rate_limit > 0 and msg_rate > self.iopub_msg_rate_limit: if not self._iopub_msgs_exceeded: self._iopub_msgs_exceeded = True - write_stderr(dedent("""\ + write_stderr( + dedent( + """\ IOPub message rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. @@ -384,19 +410,30 @@ def write_stderr(error_message): Current values: ServerApp.iopub_msg_rate_limit={} (msgs/sec) ServerApp.rate_limit_window={} (secs) - """.format(self.iopub_msg_rate_limit, self.rate_limit_window))) + """.format( + self.iopub_msg_rate_limit, self.rate_limit_window + ) + ) + ) else: # resume once we've got some headroom below the limit - if self._iopub_msgs_exceeded and msg_rate < (0.8 * self.iopub_msg_rate_limit): + if self._iopub_msgs_exceeded and msg_rate < ( + 0.8 * self.iopub_msg_rate_limit + ): self._iopub_msgs_exceeded = False if not self._iopub_data_exceeded: self.log.warning("iopub messages resumed") # Check the data rate - if self.iopub_data_rate_limit > 0 and data_rate > self.iopub_data_rate_limit: + if ( + self.iopub_data_rate_limit > 0 + and data_rate > self.iopub_data_rate_limit + ): if not self._iopub_data_exceeded: self._iopub_data_exceeded = True - write_stderr(dedent("""\ + write_stderr( + dedent( + """\ IOPub data rate exceeded. The Jupyter server will temporarily stop sending output to the client in order to avoid crashing it. 
@@ -406,10 +443,16 @@ def write_stderr(error_message): Current values: ServerApp.iopub_data_rate_limit={} (bytes/sec) ServerApp.rate_limit_window={} (secs) - """.format(self.iopub_data_rate_limit, self.rate_limit_window))) + """.format( + self.iopub_data_rate_limit, self.rate_limit_window + ) + ) + ) else: # resume once we've got some headroom below the limit - if self._iopub_data_exceeded and data_rate < (0.8 * self.iopub_data_rate_limit): + if self._iopub_data_exceeded and data_rate < ( + 0.8 * self.iopub_data_rate_limit + ): self._iopub_data_exceeded = False if not self._iopub_msgs_exceeded: self.log.warning("iopub messages resumed") @@ -440,7 +483,7 @@ def on_close(self): self.kernel_id, self.on_kernel_restarted, ) km.remove_restart_callback( - self.kernel_id, self.on_restart_failed, 'dead', + self.kernel_id, self.on_restart_failed, "dead", ) # start buffering instead of closing if this was the last connection @@ -461,30 +504,28 @@ def on_close(self): self._close_future.set_result(None) def _send_status_message(self, status): - iopub = self.channels.get('iopub', None) + iopub = self.channels.get("iopub", None) if iopub and not iopub.closed(): # flush IOPub before sending a restarting/dead status message # ensures proper ordering on the IOPub channel # that all messages from the stopped kernel have been delivered iopub.flush() - msg = self.session.msg("status", - {'execution_state': status} - ) - msg['channel'] = 'iopub' + msg = self.session.msg("status", {"execution_state": status}) + msg["channel"] = "iopub" self.write_message(json.dumps(msg, default=date_default)) def on_kernel_restarted(self): logging.warn("kernel %s restarted", self.kernel_id) - self._send_status_message('restarting') + self._send_status_message("restarting") def on_restart_failed(self): logging.error("kernel %s restarted failed!", self.kernel_id) - self._send_status_message('dead') + self._send_status_message("dead") -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _kernel_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" @@ -493,7 +534,9 @@ def on_restart_failed(self): default_handlers = [ (r"/api/kernels", MainKernelHandler), (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler), - (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler), + ( + r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), + KernelActionHandler, + ), (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler), ] - diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py index 250472f2cc..2048b96235 100644 --- a/jupyter_server/services/kernels/kernelmanager.py +++ b/jupyter_server/services/kernels/kernelmanager.py @@ -18,8 +18,18 @@ from jupyter_client.session import Session from jupyter_client.multikernelmanager import MultiKernelManager -from traitlets import (Any, Bool, Dict, List, Unicode, TraitError, Integer, - Float, Instance, default, validate +from traitlets import ( + Any, + Bool, + Dict, + List, + Unicode, + TraitError, + Integer, + Float, + Instance, + default, + validate, ) from jupyter_server.utils import maybe_future, to_os_path, exists @@ -36,7 +46,7 @@ class MappingKernelManager(MultiKernelManager): - Kernel message filtering """ - 
@default('kernel_manager_class') + @default("kernel_manager_class") def _default_kernel_manager_class(self): return "jupyter_client.ioloop.IOLoopKernelManager" @@ -50,17 +60,17 @@ def _default_kernel_manager_class(self): _initialized_culler = False - @default('root_dir') + @default("root_dir") def _default_root_dir(self): try: return self.parent.root_dir except AttributeError: return getcwd() - @validate('root_dir') + @validate("root_dir") def _update_root_dir(self, proposal): """Do a bit of validation of the root dir.""" - value = proposal['value'] + value = proposal["value"] if not os.path.isabs(value): # If we receive a non-absolute path, make it absolute. value = os.path.abspath(value) @@ -68,28 +78,38 @@ def _update_root_dir(self, proposal): raise TraitError("kernel root dir %r is not a directory" % value) return value - cull_idle_timeout = Integer(0, config=True, + cull_idle_timeout = Integer( + 0, + config=True, help="""Timeout (in seconds) after which a kernel is considered idle and ready to be culled. Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled - for users with poor network connections.""" + for users with poor network connections.""", ) - cull_interval_default = 300 # 5 minutes - cull_interval = Integer(cull_interval_default, config=True, - help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""" + cull_interval_default = 300 # 5 minutes + cull_interval = Integer( + cull_interval_default, + config=True, + help="""The interval (in seconds) on which to check for idle kernels exceeding the cull timeout value.""", ) - cull_connected = Bool(False, config=True, + cull_connected = Bool( + False, + config=True, help="""Whether to consider culling kernels which have one or more connections. - Only effective if cull_idle_timeout > 0.""" + Only effective if cull_idle_timeout > 0.""", ) - cull_busy = Bool(False, config=True, + cull_busy = Bool( + False, + config=True, help="""Whether to consider culling kernels which are busy. - Only effective if cull_idle_timeout > 0.""" + Only effective if cull_idle_timeout > 0.""", ) - buffer_offline_messages = Bool(True, config=True, + buffer_offline_messages = Bool( + True, + config=True, help="""Whether messages from kernels whose frontends have disconnected should be buffered in-memory. When True (default), messages are buffered and replayed on reconnect, @@ -97,10 +117,12 @@ def _update_root_dir(self, proposal): Disable if long-running kernels will produce too much output while no frontends are connected. - """ + """, ) - kernel_info_timeout = Float(60, config=True, + kernel_info_timeout = Float( + 60, + config=True, help="""Timeout for giving up on a kernel (in seconds). On starting and restarting kernels, we check whether the @@ -109,30 +131,35 @@ def _update_root_dir(self, proposal): before being presumed dead. This affects the MappingKernelManager (which handles kernel restarts) and the ZMQChannelsHandler (which handles the startup). 
- """ + """, ) _kernel_buffers = Any() - @default('_kernel_buffers') + + @default("_kernel_buffers") def _default_kernel_buffers(self): - return defaultdict(lambda: {'buffer': [], 'session_key': '', 'channels': {}}) + return defaultdict(lambda: {"buffer": [], "session_key": "", "channels": {}}) - last_kernel_activity = Instance(datetime, - help="The last activity on any kernel, including shutting down a kernel") + last_kernel_activity = Instance( + datetime, + help="The last activity on any kernel, including shutting down a kernel", + ) def __init__(self, **kwargs): super(MappingKernelManager, self).__init__(**kwargs) self.last_kernel_activity = utcnow() - allowed_message_types = List(trait=Unicode(), config=True, + allowed_message_types = List( + trait=Unicode(), + config=True, help="""White list of allowed kernel message types. When the list is empty, all message types are allowed. - """ + """, ) - #------------------------------------------------------------------------- + # ------------------------------------------------------------------------- # Methods for managing kernels and sessions - #------------------------------------------------------------------------- + # ------------------------------------------------------------------------- def _handle_kernel_died(self, kernel_id): """notice that a kernel died""" @@ -167,7 +194,7 @@ def start_kernel(self, kernel_id=None, path=None, **kwargs): """ if kernel_id is None: if path is not None: - kwargs['cwd'] = self.cwd_for_path(path) + kwargs["cwd"] = self.cwd_for_path(path) kernel_id = yield maybe_future( super(MappingKernelManager, self).start_kernel(**kwargs) ) @@ -176,9 +203,8 @@ def start_kernel(self, kernel_id=None, path=None, **kwargs): self.log.info("Kernel started: %s" % kernel_id) self.log.debug("Kernel args: %r" % kwargs) # register callback for failed auto-restart - self.add_restart_callback(kernel_id, - lambda : self._handle_kernel_died(kernel_id), - 'dead', + self.add_restart_callback( + kernel_id, lambda: self._handle_kernel_died(kernel_id), "dead", ) # Increase the metric of number of kernels running @@ -224,15 +250,15 @@ def start_buffering(self, kernel_id, session_key, channels): self.stop_buffering(kernel_id) buffer_info = self._kernel_buffers[kernel_id] # record the session key because only one session can buffer - buffer_info['session_key'] = session_key + buffer_info["session_key"] = session_key # TODO: the buffer should likely be a memory bounded queue, we're starting with a list to keep it simple - buffer_info['buffer'] = [] - buffer_info['channels'] = channels + buffer_info["buffer"] = [] + buffer_info["channels"] = channels # forward any future messages to the internal buffer def buffer_msg(channel, msg_parts): self.log.debug("Buffering msg on %s:%s", kernel_id, channel) - buffer_info['buffer'].append((channel, msg_parts)) + buffer_info["buffer"].append((channel, msg_parts)) for channel, stream in channels.items(): stream.on_recv(partial(buffer_msg, channel)) @@ -254,7 +280,7 @@ def get_buffer(self, kernel_id, session_key): return buffer_info = self._kernel_buffers[kernel_id] - if buffer_info['session_key'] == session_key: + if buffer_info["session_key"] == session_key: # remove buffer self._kernel_buffers.pop(kernel_id) # only return buffer_info if it's a match @@ -277,15 +303,18 @@ def stop_buffering(self, kernel_id): return buffer_info = self._kernel_buffers.pop(kernel_id) # close buffering streams - for stream in buffer_info['channels'].values(): + for stream in buffer_info["channels"].values(): if not 
stream.closed(): stream.on_recv(None) stream.close() - msg_buffer = buffer_info['buffer'] + msg_buffer = buffer_info["buffer"] if msg_buffer: - self.log.info("Discarding %s buffered messages for %s", - len(msg_buffer), buffer_info['session_key']) + self.log.info( + "Discarding %s buffered messages for %s", + len(msg_buffer), + buffer_info["session_key"], + ) def shutdown_kernel(self, kernel_id, now=False): """Shutdown a kernel by kernel_id""" @@ -320,7 +349,7 @@ def finish(): if not channel.closed(): channel.close() loop.remove_timeout(timeout) - kernel.remove_restart_callback(on_restart_failed, 'dead') + kernel.remove_restart_callback(on_restart_failed, "dead") def on_reply(msg): self.log.debug("Kernel info reply received: %s", kernel_id) @@ -340,7 +369,7 @@ def on_restart_failed(): if not future.done(): future.set_exception(RuntimeError("Restart failed")) - kernel.add_restart_callback(on_restart_failed, 'dead') + kernel.add_restart_callback(on_restart_failed, "dead") kernel.session.send(channel, "kernel_info_request") channel.on_recv(on_reply) loop = IOLoop.current() @@ -366,7 +395,7 @@ def kernel_model(self, kernel_id): kernel = self._kernels[kernel_id] model = { - "id":kernel_id, + "id": kernel_id, "name": kernel.kernel_name, "last_activity": isoformat(kernel.last_activity), "execution_state": kernel.execution_state, @@ -387,7 +416,7 @@ def list_kernels(self): def _check_kernel_id(self, kernel_id): """Check a that a kernel_id exists and raise 404 if not.""" if kernel_id not in self: - raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id) + raise web.HTTPError(404, u"Kernel does not exist: %s" % kernel_id) # monitoring activity: @@ -399,13 +428,10 @@ def start_watching_activity(self, kernel_id): """ kernel = self._kernels[kernel_id] # add busy/activity markers: - kernel.execution_state = 'starting' + kernel.execution_state = "starting" kernel.last_activity = utcnow() kernel._activity_stream = kernel.connect_iopub() - session = Session( - config=kernel.session.config, - key=kernel.session.key, - ) + session = Session(config=kernel.session.config, key=kernel.session.key,) def record_activity(msg_list): """Record an IOPub message arriving from a kernel""" @@ -414,10 +440,15 @@ def record_activity(msg_list): idents, fed_msg_list = session.feed_identities(msg_list) msg = session.deserialize(fed_msg_list) - msg_type = msg['header']['msg_type'] - if msg_type == 'status': - kernel.execution_state = msg['content']['execution_state'] - self.log.debug("activity on %s: %s (%s)", kernel_id, msg_type, kernel.execution_state) + msg_type = msg["header"]["msg_type"] + if msg_type == "status": + kernel.execution_state = msg["content"]["execution_state"] + self.log.debug( + "activity on %s: %s (%s)", + kernel_id, + msg_type, + kernel.execution_state, + ) else: self.log.debug("activity on %s: %s", kernel_id, msg_type) @@ -431,14 +462,21 @@ def initialize_culler(self): if not self._initialized_culler and self.cull_idle_timeout > 0: if self._culler_callback is None: loop = IOLoop.current() - if self.cull_interval <= 0: #handle case where user set invalid value - self.log.warning("Invalid value for 'cull_interval' detected (%s) - using default value (%s).", - self.cull_interval, self.cull_interval_default) + if self.cull_interval <= 0: # handle case where user set invalid value + self.log.warning( + "Invalid value for 'cull_interval' detected (%s) - using default value (%s).", + self.cull_interval, + self.cull_interval_default, + ) self.cull_interval = self.cull_interval_default 
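# [editor's note] tornado's PeriodicCallback takes its interval in
# *milliseconds*, hence the `1000 * self.cull_interval` just below.
# Equivalent standalone sketch (illustrative, not part of this patch):
#
#     from tornado.ioloop import PeriodicCallback
#     pc = PeriodicCallback(poll_idle_kernels, 1000 * interval_seconds)
#     pc.start()   # fires every interval_seconds on the current IOLoop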
self._culler_callback = PeriodicCallback( - self.cull_kernels, 1000*self.cull_interval) - self.log.info("Culling kernels with idle durations > %s seconds at %s second intervals ...", - self.cull_idle_timeout, self.cull_interval) + self.cull_kernels, 1000 * self.cull_interval + ) + self.log.info( + "Culling kernels with idle durations > %s seconds at %s second intervals ...", + self.cull_idle_timeout, + self.cull_interval, + ) if self.cull_busy: self.log.info("Culling kernels even if busy") if self.cull_connected: @@ -448,30 +486,47 @@ def initialize_culler(self): self._initialized_culler = True def cull_kernels(self): - self.log.debug("Polling every %s seconds for kernels idle > %s seconds...", - self.cull_interval, self.cull_idle_timeout) + self.log.debug( + "Polling every %s seconds for kernels idle > %s seconds...", + self.cull_interval, + self.cull_idle_timeout, + ) """Create a separate list of kernels to avoid conflicting updates while iterating""" for kernel_id in list(self._kernels): try: self.cull_kernel_if_idle(kernel_id) except Exception as e: - self.log.exception("The following exception was encountered while checking the idle duration of kernel %s: %s", - kernel_id, e) + self.log.exception( + "The following exception was encountered while checking the idle duration of kernel %s: %s", + kernel_id, + e, + ) def cull_kernel_if_idle(self, kernel_id): kernel = self._kernels[kernel_id] - self.log.debug("kernel_id=%s, kernel_name=%s, last_activity=%s", kernel_id, kernel.kernel_name, kernel.last_activity) + self.log.debug( + "kernel_id=%s, kernel_name=%s, last_activity=%s", + kernel_id, + kernel.kernel_name, + kernel.last_activity, + ) if kernel.last_activity is not None: dt_now = utcnow() dt_idle = dt_now - kernel.last_activity # Compute idle properties is_idle_time = dt_idle > timedelta(seconds=self.cull_idle_timeout) - is_idle_execute = self.cull_busy or (kernel.execution_state != 'busy') + is_idle_execute = self.cull_busy or (kernel.execution_state != "busy") connections = self._kernel_connections.get(kernel_id, 0) is_idle_connected = self.cull_connected or not connections # Cull the kernel if all three criteria are met - if (is_idle_time and is_idle_execute and is_idle_connected): + if is_idle_time and is_idle_execute and is_idle_connected: idle_duration = int(dt_idle.total_seconds()) - self.log.warning("Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.", - kernel.execution_state, kernel.kernel_name, kernel_id, connections, idle_duration) + self.log.warning( + "Culling '%s' kernel '%s' (%s) with %d connections due to %s seconds of inactivity.", + kernel.execution_state, + kernel.kernel_name, + kernel_id, + connections, + idle_duration, + ) self.shutdown_kernel(kernel_id) diff --git a/jupyter_server/services/kernelspecs/handlers.py b/jupyter_server/services/kernelspecs/handlers.py index 6302e96dfa..ed2a64d2e9 100644 --- a/jupyter_server/services/kernelspecs/handlers.py +++ b/jupyter_server/services/kernelspecs/handlers.py @@ -6,80 +6,78 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
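[editor's note] The cull_kernel_if_idle hunk in kernelmanager.py above reduces
to three predicates that must all hold before a kernel is shut down. A minimal
sketch of that decision pulled out as a standalone function (the helper name
and signature are illustrative, not part of jupyter_server):

    from datetime import timedelta

    def should_cull(now, last_activity, execution_state, connections,
                    cull_idle_timeout, cull_busy=False, cull_connected=False):
        # No culling when it is disabled or no activity was ever recorded.
        if cull_idle_timeout <= 0 or last_activity is None:
            return False
        is_idle_time = (now - last_activity) > timedelta(seconds=cull_idle_timeout)
        is_idle_execute = cull_busy or (execution_state != "busy")
        is_idle_connected = cull_connected or not connections
        return is_idle_time and is_idle_execute and is_idle_connected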
+from ...utils import maybe_future, url_path_join, url_unescape +from ...base.handlers import APIHandler +from tornado import web, gen import glob import json import os -pjoin = os.path.join - -from tornado import web, gen - -from ...base.handlers import APIHandler -from ...utils import maybe_future, url_path_join, url_unescape +pjoin = os.path.join def kernelspec_model(handler, name, spec_dict, resource_dir): """Load a KernelSpec by name and return the REST API model""" - d = { - 'name': name, - 'spec': spec_dict, - 'resources': {} - } + d = {"name": name, "spec": spec_dict, "resources": {}} # Add resource files if they exist resource_dir = resource_dir - for resource in ['kernel.js', 'kernel.css']: + for resource in ["kernel.js", "kernel.css"]: if os.path.exists(pjoin(resource_dir, resource)): - d['resources'][resource] = url_path_join( - handler.base_url, - 'kernelspecs', - name, - resource + d["resources"][resource] = url_path_join( + handler.base_url, "kernelspecs", name, resource ) - for logo_file in glob.glob(pjoin(resource_dir, 'logo-*')): + for logo_file in glob.glob(pjoin(resource_dir, "logo-*")): fname = os.path.basename(logo_file) no_ext, _ = os.path.splitext(fname) - d['resources'][no_ext] = url_path_join( - handler.base_url, - 'kernelspecs', - name, - fname + d["resources"][no_ext] = url_path_join( + handler.base_url, "kernelspecs", name, fname ) return d def is_kernelspec_model(spec_dict): """Returns True if spec_dict is already in proper form. This will occur when using a gateway.""" - return isinstance(spec_dict, dict) and 'name' in spec_dict and 'spec' in spec_dict and 'resources' in spec_dict + return ( + isinstance(spec_dict, dict) + and "name" in spec_dict + and "spec" in spec_dict + and "resources" in spec_dict + ) class MainKernelSpecHandler(APIHandler): - @web.authenticated @gen.coroutine def get(self): ksm = self.kernel_spec_manager km = self.kernel_manager model = {} - model['default'] = km.default_kernel_name - model['kernelspecs'] = specs = {} + model["default"] = km.default_kernel_name + model["kernelspecs"] = specs = {} kspecs = yield maybe_future(ksm.get_all_specs()) for kernel_name, kernel_info in kspecs.items(): try: if is_kernelspec_model(kernel_info): d = kernel_info else: - d = kernelspec_model(self, kernel_name, kernel_info['spec'], kernel_info['resource_dir']) + d = kernelspec_model( + self, + kernel_name, + kernel_info["spec"], + kernel_info["resource_dir"], + ) except Exception: - self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True) + self.log.error( + "Failed to load kernel spec: '%s'", kernel_name, exc_info=True + ) continue specs[kernel_name] = d - self.set_header("Content-Type", 'application/json') + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model)) class KernelSpecHandler(APIHandler): - @web.authenticated @gen.coroutine def get(self, kernel_name): @@ -88,12 +86,14 @@ def get(self, kernel_name): try: spec = yield maybe_future(ksm.get_kernel_spec(kernel_name)) except KeyError: - raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name) + raise web.HTTPError(404, u"Kernel spec %s not found" % kernel_name) if is_kernelspec_model(spec): model = spec else: - model = kernelspec_model(self, kernel_name, spec.to_dict(), spec.resource_dir) - self.set_header("Content-Type", 'application/json') + model = kernelspec_model( + self, kernel_name, spec.to_dict(), spec.resource_dir + ) + self.set_header("Content-Type", "application/json") self.finish(json.dumps(model)) diff --git 
a/jupyter_server/services/nbconvert/handlers.py b/jupyter_server/services/nbconvert/handlers.py index 63e731238f..87994e17a2 100644 --- a/jupyter_server/services/nbconvert/handlers.py +++ b/jupyter_server/services/nbconvert/handlers.py @@ -6,7 +6,6 @@ class NbconvertRootHandler(APIHandler): - @web.authenticated def get(self): try: @@ -33,6 +32,7 @@ def get(self): self.finish(json.dumps(res)) + default_handlers = [ (r"/api/nbconvert", NbconvertRootHandler), ] diff --git a/jupyter_server/services/security/handlers.py b/jupyter_server/services/security/handlers.py index 82a00d234b..21e3a43031 100644 --- a/jupyter_server/services/security/handlers.py +++ b/jupyter_server/services/security/handlers.py @@ -8,8 +8,9 @@ from ...base.handlers import APIHandler from . import csp_report_uri + class CSPReportHandler(APIHandler): - '''Accepts a content security policy violation report''' + """Accepts a content security policy violation report""" _track_activity = False @@ -23,10 +24,11 @@ def check_xsrf_cookie(self): @web.authenticated def post(self): - '''Log a content security policy violation report''' - self.log.warning("Content security violation: %s", - self.request.body.decode('utf8', 'replace')) + """Log a content security policy violation report""" + self.log.warning( + "Content security violation: %s", + self.request.body.decode("utf8", "replace"), + ) + -default_handlers = [ - (csp_report_uri, CSPReportHandler) -] +default_handlers = [(csp_report_uri, CSPReportHandler)] diff --git a/jupyter_server/services/sessions/handlers.py b/jupyter_server/services/sessions/handlers.py index 78072c5771..e100b0b87a 100644 --- a/jupyter_server/services/sessions/handlers.py +++ b/jupyter_server/services/sessions/handlers.py @@ -17,7 +17,6 @@ class SessionRootHandler(APIHandler): - @web.authenticated @gen.coroutine def get(self): @@ -30,32 +29,32 @@ def get(self): @gen.coroutine def post(self): # Creates a new session - #(unless a session already exists for the named session) + # (unless a session already exists for the named session) sm = self.session_manager model = self.get_json_body() if model is None: raise web.HTTPError(400, "No JSON data provided") - if 'notebook' in model and 'path' in model['notebook']: - self.log.warning('Sessions API changed, see updated swagger docs') - model['path'] = model['notebook']['path'] - model['type'] = 'notebook' + if "notebook" in model and "path" in model["notebook"]: + self.log.warning("Sessions API changed, see updated swagger docs") + model["path"] = model["notebook"]["path"] + model["type"] = "notebook" try: - path = model['path'] + path = model["path"] except KeyError: raise web.HTTPError(400, "Missing field in JSON data: path") try: - mtype = model['type'] + mtype = model["type"] except KeyError: raise web.HTTPError(400, "Missing field in JSON data: type") - name = model.get('name', None) - kernel = model.get('kernel', {}) - kernel_name = kernel.get('name', None) - kernel_id = kernel.get('id', None) + name = model.get("name", None) + kernel = model.get("kernel", {}) + kernel_name = kernel.get("name", None) + kernel_id = kernel.get("id", None) if not kernel_id and not kernel_name: self.log.debug("No kernel specified, using default kernel") @@ -67,26 +66,32 @@ def post(self): else: try: model = yield maybe_future( - sm.create_session(path=path, kernel_name=kernel_name, - kernel_id=kernel_id, name=name, - type=mtype)) + sm.create_session( + path=path, + kernel_name=kernel_name, + kernel_id=kernel_id, + name=name, + type=mtype, + ) + ) except NoSuchKernel: - msg = 
("The '%s' kernel is not available. Please pick another " - "suitable kernel instead, or install that kernel." % kernel_name) - status_msg = '%s not found' % kernel_name - self.log.warning('Kernel not found: %s' % kernel_name) + msg = ( + "The '%s' kernel is not available. Please pick another " + "suitable kernel instead, or install that kernel." % kernel_name + ) + status_msg = "%s not found" % kernel_name + self.log.warning("Kernel not found: %s" % kernel_name) self.set_status(501) self.finish(json.dumps(dict(message=msg, short_message=status_msg))) return - location = url_path_join(self.base_url, 'api', 'sessions', model['id']) - self.set_header('Location', location) + location = url_path_join(self.base_url, "api", "sessions", model["id"]) + self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=date_default)) class SessionHandler(APIHandler): - @web.authenticated @gen.coroutine def get(self, session_id): @@ -113,39 +118,41 @@ def patch(self, session_id): before = yield maybe_future(sm.get_session(session_id=session_id)) changes = {} - if 'notebook' in model and 'path' in model['notebook']: - self.log.warning('Sessions API changed, see updated swagger docs') - model['path'] = model['notebook']['path'] - model['type'] = 'notebook' - if 'path' in model: - changes['path'] = model['path'] - if 'name' in model: - changes['name'] = model['name'] - if 'type' in model: - changes['type'] = model['type'] - if 'kernel' in model: + if "notebook" in model and "path" in model["notebook"]: + self.log.warning("Sessions API changed, see updated swagger docs") + model["path"] = model["notebook"]["path"] + model["type"] = "notebook" + if "path" in model: + changes["path"] = model["path"] + if "name" in model: + changes["name"] = model["name"] + if "type" in model: + changes["type"] = model["type"] + if "kernel" in model: # Kernel id takes precedence over name. 
- if model['kernel'].get('id') is not None: - kernel_id = model['kernel']['id'] + if model["kernel"].get("id") is not None: + kernel_id = model["kernel"]["id"] if kernel_id not in km: raise web.HTTPError(400, "No such kernel: %s" % kernel_id) - changes['kernel_id'] = kernel_id - elif model['kernel'].get('name') is not None: - kernel_name = model['kernel']['name'] + changes["kernel_id"] = kernel_id + elif model["kernel"].get("name") is not None: + kernel_name = model["kernel"]["name"] kernel_id = yield sm.start_kernel_for_session( - session_id, kernel_name=kernel_name, name=before['name'], - path=before['path'], type=before['type']) - changes['kernel_id'] = kernel_id + session_id, + kernel_name=kernel_name, + name=before["name"], + path=before["path"], + type=before["type"], + ) + changes["kernel_id"] = kernel_id yield maybe_future(sm.update_session(session_id, **changes)) model = yield maybe_future(sm.get_session(session_id=session_id)) - if model['kernel']['id'] != before['kernel']['id']: + if model["kernel"]["id"] != before["kernel"]["id"]: # kernel_id changed because we got a new kernel # shutdown the old one - yield maybe_future( - km.shutdown_kernel(before['kernel']['id']) - ) + yield maybe_future(km.shutdown_kernel(before["kernel"]["id"])) self.finish(json.dumps(model, default=date_default)) @web.authenticated @@ -162,14 +169,13 @@ def delete(self, session_id): self.finish() -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # URL to handler mappings -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- _session_id_regex = r"(?P\w+-\w+-\w+-\w+-\w+)" default_handlers = [ (r"/api/sessions/%s" % _session_id_regex, SessionHandler), - (r"/api/sessions", SessionRootHandler) + (r"/api/sessions", SessionRootHandler), ] - diff --git a/jupyter_server/services/sessions/sessionmanager.py b/jupyter_server/services/sessions/sessionmanager.py index 50012bc2a1..1dfba2ee0c 100644 --- a/jupyter_server/services/sessions/sessionmanager.py +++ b/jupyter_server/services/sessions/sessionmanager.py @@ -22,31 +22,37 @@ class SessionManager(LoggingConfigurable): - kernel_manager = Instance('jupyter_server.services.kernels.kernelmanager.MappingKernelManager') - contents_manager = Instance('jupyter_server.services.contents.manager.ContentsManager') - + kernel_manager = Instance( + "jupyter_server.services.kernels.kernelmanager.MappingKernelManager" + ) + contents_manager = Instance( + "jupyter_server.services.contents.manager.ContentsManager" + ) + # Session database initialized below _cursor = None _connection = None - _columns = {'session_id', 'path', 'name', 'type', 'kernel_id'} - + _columns = {"session_id", "path", "name", "type", "kernel_id"} + @property def cursor(self): """Start a cursor and create a database called 'session'""" if self._cursor is None: self._cursor = self.connection.cursor() - self._cursor.execute("""CREATE TABLE session - (session_id, path, name, type, kernel_id)""") + self._cursor.execute( + """CREATE TABLE session + (session_id, path, name, type, kernel_id)""" + ) return self._cursor @property def connection(self): """Start a database connection""" if self._connection is None: - self._connection = sqlite3.connect(':memory:') + self._connection = sqlite3.connect(":memory:") self._connection.row_factory = sqlite3.Row return self._connection - + def close(self): """Close 
the sqlite connection""" if self._cursor is not None: @@ -79,15 +85,21 @@ def new_session_id(self): return unicode_type(uuid.uuid4()) @gen.coroutine - def create_session(self, path=None, name=None, type=None, kernel_name=None, kernel_id=None): + def create_session( + self, path=None, name=None, type=None, kernel_name=None, kernel_id=None + ): """Creates a session and returns its model""" session_id = self.new_session_id() if kernel_id is not None and kernel_id in self.kernel_manager: pass else: - kernel_id = yield self.start_kernel_for_session(session_id, path, name, type, kernel_name) + kernel_id = yield self.start_kernel_for_session( + session_id, path, name, type, kernel_name + ) result = yield maybe_future( - self.save_session(session_id, path=path, name=name, type=type, kernel_id=kernel_id) + self.save_session( + session_id, path=path, name=name, type=type, kernel_id=kernel_id + ) ) # py2-compat raise gen.Return(result) @@ -106,11 +118,11 @@ def start_kernel_for_session(self, session_id, path, name, type, kernel_name): @gen.coroutine def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): """Saves the items for the session with the given session_id - + Given a session_id (and any other of the arguments), this method creates a row in the sqlite session database that holds the information for a session. - + Parameters ---------- session_id : str @@ -123,14 +135,15 @@ def save_session(self, session_id, path=None, name=None, type=None, kernel_id=No the type of the session kernel_id : str a uuid for the kernel associated with this session - + Returns ------- model : dict a dictionary of the session model """ - self.cursor.execute("INSERT INTO session VALUES (?,?,?,?,?)", - (session_id, path, name, type, kernel_id) + self.cursor.execute( + "INSERT INTO session VALUES (?,?,?,?,?)", + (session_id, path, name, type, kernel_id), ) result = yield maybe_future(self.get_session(session_id=session_id)) raise gen.Return(result) @@ -138,7 +151,7 @@ def save_session(self, session_id, path=None, name=None, type=None, kernel_id=No @gen.coroutine def get_session(self, **kwargs): """Returns the model for a particular session. - + Takes a keyword argument and searches for the value in the session database, then returns the rest of the session's info. @@ -163,7 +176,7 @@ def get_session(self, **kwargs): raise TypeError("No such column: %r", column) conditions.append("%s=?" % column) - query = "SELECT * FROM session WHERE %s" % (' AND '.join(conditions)) + query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) self.cursor.execute(query, list(kwargs.values())) try: @@ -177,7 +190,7 @@ def get_session(self, **kwargs): for key, value in kwargs.items(): q.append("%s=%r" % (key, value)) - raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q))) + raise web.HTTPError(404, u"Session not found: %s" % (", ".join(q))) model = yield maybe_future(self.row_to_model(row)) raise gen.Return(model) @@ -185,10 +198,10 @@ def get_session(self, **kwargs): @gen.coroutine def update_session(self, session_id, **kwargs): """Updates the values in the session database. - + Changes the values of the session with the given session_id with the values from the keyword arguments. - + Parameters ---------- session_id : str @@ -209,7 +222,7 @@ def update_session(self, session_id, **kwargs): if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) - query = "UPDATE session SET %s WHERE session_id=?" 
% (', '.join(sets)) + query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) self.cursor.execute(query, list(kwargs.values()) + [session_id]) def kernel_culled(self, kernel_id): @@ -219,7 +232,7 @@ def kernel_culled(self, kernel_id): @gen.coroutine def row_to_model(self, row, tolerate_culled=False): """Takes sqlite database session row and turns it into a dictionary""" - kernel_culled = yield maybe_future(self.kernel_culled(row['kernel_id'])) + kernel_culled = yield maybe_future(self.kernel_culled(row["kernel_id"])) if kernel_culled: # The kernel was culled or died without deleting the session. # We can't use delete_session here because that tries to find @@ -228,27 +241,33 @@ def row_to_model(self, row, tolerate_culled=False): # If caller wishes to tolerate culled kernels, log a warning # and return None. Otherwise, raise KeyError with a similar # message. - self.cursor.execute("DELETE FROM session WHERE session_id=?", - (row['session_id'],)) - msg = "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " \ - "invalidating session '{session_id}'. The session has been removed.".\ - format(kernel_id=row['kernel_id'],session_id=row['session_id']) + self.cursor.execute( + "DELETE FROM session WHERE session_id=?", (row["session_id"],) + ) + msg = ( + "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " + "invalidating session '{session_id}'. The session has been removed.".format( + kernel_id=row["kernel_id"], session_id=row["session_id"] + ) + ) if tolerate_culled: self.log.warning(msg + " Continuing...") raise gen.Return(None) raise KeyError(msg) - kernel_model = yield maybe_future(self.kernel_manager.kernel_model(row['kernel_id'])) + kernel_model = yield maybe_future( + self.kernel_manager.kernel_model(row["kernel_id"]) + ) model = { - 'id': row['session_id'], - 'path': row['path'], - 'name': row['name'], - 'type': row['type'], - 'kernel': kernel_model + "id": row["session_id"], + "path": row["path"], + "name": row["name"], + "type": row["type"], + "kernel": kernel_model, } - if row['type'] == 'notebook': + if row["type"] == "notebook": # Provide the deprecated API. - model['notebook'] = {'path': row['path'], 'name': row['name']} + model["notebook"] = {"path": row["path"], "name": row["name"]} raise gen.Return(model) @gen.coroutine @@ -271,5 +290,5 @@ def list_sessions(self): def delete_session(self, session_id): """Deletes the row in the session database with given session_id""" session = yield maybe_future(self.get_session(session_id=session_id)) - yield maybe_future(self.kernel_manager.shutdown_kernel(session['kernel']['id'])) + yield maybe_future(self.kernel_manager.shutdown_kernel(session["kernel"]["id"])) self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,)) diff --git a/jupyter_server/terminal/__init__.py b/jupyter_server/terminal/__init__.py index c20001c894..57a4bc9fbe 100644 --- a/jupyter_server/terminal/__init__.py +++ b/jupyter_server/terminal/__init__.py @@ -1,41 +1,43 @@ +from . 
import api_handlers +from .handlers import TerminalHandler, TermSocket +from jupyter_server.utils import url_path_join as ujoin +from tornado.log import app_log +from terminado import NamedTermManager +from ipython_genutils.py3compat import which import os import terminado from ..utils import check_version -if not check_version(terminado.__version__, '0.8.1'): +if not check_version(terminado.__version__, "0.8.1"): raise ImportError("terminado >= 0.8.1 required, found %s" % terminado.__version__) -from ipython_genutils.py3compat import which -from terminado import NamedTermManager -from tornado.log import app_log -from jupyter_server.utils import url_path_join as ujoin -from .handlers import TerminalHandler, TermSocket -from . import api_handlers def initialize(webapp, root_dir, connection_url, settings): - if os.name == 'nt': - default_shell = 'powershell.exe' + if os.name == "nt": + default_shell = "powershell.exe" else: - default_shell = which('sh') - shell = settings.get('shell_command', - [os.environ.get('SHELL') or default_shell] - ) + default_shell = which("sh") + shell = settings.get("shell_command", [os.environ.get("SHELL") or default_shell]) # Enable login mode - to automatically source the /etc/profile script - if os.name != 'nt': - shell.append('-l') - terminal_manager = webapp.settings['terminal_manager'] = NamedTermManager( + if os.name != "nt": + shell.append("-l") + terminal_manager = webapp.settings["terminal_manager"] = NamedTermManager( shell_command=shell, - extra_env={'JUPYTER_SERVER_ROOT': root_dir, - 'JUPYTER_SERVER_URL': connection_url, - }, + extra_env={ + "JUPYTER_SERVER_ROOT": root_dir, + "JUPYTER_SERVER_URL": connection_url, + }, ) terminal_manager.log = app_log - base_url = webapp.settings['base_url'] + base_url = webapp.settings["base_url"] handlers = [ (ujoin(base_url, r"/terminals/(\w+)"), TerminalHandler), - (ujoin(base_url, r"/terminals/websocket/(\w+)"), TermSocket, - {'term_manager': terminal_manager}), + ( + ujoin(base_url, r"/terminals/websocket/(\w+)"), + TermSocket, + {"term_manager": terminal_manager}, + ), (ujoin(base_url, r"/api/terminals"), api_handlers.TerminalRootHandler), (ujoin(base_url, r"/api/terminals/(\w+)"), api_handlers.TerminalHandler), ] diff --git a/jupyter_server/terminal/api_handlers.py b/jupyter_server/terminal/api_handlers.py index d64e1acb3f..5f98903893 100644 --- a/jupyter_server/terminal/api_handlers.py +++ b/jupyter_server/terminal/api_handlers.py @@ -4,38 +4,34 @@ from ..prometheus.metrics import TERMINAL_CURRENTLY_RUNNING_TOTAL - class TerminalRootHandler(APIHandler): - @web.authenticated def get(self): tm = self.terminal_manager - terms = [{'name': name} for name in tm.terminals] + terms = [{"name": name} for name in tm.terminals] self.finish(json.dumps(terms)) # Update the metric below to the length of the list 'terms' - TERMINAL_CURRENTLY_RUNNING_TOTAL.set( - len(terms) - ) + TERMINAL_CURRENTLY_RUNNING_TOTAL.set(len(terms)) @web.authenticated def post(self): """POST /terminals creates a new terminal and redirects to it""" name, _ = self.terminal_manager.new_named_terminal() - self.finish(json.dumps({'name': name})) + self.finish(json.dumps({"name": name})) # Increase the metric by one because a new terminal was created TERMINAL_CURRENTLY_RUNNING_TOTAL.inc() class TerminalHandler(APIHandler): - SUPPORTED_METHODS = ('GET', 'DELETE') + SUPPORTED_METHODS = ("GET", "DELETE") @web.authenticated def get(self, name): tm = self.terminal_manager if name in tm.terminals: - self.finish(json.dumps({'name': name})) + 
self.finish(json.dumps({"name": name})) else: raise web.HTTPError(404, "Terminal not found: %r" % name) diff --git a/jupyter_server/terminal/handlers.py b/jupyter_server/terminal/handlers.py index 231ede1f39..fe7e3fffdd 100644 --- a/jupyter_server/terminal/handlers.py +++ b/jupyter_server/terminal/handlers.py @@ -1,4 +1,4 @@ -#encoding: utf-8 +# encoding: utf-8 """Tornado handlers for the terminal emulator.""" # Copyright (c) Jupyter Development Team. @@ -13,14 +13,17 @@ class TerminalHandler(JupyterHandler): """Render the terminal interface.""" + @web.authenticated def get(self, term_name): - self.write(self.render_template('terminal.html', - ws_path="terminals/websocket/%s" % term_name)) + self.write( + self.render_template( + "terminal.html", ws_path="terminals/websocket/%s" % term_name + ) + ) class TermSocket(WebSocketMixin, JupyterHandler, terminado.TermSocket): - def origin_check(self): """Terminado adds redundant origin_check @@ -35,8 +38,8 @@ def get(self, *args, **kwargs): def on_message(self, message): super(TermSocket, self).on_message(message) - self.application.settings['terminal_last_activity'] = utcnow() + self.application.settings["terminal_last_activity"] = utcnow() def write_message(self, message, binary=False): super(TermSocket, self).write_message(message, binary=binary) - self.application.settings['terminal_last_activity'] = utcnow() + self.application.settings["terminal_last_activity"] = utcnow() diff --git a/jupyter_server/transutils.py b/jupyter_server/transutils.py index eed860aa33..bbcc29b707 100644 --- a/jupyter_server/transutils.py +++ b/jupyter_server/transutils.py @@ -8,6 +8,8 @@ # Set up message catalog access -base_dir = os.path.realpath(os.path.join(__file__, '..', '..')) -trans = gettext.translation('notebook', localedir=os.path.join(base_dir, 'notebook/i18n'), fallback=True) +base_dir = os.path.realpath(os.path.join(__file__, "..", "..")) +trans = gettext.translation( + "notebook", localedir=os.path.join(base_dir, "notebook/i18n"), fallback=True +) _ = trans.gettext diff --git a/jupyter_server/utils.py b/jupyter_server/utils.py index 24c0577ec2..8a74c50556 100644 --- a/jupyter_server/utils.py +++ b/jupyter_server/utils.py @@ -29,7 +29,7 @@ # UF_HIDDEN is a stat flag not defined in the stat module. # It is used by BSD to indicate hidden files. -UF_HIDDEN = getattr(stat, 'UF_HIDDEN', 32768) +UF_HIDDEN = getattr(stat, "UF_HIDDEN", 32768) def exists(path): @@ -49,51 +49,61 @@ def url_path_join(*pieces): Use to prevent double slash when joining subpath. 
This will leave the initial and final / in place """ - initial = pieces[0].startswith('/') - final = pieces[-1].endswith('/') - stripped = [s.strip('/') for s in pieces] - result = '/'.join(s for s in stripped if s) - if initial: result = '/' + result - if final: result = result + '/' - if result == '//': result = '/' + initial = pieces[0].startswith("/") + final = pieces[-1].endswith("/") + stripped = [s.strip("/") for s in pieces] + result = "/".join(s for s in stripped if s) + if initial: + result = "/" + result + if final: + result = result + "/" + if result == "//": + result = "/" return result + def url_is_absolute(url): """Determine whether a given URL is absolute""" return urlparse(url).path.startswith("/") + def path2url(path): """Convert a local file path to a URL""" - pieces = [ quote(p) for p in path.split(os.sep) ] + pieces = [quote(p) for p in path.split(os.sep)] # preserve trailing / - if pieces[-1] == '': - pieces[-1] = '/' + if pieces[-1] == "": + pieces[-1] = "/" url = url_path_join(*pieces) return url + def url2path(url): """Convert a URL to a local file path""" - pieces = [ unquote(p) for p in url.split('/') ] + pieces = [unquote(p) for p in url.split("/")] path = os.path.join(*pieces) return path + def url_escape(path): """Escape special characters in a URL path Turns '/foo bar/' into '/foo%20bar/' """ - parts = py3compat.unicode_to_str(path, encoding='utf8').split('/') - return u'/'.join([quote(p) for p in parts]) + parts = py3compat.unicode_to_str(path, encoding="utf8").split("/") + return u"/".join([quote(p) for p in parts]) + def url_unescape(path): """Unescape special characters in a URL path Turns '/foo%20bar/' into '/foo bar/' """ - return u'/'.join([ - py3compat.str_to_unicode(unquote(p), encoding='utf8') - for p in py3compat.unicode_to_str(path, encoding='utf8').split('/') - ]) + return u"/".join( + [ + py3compat.str_to_unicode(unquote(p), encoding="utf8") + for p in py3compat.unicode_to_str(path, encoding="utf8").split("/") + ] + ) def is_file_hidden_win(abs_path, stat_res=None): @@ -112,7 +122,7 @@ def is_file_hidden_win(abs_path, stat_res=None): Ignored on Windows, exists for compatibility with POSIX version of the function. """ - if os.path.basename(abs_path).startswith('.'): + if os.path.basename(abs_path).startswith("."): return True win32_FILE_ATTRIBUTE_HIDDEN = 0x02 @@ -128,6 +138,7 @@ def is_file_hidden_win(abs_path, stat_res=None): return False + def is_file_hidden_posix(abs_path, stat_res=None): """Is a file hidden? @@ -144,7 +155,7 @@ def is_file_hidden_posix(abs_path, stat_res=None): The result of calling stat() on abs_path. If not passed, this function will call stat() internally. """ - if os.path.basename(abs_path).startswith('.'): + if os.path.basename(abs_path).startswith("."): return True if stat_res is None or stat.S_ISLNK(stat_res.st_mode): @@ -162,17 +173,19 @@ def is_file_hidden_posix(abs_path, stat_res=None): return True # check UF_HIDDEN - if getattr(stat_res, 'st_flags', 0) & UF_HIDDEN: + if getattr(stat_res, "st_flags", 0) & UF_HIDDEN: return True return False -if sys.platform == 'win32': + +if sys.platform == "win32": is_file_hidden = is_file_hidden_win else: is_file_hidden = is_file_hidden_posix -def is_hidden(abs_path, abs_root=''): + +def is_hidden(abs_path, abs_root=""): """Is a file hidden or contained in a hidden directory? 
This will start with the rightmost path element and work backwards to the @@ -200,8 +213,8 @@ def is_hidden(abs_path, abs_root=''): if not abs_root: abs_root = abs_path.split(os.sep, 1)[0] + os.sep - inside_root = abs_path[len(abs_root):] - if any(part.startswith('.') for part in inside_root.split(os.sep)): + inside_root = abs_path[len(abs_root) :] + if any(part.startswith(".") for part in inside_root.split(os.sep)): return True # check UF_HIDDEN on any location up to root. @@ -216,13 +229,16 @@ def is_hidden(abs_path, abs_root=''): st = os.lstat(path) except OSError: return True - if getattr(st, 'st_flags', 0) & UF_HIDDEN: + if getattr(st, "st_flags", 0) & UF_HIDDEN: return True path = os.path.dirname(path) return False + # TODO: Move to jupyter_core + + def win32_restrict_file_to_user(fname): """Secure a windows file to read-only access for the user. Follows guidance from win32 library creator: @@ -249,12 +265,15 @@ def win32_restrict_file_to_user(fname): dacl = win32security.ACL() # dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, everyone) - dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE, user) + dacl.AddAccessAllowedAce( + win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE, user + ) dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, admins) sd.SetSecurityDescriptorDacl(1, dacl, 0) win32security.SetFileSecurity(fname, win32security.DACL_SECURITY_INFORMATION, sd) + # TODO: Move to jupyter_core @contextmanager def secure_write(fname, binary=False): @@ -268,7 +287,7 @@ def secure_write(fname, binary=False): fname : unicode The path to the file to write """ - mode = 'wb' if binary else 'w' + mode = "wb" if binary else "w" open_flag = os.O_CREAT | os.O_WRONLY | os.O_TRUNC try: os.remove(fname) @@ -276,7 +295,7 @@ def secure_write(fname, binary=False): # Skip any issues with the file not existing pass - if os.name == 'nt': + if os.name == "nt": # Python on windows does not respect the group and public bits for chmod, so we need # to take additional steps to secure the contents. # Touch file pre-emptively to avoid editing permissions in open files in Windows @@ -286,11 +305,14 @@ def secure_write(fname, binary=False): win32_restrict_file_to_user(fname) with os.fdopen(os.open(fname, open_flag, 0o600), mode) as f: - if os.name != 'nt': + if os.name != "nt": # Enforce that the file got the requested permissions before writing - assert '0600' == oct(stat.S_IMODE(os.stat(fname).st_mode)).replace('0o', '0') + assert "0600" == oct(stat.S_IMODE(os.stat(fname).st_mode)).replace( + "0o", "0" + ) yield f + def samefile_simple(path, other_path): """ Fill in for os.path.samefile when it is unavailable (Windows+py2). @@ -313,32 +335,32 @@ def samefile_simple(path, other_path): """ path_stat = os.stat(path) other_path_stat = os.stat(other_path) - return (path.lower() == other_path.lower() - and path_stat == other_path_stat) + return path.lower() == other_path.lower() and path_stat == other_path_stat -def to_os_path(path, root=''): +def to_os_path(path, root=""): """Convert an API path to a filesystem path If given, root will be prepended to the path. root must be a filesystem path already. 
""" - parts = path.strip('/').split('/') - parts = [p for p in parts if p != ''] # remove duplicate splits + parts = path.strip("/").split("/") + parts = [p for p in parts if p != ""] # remove duplicate splits path = os.path.join(root, *parts) return path -def to_api_path(os_path, root=''): + +def to_api_path(os_path, root=""): """Convert a filesystem path to an API path If given, root will be removed from the path. root must be a filesystem path already. """ if os_path.startswith(root): - os_path = os_path[len(root):] + os_path = os_path[len(root) :] parts = os_path.strip(os.path.sep).split(os.path.sep) - parts = [p for p in parts if p != ''] # remove duplicate splits - path = '/'.join(parts) + parts = [p for p in parts if p != ""] # remove duplicate splits + path = "/".join(parts) return path @@ -357,11 +379,14 @@ def check_version(v, check): # Copy of IPython.utils.process.check_pid: + def _check_pid_win32(pid): import ctypes + # OpenProcess returns 0 if no such process (of ours) exists # positive int otherwise - return bool(ctypes.windll.kernel32.OpenProcess(1,0,pid)) + return bool(ctypes.windll.kernel32.OpenProcess(1, 0, pid)) + def _check_pid_posix(pid): """Copy of IPython.utils.process.check_pid""" @@ -377,7 +402,8 @@ def _check_pid_posix(pid): else: return True -if sys.platform == 'win32': + +if sys.platform == "win32": check_pid = _check_pid_win32 else: check_pid = _check_pid_posix @@ -397,4 +423,3 @@ def maybe_future(obj): f = asyncio.Future() f.set_result(obj) return f - diff --git a/jupyter_server/view/handlers.py b/jupyter_server/view/handlers.py index 5663d4db3a..39c992bf54 100644 --- a/jupyter_server/view/handlers.py +++ b/jupyter_server/view/handlers.py @@ -1,4 +1,4 @@ -#encoding: utf-8 +# encoding: utf-8 """Tornado handlers for viewing HTML files.""" # Copyright (c) Jupyter Development Team. 
@@ -11,18 +11,20 @@

 class ViewHandler(JupyterHandler):
     """Render HTML files within an iframe."""
+
     @web.authenticated
     def get(self, path):
-        path = path.strip('/')
+        path = path.strip("/")
         if not self.contents_manager.file_exists(path):
-            raise web.HTTPError(404, u'File does not exist: %s' % path)
+            raise web.HTTPError(404, u"File does not exist: %s" % path)

-        basename = path.rsplit('/', 1)[-1]
-        file_url = url_path_join(self.base_url, 'files', url_escape(path))
+        basename = path.rsplit("/", 1)[-1]
+        file_url = url_path_join(self.base_url, "files", url_escape(path))
         self.write(
-            self.render_template('view.html', file_url=file_url, page_title=basename)
+            self.render_template("view.html", file_url=file_url, page_title=basename)
         )

+
 default_handlers = [
     (r"/view%s" % path_regex, ViewHandler),
 ]
diff --git a/tests/auth/test_security.py b/tests/auth/test_security.py
index 85a4ead118..c083f05611 100644
--- a/tests/auth/test_security.py
+++ b/tests/auth/test_security.py
@@ -4,26 +4,26 @@
 def test_passwd_structure():
-    p = passwd('passphrase')
-    algorithm, salt, hashed = p.split(':')
-    assert algorithm == 'sha1'
+    p = passwd("passphrase")
+    algorithm, salt, hashed = p.split(":")
+    assert algorithm == "sha1"
     assert len(salt) == salt_len
     assert len(hashed) == 40


 def test_roundtrip():
-    p = passwd('passphrase')
-    assert passwd_check(p, 'passphrase')
+    p = passwd("passphrase")
+    assert passwd_check(p, "passphrase")


 def test_bad():
-    p = passwd('passphrase')
+    p = passwd("passphrase")
     assert not passwd_check(p, p)
-    assert not passwd_check(p, 'a:b:c:d')
-    assert not passwd_check(p, 'a:b')
+    assert not passwd_check(p, "a:b:c:d")
+    assert not passwd_check(p, "a:b")


 def test_passwd_check_unicode():
     # GH issue #4524
-    phash = u'sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f'
-    assert passwd_check(phash, u"łe¶ŧ←↓→")
\ No newline at end of file
+    phash = u"sha1:23862bc21dd3:7a415a95ae4580582e314072143d9c382c491e4f"
+    assert passwd_check(phash, u"łe¶ŧ←↓→")
diff --git a/tests/conftest.py b/tests/conftest.py
index 225fc8907b..22b378aa52 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -17,13 +17,13 @@
 from jupyter_server.utils import url_path_join

-pytest_plugins = ("pytest_tornasync")
+pytest_plugins = "pytest_tornasync"


 # NOTE: This is a temporary fix for Windows 3.8
-# We have to override the io_loop fixture with an
-# asyncio patch. This will probably be removed in
-# the future.
+# We have to override the io_loop fixture with an
+# asyncio patch. This will probably be removed in
+# the future.
 if sys.platform.startswith("win") and sys.version_info >= (3, 8):

     @pytest.fixture
@@ -55,31 +55,40 @@ def expected_http_error(error, expected_code, expected_message=None):
         if expected_message is not None and expected_message != str(e):
             return False
         return True
-    elif any([
-        isinstance(e, tornado.httpclient.HTTPClientError),
-        isinstance(e, tornado.httpclient.HTTPError)
-    ]):
+    elif any(
+        [
+            isinstance(e, tornado.httpclient.HTTPClientError),
+            isinstance(e, tornado.httpclient.HTTPError),
+        ]
+    ):
         if expected_code != e.code:
             return False
         if expected_message:
-            message = json.loads(e.response.body.decode())['message']
+            message = json.loads(e.response.body.decode())["message"]
             if expected_message != message:
                 return False
         return True


 config = pytest.fixture(lambda: {})
-home_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'home'))
-data_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'data'))
-config_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'config'))
-runtime_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'runtime'))
-root_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'root_dir'))
-system_jupyter_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'share', 'jupyter'))
-env_jupyter_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'env', 'share', 'jupyter'))
-system_config_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'etc', 'jupyter'))
-env_config_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, 'env', 'etc', 'jupyter'))
+home_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "home"))
+data_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "data"))
+config_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "config"))
+runtime_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "runtime"))
+root_dir = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "root_dir"))
+system_jupyter_path = pytest.fixture(
+    lambda tmp_path: mkdir(tmp_path, "share", "jupyter")
+)
+env_jupyter_path = pytest.fixture(
+    lambda tmp_path: mkdir(tmp_path, "env", "share", "jupyter")
+)
+system_config_path = pytest.fixture(lambda tmp_path: mkdir(tmp_path, "etc", "jupyter"))
+env_config_path = pytest.fixture(
+    lambda tmp_path: mkdir(tmp_path, "env", "etc", "jupyter")
+)
 argv = pytest.fixture(lambda: [])

+
 @pytest.fixture
 def environ(
     monkeypatch,
@@ -92,48 +101,45 @@ def environ(
     system_jupyter_path,
     system_config_path,
     env_jupyter_path,
-    env_config_path
-    ):
-    monkeypatch.setenv('HOME', str(home_dir))
-    monkeypatch.setenv('PYTHONPATH', os.pathsep.join(sys.path))
-    monkeypatch.setenv('JUPYTER_NO_CONFIG', '1')
-    monkeypatch.setenv('JUPYTER_CONFIG_DIR', str(config_dir))
-    monkeypatch.setenv('JUPYTER_DATA_DIR', str(data_dir))
-    monkeypatch.setenv('JUPYTER_RUNTIME_DIR', str(runtime_dir))
-    monkeypatch.setattr(jupyter_core.paths, 'SYSTEM_JUPYTER_PATH', [str(system_jupyter_path)])
-    monkeypatch.setattr(jupyter_core.paths, 'ENV_JUPYTER_PATH', [str(env_jupyter_path)])
-    monkeypatch.setattr(jupyter_core.paths, 'SYSTEM_CONFIG_PATH', [str(system_config_path)])
-    monkeypatch.setattr(jupyter_core.paths, 'ENV_CONFIG_PATH', [str(env_config_path)])
+    env_config_path,
+):
+    monkeypatch.setenv("HOME", str(home_dir))
+    monkeypatch.setenv("PYTHONPATH", os.pathsep.join(sys.path))
+    monkeypatch.setenv("JUPYTER_NO_CONFIG", "1")
+    monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(config_dir))
+    monkeypatch.setenv("JUPYTER_DATA_DIR", str(data_dir))
+    monkeypatch.setenv("JUPYTER_RUNTIME_DIR", str(runtime_dir))
+    monkeypatch.setattr(
+        jupyter_core.paths, "SYSTEM_JUPYTER_PATH", [str(system_jupyter_path)]
+    )
+    monkeypatch.setattr(jupyter_core.paths, "ENV_JUPYTER_PATH", [str(env_jupyter_path)])
+    monkeypatch.setattr(
+        jupyter_core.paths, "SYSTEM_CONFIG_PATH", [str(system_config_path)]
+    )
+    monkeypatch.setattr(jupyter_core.paths, "ENV_CONFIG_PATH", [str(env_config_path)])


 @pytest.fixture
-def configurable_serverapp(
-    environ,
-    http_port,
-    tmp_path,
-    home_dir,
-    data_dir,
-    config_dir,
-    runtime_dir,
-    root_dir
-    ):
-
+def configurable_serverapp(
+    environ, http_port, tmp_path, home_dir, data_dir, config_dir, runtime_dir, root_dir
+):
     def serverapp(
-        config={},
-        argv=[],
+        config={},
+        argv=[],
         environ=environ,
-        http_port=http_port,
-        tmp_path=tmp_path,
+        http_port=http_port,
+        tmp_path=tmp_path,
         home_dir=home_dir,
         data_dir=data_dir,
         config_dir=config_dir,
         runtime_dir=runtime_dir,
         root_dir=root_dir,
-        **kwargs):
+        **kwargs
+    ):
         c = Config(config)
-        c.NotebookNotary.db_file = ':memory:'
-        token = hexlify(os.urandom(4)).decode('ascii')
-        url_prefix = '/'
+        c.NotebookNotary.db_file = ":memory:"
+        token = hexlify(os.urandom(4)).decode("ascii")
+        url_prefix = "/"
         app = ServerApp.instance(
             port=http_port,
             port_retries=0,
@@ -148,7 +154,7 @@ def serverapp(
             token=token,
             **kwargs
         )
-        app.init_signal = lambda : None
+        app.init_signal = lambda: None
         app.log.propagate = True
         app.log.handlers = []
         # Initialize app without httpserver
@@ -179,7 +185,7 @@ def app(serverapp):

 @pytest.fixture
 def auth_header(serverapp):
-    return {'Authorization': 'token {token}'.format(token=serverapp.token)}
+    return {"Authorization": "token {token}".format(token=serverapp.token)}


 @pytest.fixture
@@ -189,12 +195,13 @@ def http_port(http_server_port):

 @pytest.fixture
 def base_url(http_server_port):
-    return '/'
+    return "/"


 @pytest.fixture
 def fetch(http_server_client, auth_header, base_url):
     """fetch fixture that handles auth, base_url, and path"""
+
     def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Handle URL strings
         path_url = url_escape(url_path_join(base_url, *parts), plus=False)
@@ -203,5 +210,8 @@ def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Add auth keys to header
         headers.update(auth_header)
         # Make request.
-        return http_server_client.fetch(url, headers=headers, request_timeout=20, **kwargs)
-    return client_fetch
\ No newline at end of file
+        return http_server_client.fetch(
+            url, headers=headers, request_timeout=20, **kwargs
+        )
+
+    return client_fetch
diff --git a/tests/extension/conftest.py b/tests/extension/conftest.py
index 1dbf2170d2..5cba9f7c5a 100644
--- a/tests/extension/conftest.py
+++ b/tests/extension/conftest.py
@@ -11,38 +11,36 @@

 # ----------------- Mock Extension App ----------------------

-class MockExtensionHandler(ExtensionHandler):

+class MockExtensionHandler(ExtensionHandler):
     def get(self):
         self.finish(self.config.mock_trait)


 class MockExtensionApp(ExtensionApp):
-    extension_name = 'mockextension'
-    mock_trait = Unicode('mock trait', config=True)
+    extension_name = "mockextension"
+    mock_trait = Unicode("mock trait", config=True)
     loaded = False

     def initialize_handlers(self):
-        self.handlers.append(('/mock', MockExtensionHandler))
+        self.handlers.append(("/mock", MockExtensionHandler))
         self.loaded = True

     @staticmethod
     def _jupyter_server_extension_paths():
-        return [{
-            'module': '_mockdestination/index'
-        }]
+        return [{"module": "_mockdestination/index"}]


 @pytest.fixture
 def extension_environ(env_config_path, monkeypatch):
-    monkeypatch.setattr(serverextension, 'ENV_CONFIG_PATH', [str(env_config_path)])
-    monkeypatch.setattr(serverextension, 'ENV_CONFIG_PATH', [str(env_config_path)])
+    monkeypatch.setattr(serverextension, "ENV_CONFIG_PATH", [str(env_config_path)])
+    monkeypatch.setattr(serverextension, "ENV_CONFIG_PATH", [str(env_config_path)])


 @pytest.fixture
 def config_file(config_dir):
-    f = config_dir.joinpath('jupyter_mockextension_config.py')
+    f = config_dir.joinpath("jupyter_mockextension_config.py")
     f.write_text("c.MockExtensionApp.mock_trait ='config from file'")
     return f
@@ -56,7 +54,8 @@ def extended_serverapp(serverapp):

 @pytest.fixture
 def inject_mock_extension(environ, extension_environ):
-    def ext(modulename='mockextension'):
+    def ext(modulename="mockextension"):
         sys.modules[modulename] = e = MockExtensionApp()
         return e
+
     return ext
diff --git a/tests/extension/test_app.py b/tests/extension/test_app.py
index 160acb3a87..e842bca9f6 100644
--- a/tests/extension/test_app.py
+++ b/tests/extension/test_app.py
@@ -11,7 +11,7 @@ def test_instance_creation():
     assert mock_extension.static_paths == []
     assert mock_extension.template_paths == []
     assert mock_extension.settings == {}
-    assert mock_extension.handlers == [] 
+    assert mock_extension.handlers == []


 def test_initialize(serverapp):
@@ -24,17 +24,14 @@


 traits = [
-    ('static_paths', ['test']),
-    ('template_paths', ['test']),
-    ('custom_display_url', '/test_custom_url'),
-    ('default_url', '/test_url')
+    ("static_paths", ["test"]),
+    ("template_paths", ["test"]),
+    ("custom_display_url", "/test_custom_url"),
+    ("default_url", "/test_url"),
 ]


-@pytest.mark.parametrize(
-    'trait_name,trait_value',
-    traits
-)
+@pytest.mark.parametrize("trait_name,trait_value", traits)
 def test_instance_creation_with_instance_args(trait_name, trait_value):
     kwarg = {}
     kwarg.setdefault(trait_name, trait_value)
@@ -42,15 +39,12 @@ def test_instance_creation_with_instance_args(trait_name, trait_value):
     mock_extension = MockExtensionApp(**kwarg)
     assert getattr(mock_extension, trait_name) == trait_value


-@pytest.mark.parametrize(
-    'trait_name,trait_value',
-    traits
-)
+@pytest.mark.parametrize("trait_name,trait_value", traits)
 def test_instance_creation_with_argv(serverapp, trait_name, trait_value):
     kwarg = {}
     kwarg.setdefault(trait_name, trait_value)
     argv = [
-        '--MockExtensionApp.{name}={value}'.format(name=trait_name, value=trait_value)
+        "--MockExtensionApp.{name}={value}".format(name=trait_name, value=trait_value)
     ]
     mock_extension = MockExtensionApp()
     mock_extension.initialize(serverapp, argv=argv)
@@ -60,7 +54,7 @@ def test_extensionapp_load_config_file(config_file, serverapp, extended_serverapp):
     # Assert default config_file_paths is the same in the app and extension.
     assert extended_serverapp.config_file_paths == serverapp.config_file_paths
-    assert extended_serverapp.config_file_name == 'jupyter_mockextension_config'
+    assert extended_serverapp.config_file_name == "jupyter_mockextension_config"
     assert extended_serverapp.config_dir == serverapp.config_dir
     # Assert that the trait is updated by config file
-    assert extended_serverapp.mock_trait == 'config from file'
+    assert extended_serverapp.mock_trait == "config from file"
diff --git a/tests/extension/test_entrypoint.py b/tests/extension/test_entrypoint.py
index e07bb00c4f..6aee636de7 100644
--- a/tests/extension/test_entrypoint.py
+++ b/tests/extension/test_entrypoint.py
@@ -4,11 +4,11 @@
 from jupyter_server.extension import serverextension

 # All test coroutines will be treated as marked.
-pytestmark = pytest.mark.script_launch_mode('subprocess')
+pytestmark = pytest.mark.script_launch_mode("subprocess")


 def test_server_extension_list(environ, script_runner):
-    ret = script_runner.run('jupyter', 'server', 'extension', 'list')
+    ret = script_runner.run("jupyter", "server", "extension", "list")
     assert ret.success


@@ -16,16 +16,16 @@ def test_server_extension_enable(environ, inject_mock_extension, script_runner):
     # 'mock' is not a valid extension The entry point should complete
     # but print to sterr.
     inject_mock_extension()
-    extension_name = 'mockextension'
-    ret = script_runner.run('jupyter', 'server', 'extension', 'enable', extension_name)
+    extension_name = "mockextension"
+    ret = script_runner.run("jupyter", "server", "extension", "enable", extension_name)
     assert ret.success
-    assert 'Enabling: {}'.format(extension_name) in ret.stderr
+    assert "Enabling: {}".format(extension_name) in ret.stderr


 def test_server_extension_disable(environ, script_runner):
     # 'mock' is not a valid extension The entry point should complete
     # but print to sterr.
-    extension_name = 'mockextension'
-    ret = script_runner.run('jupyter', 'server', 'extension', 'disable', extension_name)
+    extension_name = "mockextension"
+    ret = script_runner.run("jupyter", "server", "extension", "disable", extension_name)
     assert ret.success
-    assert 'Disabling: {}'.format(extension_name) in ret.stderr
+    assert "Disabling: {}".format(extension_name) in ret.stderr
diff --git a/tests/extension/test_handler.py b/tests/extension/test_handler.py
index b8cc8714b6..04fa39b375 100644
--- a/tests/extension/test_handler.py
+++ b/tests/extension/test_handler.py
@@ -5,27 +5,22 @@

 # ------------------ Start tests -------------------

+
 async def test_handler(fetch, extended_serverapp):
-    r = await fetch(
-        'mock',
-        method='GET'
-    )
+    r = await fetch("mock", method="GET")
     assert r.code == 200
-    assert r.body.decode() == 'mock trait'
+    assert r.body.decode() == "mock trait"


 async def test_handler_setting(fetch, serverapp):
     # Configure trait in Mock Extension.
-    m = MockExtensionApp(mock_trait='test mock trait')
+    m = MockExtensionApp(mock_trait="test mock trait")
     m.initialize(serverapp)

     # Test that the extension trait was picked up by the webapp.
-    r = await fetch(
-        'mock',
-        method='GET'
-    )
+    r = await fetch("mock", method="GET")
     assert r.code == 200
-    assert r.body.decode() == 'test mock trait'
+    assert r.body.decode() == "test mock trait"


 async def test_handler_argv(fetch, serverapp):
@@ -35,9 +30,6 @@ async def test_handler_argv(fetch, serverapp):
     m.initialize(serverapp, argv=argv)

     # Test that the extension trait was picked up by the webapp.
-    r = await fetch(
-        'mock',
-        method='GET'
-    )
+    r = await fetch("mock", method="GET")
     assert r.code == 200
-    assert r.body.decode() == 'test mock trait'
+    assert r.body.decode() == "test mock trait"
diff --git a/tests/extension/test_serverextension.py b/tests/extension/test_serverextension.py
index 3c71eb694f..523d6f61e0 100644
--- a/tests/extension/test_serverextension.py
+++ b/tests/extension/test_serverextension.py
@@ -12,18 +12,18 @@
 from jupyter_server.extension import serverextension
 from jupyter_server.extension.serverextension import (
     validate_server_extension,
-    toggle_server_extension_python, 
-    _get_config_dir
+    toggle_server_extension_python,
+    _get_config_dir,
 )
 from jupyter_server.config_manager import BaseJSONConfigManager


 def test_help_output():
-    check_help_all_output('jupyter_server.extension.serverextension')
-    check_help_all_output('jupyter_server.extension.serverextension', ['enable'])
-    check_help_all_output('jupyter_server.extension.serverextension', ['disable'])
-    check_help_all_output('jupyter_server.extension.serverextension', ['install'])
-    check_help_all_output('jupyter_server.extension.serverextension', ['uninstall'])
+    check_help_all_output("jupyter_server.extension.serverextension")
+    check_help_all_output("jupyter_server.extension.serverextension", ["enable"])
+    check_help_all_output("jupyter_server.extension.serverextension", ["disable"])
+    check_help_all_output("jupyter_server.extension.serverextension", ["install"])
+    check_help_all_output("jupyter_server.extension.serverextension", ["uninstall"])


 def get_config(sys_prefix=True):
@@ -34,61 +34,57 @@ def get_config(sys_prefix=True):

 def test_enable(inject_mock_extension):
     inject_mock_extension()
-    toggle_server_extension_python('mockextension', True)
+    toggle_server_extension_python("mockextension", True)
     config = get_config()
-    assert config['mockextension']
+    assert config["mockextension"]


 def test_disable(inject_mock_extension):
     inject_mock_extension()
-    toggle_server_extension_python('mockextension', True)
-    toggle_server_extension_python('mockextension', False)
+    toggle_server_extension_python("mockextension", True)
+    toggle_server_extension_python("mockextension", False)

     config = get_config()
-    assert not config['mockextension']
+    assert not config["mockextension"]


-def test_merge_config(
-    env_config_path,
-    inject_mock_extension,
-    configurable_serverapp
-    ):
+def test_merge_config(env_config_path, inject_mock_extension, configurable_serverapp):
     # enabled at sys level
-    inject_mock_extension('mockext_sys')
-    validate_server_extension('mockext_sys')
+    inject_mock_extension("mockext_sys")
+    validate_server_extension("mockext_sys")
     # enabled at sys, disabled at user
-    inject_mock_extension('mockext_both')
-    validate_server_extension('mockext_both')
+    inject_mock_extension("mockext_both")
+    validate_server_extension("mockext_both")
     # enabled at user
-    inject_mock_extension('mockext_user')
-    validate_server_extension('mockext_user')
+    inject_mock_extension("mockext_user")
+    validate_server_extension("mockext_user")
     # enabled at Python
-    inject_mock_extension('mockext_py')
-    validate_server_extension('mockext_py')
+    inject_mock_extension("mockext_py")
+    validate_server_extension("mockext_py")

     # Toggle each extension module with a JSON config file
     # at the sys-prefix config dir.
-    toggle_server_extension_python('mockext_sys', enabled=True, sys_prefix=True)
-    toggle_server_extension_python('mockext_user', enabled=True, user=True)
+    toggle_server_extension_python("mockext_sys", enabled=True, sys_prefix=True)
+    toggle_server_extension_python("mockext_user", enabled=True, user=True)

     # Write this configuration in two places, sys-prefix and user.
     # sys-prefix supercedes users, so the extension should be disabled
     # when these two configs merge.
-    toggle_server_extension_python('mockext_both', enabled=True, user=True)
-    toggle_server_extension_python('mockext_both', enabled=False, sys_prefix=True)
+    toggle_server_extension_python("mockext_both", enabled=True, user=True)
+    toggle_server_extension_python("mockext_both", enabled=False, sys_prefix=True)

     # Enable the last extension, mockext_py, using the CLI interface.
     app = configurable_serverapp(
-        config_dir=str(env_config_path), 
-        argv=['--ServerApp.jpserver_extensions={"mockext_py":True}']
+        config_dir=str(env_config_path),
+        argv=['--ServerApp.jpserver_extensions={"mockext_py":True}'],
     )
     # Verify that extensions are enabled and merged properly.
     extensions = app.jpserver_extensions
-    assert extensions['mockext_user']
-    assert extensions['mockext_sys']
-    assert extensions['mockext_py']
+    assert extensions["mockext_user"]
+    assert extensions["mockext_sys"]
+    assert extensions["mockext_py"]
     # Merging should causes this extension to be disabled.
-    assert not extensions['mockext_both']
+    assert not extensions["mockext_both"]


 @pytest.fixture
@@ -98,27 +94,28 @@ def ordered_server_extensions():

     def load_jupyter_server_extension(obj):
         obj.mockI = True
-        obj.mock_shared = 'I'
+        obj.mock_shared = "I"

     mockextension1.load_jupyter_server_extension = load_jupyter_server_extension

     def load_jupyter_server_extension(obj):
         obj.mockII = True
-        obj.mock_shared = 'II'
+        obj.mock_shared = "II"

     mockextension2.load_jupyter_server_extension = load_jupyter_server_extension

-    sys.modules['mockextension2'] = mockextension2
-    sys.modules['mockextension1'] = mockextension1
+    sys.modules["mockextension2"] = mockextension2
+    sys.modules["mockextension1"] = mockextension1


 def test_load_ordered(ordered_server_extensions):
     app = ServerApp()
-    app.jpserver_extensions = OrderedDict([('mockextension2',True),('mockextension1',True)])
+    app.jpserver_extensions = OrderedDict(
+        [("mockextension2", True), ("mockextension1", True)]
+    )

     app.init_server_extensions()

     assert app.mockII is True, "Mock II should have been loaded"
     assert app.mockI is True, "Mock I should have been loaded"
-    assert app.mock_shared == 'II', "Mock II should be loaded after Mock I"
-
+    assert app.mock_shared == "II", "Mock II should be loaded after Mock I"
diff --git a/tests/nbconvert/test_handlers.py b/tests/nbconvert/test_handlers.py
index 12a7256d26..1ee2e269cc 100644
--- a/tests/nbconvert/test_handlers.py
+++ b/tests/nbconvert/test_handlers.py
@@ -9,7 +9,10 @@

 from nbformat import writes
 from nbformat.v4 import (
-    new_notebook, new_markdown_cell, new_code_cell, new_output,
+    new_notebook,
+    new_markdown_cell,
+    new_code_cell,
+    new_output,
 )

 from ipython_genutils.testing.decorators import onlyif_cmds_exist
@@ -21,133 +24,134 @@

 from ..conftest import expected_http_error

-png_green_pixel = encodebytes(b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00'
-b'\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT'
-b'\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82'
-).decode('ascii')
+png_green_pixel = encodebytes(
+    b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00"
+    b"\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde\x00\x00\x00\x0cIDAT"
+    b"\x08\xd7c\x90\xfb\xcf\x00\x00\x02\\\x01\x1e.~d\x87\x00\x00\x00\x00IEND\xaeB`\x82"
+).decode("ascii")


 @pytest.fixture
 def notebook(root_dir):
     # Build sub directory.
-    if not root_dir.joinpath('foo').is_dir():
-        subdir = root_dir / 'foo'
+    if not root_dir.joinpath("foo").is_dir():
+        subdir = root_dir / "foo"
         subdir.mkdir()

     # Build a notebook programmatically.
     nb = new_notebook()
-    nb.cells.append(new_markdown_cell(u'Created by test ³'))
-    cc1 = new_code_cell(source=u'print(2*6)')
-    cc1.outputs.append(new_output(output_type="stream", text=u'12'))
-    cc1.outputs.append(new_output(output_type="execute_result",
-        data={'image/png' : png_green_pixel},
-        execution_count=1,
-    ))
+    nb.cells.append(new_markdown_cell(u"Created by test ³"))
+    cc1 = new_code_cell(source=u"print(2*6)")
+    cc1.outputs.append(new_output(output_type="stream", text=u"12"))
+    cc1.outputs.append(
+        new_output(
+            output_type="execute_result",
+            data={"image/png": png_green_pixel},
+            execution_count=1,
+        )
+    )
     nb.cells.append(cc1)

     # Write file to tmp dir.
-    nbfile = subdir / 'testnb.ipynb'
-    nbfile.write_text(writes(nb, version=4), encoding='utf-8')
+    nbfile = subdir / "testnb.ipynb"
+    nbfile.write_text(writes(nb, version=4), encoding="utf-8")


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_file(fetch, notebook):
     r = await fetch(
-        'nbconvert', 'html', 'foo', 'testnb.ipynb',
-        method='GET',
-        params={'download': False}
+        "nbconvert",
+        "html",
+        "foo",
+        "testnb.ipynb",
+        method="GET",
+        params={"download": False},
     )
     assert r.code == 200
-    assert 'text/html' in r.headers['Content-Type']
-    assert 'Created by test' in r.body.decode()
-    assert 'print' in r.body.decode()
+    assert "text/html" in r.headers["Content-Type"]
+    assert "Created by test" in r.body.decode()
+    assert "print" in r.body.decode()

     r = await fetch(
-        'nbconvert', 'python', 'foo', 'testnb.ipynb',
-        method='GET',
-        params={'download': False}
+        "nbconvert",
+        "python",
+        "foo",
+        "testnb.ipynb",
+        method="GET",
+        params={"download": False},
     )
     assert r.code == 200
-    assert 'text/x-python' in r.headers['Content-Type']
-    assert 'print(2*6)' in r.body.decode()
+    assert "text/x-python" in r.headers["Content-Type"]
+    assert "print(2*6)" in r.body.decode()


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_file_404(fetch, notebook):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'nbconvert', 'html', 'foo', 'thisdoesntexist.ipynb',
-            method='GET',
-            params={'download': False}
+            "nbconvert",
+            "html",
+            "foo",
+            "thisdoesntexist.ipynb",
+            method="GET",
+            params={"download": False},
         )
     assert expected_http_error(e, 404)


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_file_download(fetch, notebook):
     r = await fetch(
-        'nbconvert', 'python', 'foo', 'testnb.ipynb',
-        method='GET',
-        params={'download': True}
+        "nbconvert",
+        "python",
+        "foo",
+        "testnb.ipynb",
+        method="GET",
+        params={"download": True},
     )
-    content_disposition = r.headers['Content-Disposition']
-    assert 'attachment' in content_disposition
-    assert 'testnb.py' in content_disposition
+    content_disposition = r.headers["Content-Disposition"]
+    assert "attachment" in content_disposition
+    assert "testnb.py" in content_disposition


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_file_zip(fetch, notebook):
     r = await fetch(
-        'nbconvert', 'latex', 'foo', 'testnb.ipynb',
-        method='GET',
-        params={'download': True}
+        "nbconvert",
+        "latex",
+        "foo",
+        "testnb.ipynb",
+        method="GET",
+        params={"download": True},
     )
-    assert 'application/zip' in r.headers['Content-Type']
-    assert '.zip' in r.headers['Content-Disposition']
+    assert "application/zip" in r.headers["Content-Type"]
+    assert ".zip" in r.headers["Content-Disposition"]


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_post(fetch, notebook):
-    r = await fetch(
-        'api/contents/foo/testnb.ipynb',
-        method='GET',
-    )
+    r = await fetch("api/contents/foo/testnb.ipynb", method="GET",)
     nbmodel = json.loads(r.body.decode())

-    r = await fetch(
-        'nbconvert', 'html',
-        method='POST',
-        body=json.dumps(nbmodel)
-    )
+    r = await fetch("nbconvert", "html", method="POST", body=json.dumps(nbmodel))
     assert r.code == 200
-    assert 'text/html' in r.headers['Content-Type']
-    assert 'Created by test' in r.body.decode()
-    assert 'print' in r.body.decode()
+    assert "text/html" in r.headers["Content-Type"]
+    assert "Created by test" in r.body.decode()
+    assert "print" in r.body.decode()

-    r = await fetch(
-        'nbconvert', 'python',
-        method='POST',
-        body=json.dumps(nbmodel)
-    )
+    r = await fetch("nbconvert", "python", method="POST", body=json.dumps(nbmodel))
     assert r.code == 200
-    assert u'text/x-python' in r.headers['Content-Type']
-    assert 'print(2*6)'in r.body.decode()
+    assert u"text/x-python" in r.headers["Content-Type"]
+    assert "print(2*6)" in r.body.decode()


-@onlyif_cmds_exist('pandoc')
+@onlyif_cmds_exist("pandoc")
 async def test_from_post_zip(fetch, notebook):
-    r = await fetch(
-        'api/contents/foo/testnb.ipynb',
-        method='GET',
-    )
+    r = await fetch("api/contents/foo/testnb.ipynb", method="GET",)
     nbmodel = json.loads(r.body.decode())

-    r = await fetch(
-        'nbconvert', 'latex',
-        method='POST',
-        body=json.dumps(nbmodel)
-    )
-    assert 'application/zip' in r.headers['Content-Type']
-    assert '.zip' in r.headers['Content-Disposition']
\ No newline at end of file
+    r = await fetch("nbconvert", "latex", method="POST", body=json.dumps(nbmodel))
+    assert "application/zip" in r.headers["Content-Type"]
+    assert ".zip" in r.headers["Content-Disposition"]
diff --git a/tests/services/api/test_api.py b/tests/services/api/test_api.py
index b2c88c3bcd..d9ba74c6d1 100644
--- a/tests/services/api/test_api.py
+++ b/tests/services/api/test_api.py
@@ -4,11 +4,5 @@

 async def test_get_spec(fetch):
-    response = await fetch(
-        'api', 'spec.yaml',
-        method='GET'
-    )
+    response = await fetch("api", "spec.yaml", method="GET")
     assert response.code == 200
-
-
-
diff --git a/tests/services/config/test_api.py b/tests/services/config/test_api.py
index 356abbc3a1..343227c740 100644
--- a/tests/services/config/test_api.py
+++ b/tests/services/config/test_api.py
@@ -5,64 +5,41 @@

 async def test_create_retrieve_config(fetch):
-    sample = {'foo': 'bar', 'baz': 73}
+    sample = {"foo": "bar", "baz": 73}
     response = await fetch(
-        'api', 'config', 'example',
-        method='PUT',
-        body=json.dumps(sample)
+        "api", "config", "example", method="PUT", body=json.dumps(sample)
     )
     assert response.code == 204

-    response2 = await fetch(
-        'api', 'config', 'example',
-        method='GET',
-    )
+    response2 = await fetch("api", "config", "example", method="GET",)
     assert response2.code == 200
     assert json.loads(response2.body.decode()) == sample


 async def test_modify(fetch):
-    sample = {
-        'foo': 'bar',
-        'baz': 73,
-        'sub': {'a': 6, 'b': 7},
-        'sub2': {'c': 8}
-    }
+    sample = {"foo": "bar", "baz": 73, "sub": {"a": 6, "b": 7}, "sub2": {"c": 8}}

     modified_sample = {
-        'foo': None,  # should delete foo
-        'baz': 75,
-        'wib': [1,2,3],
-        'sub': {'a': 8, 'b': None, 'd': 9},
-        'sub2': {'c': None}  # should delete sub2
+        "foo": None,  # should delete foo
+        "baz": 75,
+        "wib": [1, 2, 3],
+        "sub": {"a": 8, "b": None, "d": 9},
+        "sub2": {"c": None},  # should delete sub2
     }

-    diff = {
-        'baz': 75,
-        'wib': [1,2,3],
-        'sub': {'a': 8, 'd': 9}
-    }
+    diff = {"baz": 75, "wib": [1, 2, 3], "sub": {"a": 8, "d": 9}}

-    await fetch(
-        'api', 'config', 'example',
-        method='PUT',
-        body=json.dumps(sample)
-    )
+    await fetch("api", "config", "example", method="PUT", body=json.dumps(sample))

     response2 = await fetch(
-        'api', 'config', 'example',
-        method='PATCH',
-        body=json.dumps(modified_sample)
+        "api", "config", "example", method="PATCH", body=json.dumps(modified_sample)
     )
     assert response2.code == 200
     assert json.loads(response2.body.decode()) == diff
-    
+

 async def test_get_unknown(fetch):
-    response = await fetch(
-        'api', 'config', 'nonexistant',
-        method='GET',
-    )
+    response = await fetch("api", "config", "nonexistant", method="GET",)
     assert response.code == 200
-    assert json.loads(response.body.decode()) == {}
\ No newline at end of file
+    assert json.loads(response.body.decode()) == {}
diff --git a/tests/services/contents/test_api.py b/tests/services/contents/test_api.py
index 7dbd3fd759..2b72271ac9 100644
--- a/tests/services/contents/test_api.py
+++ b/tests/services/contents/test_api.py
@@ -8,7 +8,8 @@

 from nbformat import writes, from_dict
 from nbformat.v4 import (
-    new_notebook, new_markdown_cell,
+    new_notebook,
+    new_markdown_cell,
 )

 from jupyter_server.utils import url_path_join
@@ -19,25 +20,26 @@

 def notebooks_only(dir_model):
-    return [nb for nb in dir_model['content'] if nb['type']=='notebook']
+    return [nb for nb in dir_model["content"] if nb["type"] == "notebook"]
+

 def dirs_only(dir_model):
-    return [x for x in dir_model['content'] if x['type']=='directory']
+    return [x for x in dir_model["content"] if x["type"] == "directory"]


 dirs = [
-    ('', 'inroot'),
-    ('Directory with spaces in', 'inspace'),
-    (u'unicodé', 'innonascii'),
-    ('foo', 'a'),
-    ('foo', 'b'),
-    ('foo', 'name with spaces'),
-    ('foo', u'unicodé'),
-    ('foo/bar', 'baz'),
-    ('ordering', 'A'),
-    ('ordering', 'b'),
-    ('ordering', 'C'),
-    (u'å b', u'ç d'),
+    ("", "inroot"),
+    ("Directory with spaces in", "inspace"),
+    (u"unicodé", "innonascii"),
+    ("foo", "a"),
+    ("foo", "b"),
+    ("foo", "name with spaces"),
+    ("foo", u"unicodé"),
+    ("foo/bar", "baz"),
+    ("ordering", "A"),
+    ("ordering", "b"),
+    ("ordering", "C"),
+    (u"å b", u"ç d"),
 ]
@@ -55,17 +57,17 @@ def contents(contents_dir):

         # Create a notebook
         nb = writes(new_notebook(), version=4)
-        nbname = p.joinpath('{}.ipynb'.format(name))
-        nbname.write_text(nb, encoding='utf-8')
+        nbname = p.joinpath("{}.ipynb".format(name))
+        nbname.write_text(nb, encoding="utf-8")

         # Create a text file
-        txt = '{} text file'.format(name)
-        txtname = p.joinpath('{}.txt'.format(name))
-        txtname.write_text(txt, encoding='utf-8')
+        txt = "{} text file".format(name)
+        txtname = p.joinpath("{}.txt".format(name))
+        txtname.write_text(txt, encoding="utf-8")

         # Create a random blob
-        blob = name.encode('utf-8') + b'\xFF'
-        blobname = p.joinpath('{}.blob'.format(name))
+        blob = name.encode("utf-8") + b"\xFF"
+        blobname = p.joinpath("{}.blob".format(name))
         blobname.write_bytes(blob)
@@ -74,434 +76,389 @@ def folders():
     return list(set(item[0] for item in dirs))


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_list_notebooks(fetch, contents, path, name):
-    response = await fetch(
-        'api', 'contents', path,
-        method='GET',
-    )
+    response = await fetch("api", "contents", path, method="GET",)
     data = json.loads(response.body.decode())
     nbs = notebooks_only(data)
     assert len(nbs) > 0
-    assert name+'.ipynb' in [n['name'] for n in nbs]
-    assert url_path_join(path, name+'.ipynb') in [n['path'] for n in nbs]
+    assert name + ".ipynb" in [n["name"] for n in nbs]
+    assert url_path_join(path, name + ".ipynb") in [n["path"] for n in nbs]


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_get_dir_no_contents(fetch, contents, path, name):
     response = await fetch(
-        'api', 'contents', path,
-        method='GET',
-        params=dict(
-            content='0',
-        )
+        "api", "contents", path, method="GET", params=dict(content="0",)
     )
     model = json.loads(response.body.decode())
-    assert model['path'] == path
-    assert model['type'] == 'directory'
-    assert 'content' in model
-    assert model['content'] == None
+    assert model["path"] == path
+    assert model["type"] == "directory"
+    assert "content" in model
+    assert model["content"] == None


 async def test_list_nonexistant_dir(fetch, contents):
     with pytest.raises(tornado.httpclient.HTTPClientError):
         await fetch(
-            'api', 'contents', 'nonexistant',
-            method='GET',
+            "api", "contents", "nonexistant", method="GET",
         )


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_get_nb_contents(fetch, contents, path, name):
-    nbname = name+'.ipynb'
-    nbpath = (path + '/' + nbname).lstrip('/')
-    r = await fetch(
-        'api', 'contents', nbpath,
-        method='GET',
-        params=dict(content='1')
-    )
+    nbname = name + ".ipynb"
+    nbpath = (path + "/" + nbname).lstrip("/")
+    r = await fetch("api", "contents", nbpath, method="GET", params=dict(content="1"))
     model = json.loads(r.body.decode())
-    assert model['name'] == nbname
-    assert model['path'] == nbpath
-    assert model['type'] == 'notebook'
-    assert 'content' in model
-    assert model['format'] == 'json'
-    assert 'metadata' in model['content']
-    assert isinstance(model['content']['metadata'], dict)
+    assert model["name"] == nbname
+    assert model["path"] == nbpath
+    assert model["type"] == "notebook"
+    assert "content" in model
+    assert model["format"] == "json"
+    assert "metadata" in model["content"]
+    assert isinstance(model["content"]["metadata"], dict)


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_get_nb_no_contents(fetch, contents, path, name):
-    nbname = name+'.ipynb'
-    nbpath = (path + '/' + nbname).lstrip('/')
-    r = await fetch(
-        'api', 'contents', nbpath,
-        method='GET',
-        params=dict(content='0')
-    )
+    nbname = name + ".ipynb"
+    nbpath = (path + "/" + nbname).lstrip("/")
+    r = await fetch("api", "contents", nbpath, method="GET", params=dict(content="0"))
     model = json.loads(r.body.decode())
-    assert model['name'] == nbname
-    assert model['path'] == nbpath
-    assert model['type'] == 'notebook'
-    assert 'content' in model
-    assert model['content'] == None
+    assert model["name"] == nbname
+    assert model["path"] == nbpath
+    assert model["type"] == "notebook"
+    assert "content" in model
+    assert model["content"] == None


 async def test_get_nb_invalid(contents_dir, fetch, contents):
     nb = {
-        'nbformat': 4,
-        'metadata': {},
-        'cells': [{
-            'cell_type': 'wrong',
-            'metadata': {},
-        }],
+        "nbformat": 4,
+        "metadata": {},
+        "cells": [{"cell_type": "wrong", "metadata": {},}],
     }
-    nbpath = u'å b/Validate tést.ipynb'
+    nbpath = u"å b/Validate tést.ipynb"
     (contents_dir / nbpath).write_text(json.dumps(nb))
-    r = await fetch(
-        'api', 'contents', nbpath,
-        method='GET',
-    )
+    r = await fetch("api", "contents", nbpath, method="GET",)
     model = json.loads(r.body.decode())
-    assert model['path'] == nbpath
-    assert model['type'] == 'notebook'
-    assert 'content' in model
-    assert 'message' in model
-    assert 'validation failed' in model['message'].lower()
+    assert model["path"] == nbpath
+    assert model["type"] == "notebook"
+    assert "content" in model
+    assert "message" in model
+    assert "validation failed" in model["message"].lower()


 async def test_get_contents_no_such_file(fetch):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'foo/q.ipynb',
-            method='GET',
+            "api", "contents", "foo/q.ipynb", method="GET",
         )
     assert e.value.code == 404


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_get_text_file_contents(fetch, contents, path, name):
-    txtname = name+'.txt'
-    txtpath = (path + '/' + txtname).lstrip('/')
-    r = await fetch(
-        'api', 'contents', txtpath,
-        method='GET',
-        params=dict(content='1')
-    )
+    txtname = name + ".txt"
+    txtpath = (path + "/" + txtname).lstrip("/")
+    r = await fetch("api", "contents", txtpath, method="GET", params=dict(content="1"))
     model = json.loads(r.body.decode())
-    assert model['name'] == txtname
-    assert model['path'] == txtpath
-    assert 'content' in model
-    assert model['format'] == 'text'
-    assert model['type'] == 'file'
-    assert model['content'] == '{} text file'.format(name)
+    assert model["name"] == txtname
+    assert model["path"] == txtpath
+    assert "content" in model
+    assert model["format"] == "text"
+    assert model["type"] == "file"
+    assert model["content"] == "{} text file".format(name)

     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'foo/q.txt',
-            method='GET',
+            "api", "contents", "foo/q.txt", method="GET",
         )
     assert expected_http_error(e, 404)

     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'foo/bar/baz.blob',
-            method='GET',
-            params=dict(
-                type='file',
-                format='text'
-            )
+            "api",
+            "contents",
+            "foo/bar/baz.blob",
+            method="GET",
+            params=dict(type="file", format="text"),
         )
     assert expected_http_error(e, 400)

-
-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_get_binary_file_contents(fetch, contents, path, name):
-    blobname = name+'.blob'
-    blobpath = (path + '/' + blobname).lstrip('/')
-    r = await fetch(
-        'api', 'contents', blobpath,
-        method='GET',
-        params=dict(content='1')
-    )
+    blobname = name + ".blob"
+    blobpath = (path + "/" + blobname).lstrip("/")
+    r = await fetch("api", "contents", blobpath, method="GET", params=dict(content="1"))
     model = json.loads(r.body.decode())
-    assert model['name'] == blobname
-    assert model['path'] == blobpath
-    assert 'content' in model
-    assert model['format'] == 'base64'
-    assert model['type'] == 'file'
-    data_out = decodebytes(model['content'].encode('ascii'))
-    data_in = name.encode('utf-8') + b'\xFF'
+    assert model["name"] == blobname
+    assert model["path"] == blobpath
+    assert "content" in model
+    assert model["format"] == "base64"
+    assert model["type"] == "file"
+    data_out = decodebytes(model["content"].encode("ascii"))
+    data_in = name.encode("utf-8") + b"\xFF"
     assert data_in == data_out

     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'foo/q.txt',
-            method='GET',
+            "api", "contents", "foo/q.txt", method="GET",
         )
     assert expected_http_error(e, 404)


 async def test_get_bad_type(fetch, contents):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        path = 'unicodé'
-        type = 'file'
+        path = "unicodé"
+        type = "file"
         await fetch(
-            'api', 'contents', path,
-            method='GET',
-            params=dict(type=type)  # This should be a directory, and thus throw and error
+            "api",
+            "contents",
+            path,
+            method="GET",
+            params=dict(
+                type=type
+            ),  # This should be a directory, and thus throw and error
         )
-    assert expected_http_error(e, 400, '%s is a directory, not a %s' % (path, type))
+    assert expected_http_error(e, 400, "%s is a directory, not a %s" % (path, type))

     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        path = 'unicodé/innonascii.ipynb'
-        type = 'directory'
+        path = "unicodé/innonascii.ipynb"
+        type = "directory"
         await fetch(
-            'api', 'contents', path,
-            method='GET',
-            params=dict(type=type)  # This should be a file, and thus throw and error
+            "api",
+            "contents",
+            path,
+            method="GET",
+            params=dict(type=type),  # This should be a file, and thus throw and error
         )
-    assert expected_http_error(e, 400, '%s is not a directory' % path)
+    assert expected_http_error(e, 400, "%s is not a directory" % path)


-def _check_created(r, contents_dir, path, name, type='notebook'):
-    fpath = path+'/'+name
+def _check_created(r, contents_dir, path, name, type="notebook"):
+    fpath = path + "/" + name
     assert r.code == 201
-    location = '/api/contents/' + tornado.escape.url_escape(fpath, plus=False)
-    assert r.headers['Location'] == location
+    location = "/api/contents/" + tornado.escape.url_escape(fpath, plus=False)
+    assert r.headers["Location"] == location
     model = json.loads(r.body.decode())
-    assert model['name'] == name
-    assert model['path'] == fpath
-    assert model['type'] == type
-    path = contents_dir + '/' + fpath
-    if type == 'directory':
+    assert model["name"] == name
+    assert model["path"] == fpath
+    assert model["type"] == type
+    path = contents_dir + "/" + fpath
+    if type == "directory":
         assert pathlib.Path(path).is_dir()
     else:
         assert pathlib.Path(path).is_file()


 async def test_create_untitled(fetch, contents, contents_dir):
-    path = 'å b'
-    name = 'Untitled.ipynb'
+    path = "å b"
+    name = "Untitled.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'ext': '.ipynb'})
+        "api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})
     )
-    _check_created(r, str(contents_dir), path, name, type='notebook')
+    _check_created(r, str(contents_dir), path, name, type="notebook")

-    name = 'Untitled1.ipynb'
+    name = "Untitled1.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'ext': '.ipynb'})
+        "api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})
     )
-    _check_created(r, str(contents_dir), path, name, type='notebook')
+    _check_created(r, str(contents_dir), path, name, type="notebook")

-    path = 'foo/bar'
-    name = 'Untitled.ipynb'
+    path = "foo/bar"
+    name = "Untitled.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'ext': '.ipynb'})
+        "api", "contents", path, method="POST", body=json.dumps({"ext": ".ipynb"})
     )
-    _check_created(r, str(contents_dir), path, name, type='notebook')
+    _check_created(r, str(contents_dir), path, name, type="notebook")


 async def test_create_untitled_txt(fetch, contents, contents_dir):
-    name = 'untitled.txt'
-    path = 'foo/bar'
+    name = "untitled.txt"
+    path = "foo/bar"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'ext': '.txt'})
+        "api", "contents", path, method="POST", body=json.dumps({"ext": ".txt"})
     )
-    _check_created(r, str(contents_dir), path, name, type='file')
+    _check_created(r, str(contents_dir), path, name, type="file")

-    r = await fetch(
-        'api', 'contents', path, name,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, name, method="GET")
     model = json.loads(r.body.decode())
-    assert model['type'] == 'file'
-    assert model['format'] == 'text'
-    assert model['content'] == ''
+    assert model["type"] == "file"
+    assert model["format"] == "text"
+    assert model["content"] == ""


 async def test_upload(fetch, contents, contents_dir):
     nb = new_notebook()
-    nbmodel = {'content': nb, 'type': 'notebook'}
-    path = 'å b'
-    name = 'Upload tést.ipynb'
+    nbmodel = {"content": nb, "type": "notebook"}
+    path = "å b"
+    name = "Upload tést.ipynb"
     r = await fetch(
-        'api', 'contents', path, name,
-        method='PUT',
-        body=json.dumps(nbmodel)
+        "api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)
     )
     _check_created(r, str(contents_dir), path, name)


 async def test_mkdir_untitled(fetch, contents, contents_dir):
-    name = 'Untitled Folder'
-    path = 'å b'
+    name = "Untitled Folder"
+    path = "å b"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'type': 'directory'})
+        "api", "contents", path, method="POST", body=json.dumps({"type": "directory"})
     )
-    _check_created(r, str(contents_dir), path, name, type='directory')
+    _check_created(r, str(contents_dir), path, name, type="directory")

-    name = 'Untitled Folder 1'
+    name = "Untitled Folder 1"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'type': 'directory'})
+        "api", "contents", path, method="POST", body=json.dumps({"type": "directory"})
     )
-    _check_created(r, str(contents_dir), path, name, type='directory')
+    _check_created(r, str(contents_dir), path, name, type="directory")

-    name = 'Untitled Folder'
-    path = 'foo/bar'
+    name = "Untitled Folder"
+    path = "foo/bar"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'type': 'directory'})
+        "api", "contents", path, method="POST", body=json.dumps({"type": "directory"})
     )
-    _check_created(r, str(contents_dir), path, name, type='directory')
+    _check_created(r, str(contents_dir), path, name, type="directory")


 async def test_mkdir(fetch, contents, contents_dir):
-    name = 'New ∂ir'
-    path = 'å b'
+    name = "New ∂ir"
+    path = "å b"
     r = await fetch(
-        'api', 'contents', path, name,
-        method='PUT',
-        body=json.dumps({'type': 'directory'})
+        "api",
+        "contents",
+        path,
+        name,
+        method="PUT",
+        body=json.dumps({"type": "directory"}),
     )
-    _check_created(r, str(contents_dir), path, name, type='directory')
+    _check_created(r, str(contents_dir), path, name, type="directory")


 async def test_mkdir_hidden_400(fetch):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'å b/.hidden',
-            method='PUT',
-            body=json.dumps({'type': 'directory'})
+            "api",
+            "contents",
+            "å b/.hidden",
+            method="PUT",
+            body=json.dumps({"type": "directory"}),
         )
     assert expected_http_error(e, 400)


 async def test_upload_txt(fetch, contents, contents_dir):
-    body = 'ünicode téxt'
+    body = "ünicode téxt"
     model = {
-        'content' : body,
-        'format' : 'text',
-        'type' : 'file',
+        "content": body,
+        "format": "text",
+        "type": "file",
     }
-    path = 'å b'
-    name = 'Upload tést.txt'
-    await fetch(
-        'api', 'contents', path, name,
-        method='PUT',
-        body=json.dumps(model)
-    )
+    path = "å b"
+    name = "Upload tést.txt"
+    await fetch("api", "contents", path, name, method="PUT", body=json.dumps(model))

     # check roundtrip
-    r = await fetch(
-        'api', 'contents', path, name,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, name, method="GET")
     model = json.loads(r.body.decode())
-    assert model['type'] == 'file'
-    assert model['format'] == 'text'
-    assert model['path'] == path+'/'+name
-    assert model['content'] == body
+    assert model["type"] == "file"
+    assert model["format"] == "text"
+    assert model["path"] == path + "/" + name
+    assert model["content"] == body


 async def test_upload_b64(fetch, contents, contents_dir):
-    body = b'\xFFblob'
-    b64body = encodebytes(body).decode('ascii')
+    body = b"\xFFblob"
+    b64body = encodebytes(body).decode("ascii")
     model = {
-        'content' : b64body,
-        'format' : 'base64',
-        'type' : 'file',
+        "content": b64body,
+        "format": "base64",
+        "type": "file",
     }
-    path = 'å b'
-    name = 'Upload tést.blob'
-    await fetch(
-        'api', 'contents', path, name,
-        method='PUT',
-        body=json.dumps(model)
-    )
+    path = "å b"
+    name = "Upload tést.blob"
+    await fetch("api", "contents", path, name, method="PUT", body=json.dumps(model))

     # check roundtrip
-    r = await fetch(
-        'api', 'contents', path, name,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, name, method="GET")
     model = json.loads(r.body.decode())
-    assert model['type'] == 'file'
-    assert model['path'] == path+'/'+name
-    assert model['format'] == 'base64'
-    decoded = decodebytes(model['content'].encode('ascii'))
+    assert model["type"] == "file"
+    assert model["path"] == path + "/" + name
+    assert model["format"] == "base64"
+    decoded = decodebytes(model["content"].encode("ascii"))
     assert decoded == body


 async def test_copy(fetch, contents, contents_dir):
-    path = 'å b'
-    name = 'ç d.ipynb'
-    copy = 'ç d-Copy1.ipynb'
+    path = "å b"
+    name = "ç d.ipynb"
+    copy = "ç d-Copy1.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'copy_from': path+'/'+name})
+        "api",
+        "contents",
+        path,
+        method="POST",
+        body=json.dumps({"copy_from": path + "/" + name}),
     )
-    _check_created(r, str(contents_dir), path, copy, type='notebook')
-    
+    _check_created(r, str(contents_dir), path, copy, type="notebook")
+
     # Copy the same file name
-    copy2 = 'ç d-Copy2.ipynb'
+    copy2 = "ç d-Copy2.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'copy_from': path+'/'+name})
+        "api",
+        "contents",
+        path,
+        method="POST",
+        body=json.dumps({"copy_from": path + "/" + name}),
     )
-    _check_created(r, str(contents_dir), path, copy2, type='notebook')
+    _check_created(r, str(contents_dir), path, copy2, type="notebook")

     # copy a copy.
-    copy3 = 'ç d-Copy3.ipynb'
+    copy3 = "ç d-Copy3.ipynb"
     r = await fetch(
-        'api', 'contents', path,
-        method='POST',
-        body=json.dumps({'copy_from': path+'/'+copy2})
+        "api",
+        "contents",
+        path,
+        method="POST",
+        body=json.dumps({"copy_from": path + "/" + copy2}),
     )
-    _check_created(r, str(contents_dir), path, copy3, type='notebook')
+    _check_created(r, str(contents_dir), path, copy3, type="notebook")


 async def test_copy_path(fetch, contents, contents_dir):
-    path1 = 'foo'
-    path2 = 'å b'
-    name = 'a.ipynb'
-    copy = 'a-Copy1.ipynb'
+    path1 = "foo"
+    path2 = "å b"
+    name = "a.ipynb"
+    copy = "a-Copy1.ipynb"
     r = await fetch(
-        'api', 'contents', path2,
-        method='POST',
-        body=json.dumps({'copy_from': path1+'/'+name})
+        "api",
+        "contents",
+        path2,
+        method="POST",
+        body=json.dumps({"copy_from": path1 + "/" + name}),
     )
-    _check_created(r, str(contents_dir), path2, name, type='notebook')
+    _check_created(r, str(contents_dir), path2, name, type="notebook")

     r = await fetch(
-        'api', 'contents', path2,
-        method='POST',
-        body=json.dumps({'copy_from': path1+'/'+name})
+        "api",
+        "contents",
+        path2,
+        method="POST",
+        body=json.dumps({"copy_from": path1 + "/" + name}),
     )
-    _check_created(r, str(contents_dir), path2, copy, type='notebook')
+    _check_created(r, str(contents_dir), path2, copy, type="notebook")


 async def test_copy_put_400(fetch, contents, contents_dir):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'å b/cøpy.ipynb',
-            method='PUT',
-            body=json.dumps({'copy_from': 'å b/ç d.ipynb'})
+            "api",
+            "contents",
+            "å b/cøpy.ipynb",
+            method="PUT",
+            body=json.dumps({"copy_from": "å b/ç d.ipynb"}),
         )
     assert expected_http_error(e, 400)
@@ -509,333 +466,277 @@ async def test_copy_put_400(fetch, contents, contents_dir):

 async def test_copy_dir_400(fetch, contents, contents_dir):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
         await fetch(
-            'api', 'contents', 'foo',
-            method='POST',
-            body=json.dumps({'copy_from': 'å b'})
+            "api",
+            "contents",
+            "foo",
+            method="POST",
+            body=json.dumps({"copy_from": "å b"}),
         )
     assert expected_http_error(e, 400)


-@pytest.mark.parametrize('path,name', dirs)
+@pytest.mark.parametrize("path,name", dirs)
 async def test_delete(fetch, contents, contents_dir, path, name):
-    nbname = name+'.ipynb'
-    nbpath = (path + '/' + nbname).lstrip('/')
-    r = await fetch(
-        'api', 'contents', nbpath,
-        method='DELETE',
-    )
+    nbname = name + ".ipynb"
+    nbpath = (path + "/" + nbname).lstrip("/")
+    r = await fetch("api", "contents", nbpath, method="DELETE",)
    assert r.code == 204


 async def test_delete_dirs(fetch, contents, folders):
     # Iterate over folders
-    for name in sorted(folders + ['/'], key=len, reverse=True):
-        r = await fetch(
-            'api', 'contents', name,
-            method='GET'
-        )
+    for name in sorted(folders + ["/"], key=len, reverse=True):
+        r = await fetch("api", "contents", name, method="GET")
         # Get JSON blobs for each content.
-        listing = json.loads(r.body.decode())['content']
+        listing = json.loads(r.body.decode())["content"]
         # Delete all content
         for model in listing:
-            await fetch(
-                'api', 'contents', model['path'],
-                method='DELETE'
-            )
+            await fetch("api", "contents", model["path"], method="DELETE")
     # Make sure all content has been deleted.
-    r = await fetch(
-        'api', 'contents',
-        method='GET'
-    )
+    r = await fetch("api", "contents", method="GET")
     model = json.loads(r.body.decode())
-    assert model['content'] == []
+    assert model["content"] == []


-@pytest.mark.skipif(sys.platform == 'win32', reason="Disabled deleting non-empty dirs on Windows")
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Disabled deleting non-empty dirs on Windows"
+)
 async def test_delete_non_empty_dir(fetch, contents):
     # Delete a folder
-    await fetch(
-        'api', 'contents', 'å b',
-        method='DELETE'
-    )
+    await fetch("api", "contents", "å b", method="DELETE")
     # Check that the folder was been deleted.
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        await fetch(
-            'api', 'contents', 'å b',
-            method='GET'
-        )
+        await fetch("api", "contents", "å b", method="GET")
     assert expected_http_error(e, 404)


 async def test_rename(fetch, contents, contents_dir):
-    path = 'foo'
-    name = 'a.ipynb'
-    new_name = 'z.ipynb'
+    path = "foo"
+    name = "a.ipynb"
+    new_name = "z.ipynb"
     # Rename the file
     r = await fetch(
-        'api', 'contents', path, name,
-        method='PATCH',
-        body=json.dumps({'path': path+'/'+new_name})
+        "api",
+        "contents",
+        path,
+        name,
+        method="PATCH",
+        body=json.dumps({"path": path + "/" + new_name}),
     )
-    fpath = path+'/'+new_name
+    fpath = path + "/" + new_name
     assert r.code == 200
-    location = '/api/contents/' + fpath
-    assert r.headers['Location'] == location
+    location = "/api/contents/" + fpath
+    assert r.headers["Location"] == location
     model = json.loads(r.body.decode())
-    assert model['name'] == new_name
-    assert model['path'] == fpath
+    assert model["name"] == new_name
+    assert model["path"] == fpath
     fpath = str(contents_dir / fpath)
     assert pathlib.Path(fpath).is_file()

     # Check that the files have changed
-    r = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, method="GET")
     listing = json.loads(r.body.decode())
-    nbnames = [name['name'] for name in listing['content']]
-    assert 'z.ipynb' in nbnames
-    assert 'a.ipynb' not in nbnames
+    nbnames = [name["name"] for name in listing["content"]]
+    assert "z.ipynb" in nbnames
+    assert "a.ipynb" not in nbnames


 async def test_checkpoints_follow_file(fetch, contents):
-    path = 'foo'
-    name = 'a.ipynb'
-    
+    path = "foo"
+    name = "a.ipynb"
+
     # Read initial file.
-    r = await fetch(
-        'api', 'contents', path, name,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, name, method="GET")
     model = json.loads(r.body.decode())
-    
+
     # Create a checkpoint of initial state
     r = await fetch(
-        'api', 'contents', path, name, 'checkpoints',
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "contents",
+        path,
+        name,
+        "checkpoints",
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     cp1 = json.loads(r.body.decode())
-    
+
     # Modify file and save.
-    nbcontent = model['content']
+    nbcontent = model["content"]
     nb = from_dict(nbcontent)
-    hcell = new_markdown_cell('Created by test')
+    hcell = new_markdown_cell("Created by test")
     nb.cells.append(hcell)
-    nbmodel = {'content': nb, 'type': 'notebook'}
+    nbmodel = {"content": nb, "type": "notebook"}
     r = await fetch(
-        'api', 'contents', path, name,
-        method='PUT',
-        body=json.dumps(nbmodel)
+        "api", "contents", path, name, method="PUT", body=json.dumps(nbmodel)
     )

     # List checkpoints
-    r = await fetch(
-        'api', 'contents', path, name, 'checkpoints',
-        method='GET',
-    )
+    r = await fetch("api", "contents", path, name, "checkpoints", method="GET",)
     cps = json.loads(r.body.decode())
     assert cps == [cp1]

-    r = await fetch(
-        'api', 'contents', path, name,
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, name, method="GET")
     model = json.loads(r.body.decode())
-    nbcontent = model['content']
+    nbcontent = model["content"]
     nb = from_dict(nbcontent)
     assert nb.cells[0].source == "Created by test"


 async def test_rename_existing(fetch, contents):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        path = 'foo'
-        name = 'a.ipynb'
-        new_name = 'b.ipynb'
+        path = "foo"
+        name = "a.ipynb"
+        new_name = "b.ipynb"
         # Rename the file
         r = await fetch(
-            'api', 'contents', path, name,
-            method='PATCH',
-            body=json.dumps({'path': path+'/'+new_name})
+            "api",
+            "contents",
+            path,
+            name,
+            method="PATCH",
+            body=json.dumps({"path": path + "/" + new_name}),
         )
     assert expected_http_error(e, 409)


 async def test_save(fetch, contents):
-    r = await fetch(
-        'api', 'contents', 'foo/a.ipynb',
-        method='GET'
-    )
+    r = await fetch("api", "contents", "foo/a.ipynb", method="GET")
     model = json.loads(r.body.decode())
-    nbmodel = model['content']
+    nbmodel = model["content"]
     nb = from_dict(nbmodel)
-    nb.cells.append(new_markdown_cell('Created by test ³'))
-    nbmodel = {'content': nb, 'type': 'notebook'}
+    nb.cells.append(new_markdown_cell("Created by test ³"))
+    nbmodel = {"content": nb, "type": "notebook"}
     r = await fetch(
-        'api', 'contents', 'foo/a.ipynb',
-        method='PUT',
-        body=json.dumps(nbmodel)
+        "api", "contents", "foo/a.ipynb", method="PUT", body=json.dumps(nbmodel)
     )

     # Round trip.
-    r = await fetch(
-        'api', 'contents', 'foo/a.ipynb',
-        method='GET'
-    )
+    r = await fetch("api", "contents", "foo/a.ipynb", method="GET")
     model = json.loads(r.body.decode())
-    newnb = from_dict(model['content'])
-    assert newnb.cells[0].source == 'Created by test ³'
+    newnb = from_dict(model["content"])
+    assert newnb.cells[0].source == "Created by test ³"


 async def test_checkpoints(fetch, contents):
-    path = 'foo/a.ipynb'
-    resp = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
+    path = "foo/a.ipynb"
+    resp = await fetch("api", "contents", path, method="GET")
     model = json.loads(resp.body.decode())
     r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "contents",
+        path,
+        "checkpoints",
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     assert r.code == 201
     cp1 = json.loads(r.body.decode())
-    assert set(cp1) == {'id', 'last_modified'}
-    assert r.headers['Location'].split('/')[-1] == cp1['id']
+    assert set(cp1) == {"id", "last_modified"}
+    assert r.headers["Location"].split("/")[-1] == cp1["id"]

     # Modify it.
-    nbcontent = model['content']
+    nbcontent = model["content"]
     nb = from_dict(nbcontent)
-    hcell = new_markdown_cell('Created by test')
+    hcell = new_markdown_cell("Created by test")
     nb.cells.append(hcell)
-    
+
     # Save it.
-    nbmodel = {'content': nb, 'type': 'notebook'}
-    resp = await fetch(
-        'api', 'contents', path,
-        method='PUT',
-        body=json.dumps(nbmodel)
-    )
-
+    nbmodel = {"content": nb, "type": "notebook"}
+    resp = await fetch("api", "contents", path, method="PUT", body=json.dumps(nbmodel))
+
     # List checkpoints
-    r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", method="GET")
     cps = json.loads(r.body.decode())
     assert cps == [cp1]

-    r = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
-    nbcontent = json.loads(r.body.decode())['content']
+    r = await fetch("api", "contents", path, method="GET")
+    nbcontent = json.loads(r.body.decode())["content"]
     nb = from_dict(nbcontent)
-    assert nb.cells[0].source == 'Created by test'
+    assert nb.cells[0].source == "Created by test"

     # Restore Checkpoint cp1
     r = await fetch(
-        'api', 'contents', path, 'checkpoints', cp1['id'],
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "contents",
+        path,
+        "checkpoints",
+        cp1["id"],
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     assert r.code == 204

-    r = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
-    nbcontent = json.loads(r.body.decode())['content']
+    r = await fetch("api", "contents", path, method="GET")
+    nbcontent = json.loads(r.body.decode())["content"]
     nb = from_dict(nbcontent)
     assert nb.cells == []

     # Delete cp1
-    r = await fetch(
-        'api', 'contents', path, 'checkpoints', cp1['id'],
-        method='DELETE'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE")
     assert r.code == 204

-    r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", method="GET")
     cps = json.loads(r.body.decode())
     assert cps == []


 async def test_file_checkpoints(fetch, contents):
-    path = 'foo/a.txt'
-    resp = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
-    orig_content = json.loads(resp.body.decode())['content']
+    path = "foo/a.txt"
+    resp = await fetch("api", "contents", path, method="GET")
+    orig_content = json.loads(resp.body.decode())["content"]
     r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "contents",
+        path,
+        "checkpoints",
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     assert r.code == 201
     cp1 = json.loads(r.body.decode())
-    assert set(cp1) == {'id', 'last_modified'}
-    assert r.headers['Location'].split('/')[-1] == cp1['id']
+    assert set(cp1) == {"id", "last_modified"}
+    assert r.headers["Location"].split("/")[-1] == cp1["id"]

     # Modify it.
-    new_content = orig_content + '\nsecond line'
+    new_content = orig_content + "\nsecond line"
     model = {
-        'content': new_content,
-        'type': 'file',
-        'format': 'text',
+        "content": new_content,
+        "type": "file",
+        "format": "text",
     }
-
+
     # Save it.
-    resp = await fetch(
-        'api', 'contents', path,
-        method='PUT',
-        body=json.dumps(model)
-    )
-
+    resp = await fetch("api", "contents", path, method="PUT", body=json.dumps(model))
+
     # List checkpoints
-    r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", method="GET")
     cps = json.loads(r.body.decode())
     assert cps == [cp1]

-    r = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
-    content = json.loads(r.body.decode())['content']
+    r = await fetch("api", "contents", path, method="GET")
+    content = json.loads(r.body.decode())["content"]
     assert content == new_content

     # Restore Checkpoint cp1
     r = await fetch(
-        'api', 'contents', path, 'checkpoints', cp1['id'],
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "contents",
+        path,
+        "checkpoints",
+        cp1["id"],
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     assert r.code == 204

-    r = await fetch(
-        'api', 'contents', path,
-        method='GET'
-    )
-    restored_content = json.loads(r.body.decode())['content']
+    r = await fetch("api", "contents", path, method="GET")
+    restored_content = json.loads(r.body.decode())["content"]
     assert restored_content == orig_content

     # Delete cp1
-    r = await fetch(
-        'api', 'contents', path, 'checkpoints', cp1['id'],
-        method='DELETE'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", cp1["id"], method="DELETE")
     assert r.code == 204

-    r = await fetch(
-        'api', 'contents', path, 'checkpoints',
-        method='GET'
-    )
+    r = await fetch("api", "contents", path, "checkpoints", method="GET")
     cps = json.loads(r.body.decode())
-    assert cps == []
\ No newline at end of file
+    assert cps == []
diff --git a/tests/services/contents/test_config.py b/tests/services/contents/test_config.py
index a427861dd3..2f9cf5496e 100644
--- a/tests/services/contents/test_config.py
+++ b/tests/services/contents/test_config.py
@@ -6,8 +6,8 @@

 @pytest.fixture
 def config():
-    return {'FileContentsManager': {'checkpoints_class': GenericFileCheckpoints}}
+    return {"FileContentsManager": {"checkpoints_class": GenericFileCheckpoints}}


 def test_config_did_something(serverapp):
-    assert isinstance(serverapp.contents_manager.checkpoints, GenericFileCheckpoints)
\ No newline at end of file
+    assert isinstance(serverapp.contents_manager.checkpoints, GenericFileCheckpoints)
diff --git a/tests/services/contents/test_fileio.py b/tests/services/contents/test_fileio.py
index 3a88d4aaa7..ea665f0f5b 100644
--- a/tests/services/contents/test_fileio.py
+++ b/tests/services/contents/test_fileio.py
@@ -2,7 +2,7 @@
 import os
 import stat
 import functools
-import decorator
+import decorator

 import pytest

@@ -19,6 +19,7 @@ def skip_win32(f):
     def inner(f, *args, **kwargs):
         decorated_f = _skip_win32(f)
         return decorated_f(*args, **kwargs)
+
     return decorator.decorator(inner, f)


@@ -26,16 +27,17 @@ def inner(f, *args, **kwargs):
 def test_atomic_writing(tmp_path):
-    class CustomExc(Exception): pass
+    class CustomExc(Exception):
+        pass

-    f1 = tmp_path / 'penguin'
-    f1.write_text('Before')
+    f1 = tmp_path / "penguin"
+    f1.write_text("Before")

-    if os.name != 'nt':
+    if os.name != "nt":
         os.chmod(str(f1), 0o701)
         orig_mode = stat.S_IMODE(os.stat(str(f1)).st_mode)

-    f2 = tmp_path / 'flamingo'
+    f2 = tmp_path / "flamingo"
     try:
         os.symlink(str(f1), str(f2))
         have_symlink = True
@@ -47,29 +49,29 @@ class CustomExc(Exception): pass

     with pytest.raises(CustomExc):
         with atomic_writing(str(f1)) as f:
-            f.write('Failing write')
+            f.write("Failing write")
             raise CustomExc

-    with io.open(str(f1), 'r') as f:
-        assert f.read() == 'Before'
-
+    with io.open(str(f1), "r") as f:
+        assert f.read() == "Before"
+
     with atomic_writing(str(f1)) as f:
-        f.write('Overwritten')
+        f.write("Overwritten")

-    with io.open(str(f1), 'r') as f:
-        assert f.read() == 'Overwritten'
+    with io.open(str(f1), "r") as f:
+        assert f.read() == "Overwritten"

-    if os.name != 'nt':
+    if os.name != "nt":
         mode = stat.S_IMODE(os.stat(str(f1)).st_mode)
         assert mode == orig_mode

     if have_symlink:
         # Check that writing over a file preserves a symlink
         with atomic_writing(str(f2)) as f:
-            f.write(u'written from symlink')
-
-        with io.open(str(f1), 'r') as f:
-            assert f.read() == 'written from symlink'
+            f.write(u"written from symlink")
+
+        with io.open(str(f1), "r") as f:
+            assert f.read() == "written from symlink"


 @pytest.fixture
@@ -85,54 +87,54 @@ def handle_umask():

 def test_atomic_writing_umask(handle_umask, tmp_path):
     os.umask(0o022)
-    f1 = str(tmp_path / '1')
+    f1 = str(tmp_path / "1")
     with atomic_writing(f1) as f:
-        f.write('1')
+        f.write("1")
     mode = stat.S_IMODE(os.stat(f1).st_mode)
     assert mode == 0o644

     os.umask(0o057)
-    f2 = str(tmp_path / '2')
+    f2 = str(tmp_path / "2")
     with atomic_writing(f2) as f:
-        f.write('2')
+        f.write("2")
     mode = stat.S_IMODE(os.stat(f2).st_mode)
     assert mode == 0o620


 def test_atomic_writing_newlines(tmp_path):
-    path = str(tmp_path / 'testfile')
+    path = str(tmp_path / "testfile")

-    lf = u'a\nb\nc\n'
-    plat = lf.replace(u'\n', os.linesep)
-    crlf = lf.replace(u'\n', u'\r\n')
+    lf = u"a\nb\nc\n"
+    plat = lf.replace(u"\n", os.linesep)
+    crlf = lf.replace(u"\n", u"\r\n")

     # test default
-    with io.open(path, 'w') as f:
+    with io.open(path, "w") as f:
         f.write(lf)
-    with io.open(path, 'r', newline='') as f:
+    with io.open(path, "r", newline="") as f:
         read = f.read()
     assert read == plat

     # test newline=LF
-    with io.open(path, 'w', newline='\n') as f:
+    with io.open(path, "w", newline="\n") as f:
         f.write(lf)
-    with io.open(path, 'r', newline='') as f:
+    with io.open(path, "r", newline="") as f:
         read = f.read()
     assert read == lf

     # test newline=CRLF
-    with atomic_writing(str(path), newline='\r\n') as f:
+    with atomic_writing(str(path), newline="\r\n") as f:
         f.write(lf)
-    with io.open(path, 'r', newline='') as f:
+    with io.open(path, "r", newline="") as f:
         read = f.read()
     assert read == crlf

     # test newline=no convert
-    text = u'crlf\r\ncr\rlf\n'
-    with atomic_writing(str(path), newline='') as f:
+    text = u"crlf\r\ncr\rlf\n"
+    with atomic_writing(str(path), newline="") as f:
         f.write(text)
-    with io.open(path, 'r', newline='') as f:
+    with io.open(path, "r", newline="") as f:
         read = f.read()
-    assert read == text
\ No newline at end of file
+    assert read == text
diff --git a/tests/services/contents/test_largefilemanager.py b/tests/services/contents/test_largefilemanager.py
index bf4e3d24c9..99ababfa75 100644
--- a/tests/services/contents/test_largefilemanager.py
+++ b/tests/services/contents/test_largefilemanager.py
@@ -4,88 +4,101 @@
 from jupyter_server.services.contents.largefilemanager import LargeFileManager
 from ...conftest import expected_http_error

-contents_manager = pytest.fixture(lambda tmp_path: LargeFileManager(root_dir=str(tmp_path)))
+contents_manager = pytest.fixture(
+    lambda tmp_path: LargeFileManager(root_dir=str(tmp_path))
+)


 def test_save(contents_manager):
     cm = contents_manager
-    model = cm.new_untitled(type='notebook')
-    name = model['name']
-    path = model['path']
+    model = cm.new_untitled(type="notebook")
+    name = model["name"]
+    path = model["path"]

     # Get the model with 'content'
     full_model = cm.get(path)

     # Save the notebook
     model = cm.save(full_model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == name
-    assert model['path'] == path
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == name
+    assert model["path"] == path


 @pytest.mark.parametrize(
-    'model,err_message',
+    "model,err_message",
     [
         (
-            {'name': 'test', 'path': 'test', 'chunk': 1},
-            'HTTP 400: Bad Request (No file type provided)'
+            {"name": "test", "path": "test", "chunk": 1},
+            "HTTP 400: Bad Request (No file type provided)",
         ),
         (
-            {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'notebook'},
-            'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)'
+            {"name": "test", "path": "test", "chunk": 1, "type": "notebook"},
+            'HTTP 400: Bad Request (File type "notebook" is not supported for large file transfer)',
         ),
         (
-            {'name': 'test', 'path': 'test', 'chunk': 1, 'type': 'file'},
-            'HTTP 400: Bad Request (No file content provided)',
+            {"name": "test", "path": "test", "chunk": 1, "type": "file"},
+            "HTTP 400: Bad Request (No file content provided)",
         ),
         (
-            {'name': 'test', 'path': 'test', 'chunk': 2, 'type': 'file',
-             'content': u'test', 'format': 'json'},
-            "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')"
-        )
-    ]
+            {
+                "name": "test",
+                "path": "test",
+                "chunk": 2,
+                "type": "file",
+                "content": u"test",
+                "format": "json",
+            },
+            "HTTP 400: Bad Request (Must specify format of file contents as 'text' or 'base64')",
+        ),
+    ],
 )
 def test_bad_save(contents_manager, model, err_message):
     with pytest.raises(tornado.web.HTTPError) as e:
-        contents_manager.save(model, model['path'])
+        contents_manager.save(model, model["path"])
     assert expected_http_error(e, 400, expected_message=err_message)


 def test_saving_different_chunks(contents_manager):
     cm = contents_manager
-    model = {'name': 'test', 'path': 'test', 'type': 'file',
-             'content': u'test==', 'format': 'text'}
-    name = model['name']
-    path = model['path']
+    model = {
+        "name": "test",
+        "path": "test",
+        "type": "file",
+        "content": u"test==",
+        "format": "text",
+    }
+    name = model["name"]
+    path = model["path"]
     cm.save(model, path)

     for chunk in (1, 2, -1):
-        for fm in ('text', 'base64'):
+        for fm in ("text", "base64"):
             full_model = cm.get(path)
-            full_model['chunk'] = chunk
-            full_model['format'] = fm
+            full_model["chunk"] = chunk
+            full_model["format"] = fm
             model_res = cm.save(full_model, path)
             assert isinstance(model_res, dict)
-            assert 'name' in model_res
-            assert 'path' in model_res
-            assert 'chunk' not in model_res
-            assert model_res['name'] == name
-            assert model_res['path'] == path
+            assert "name" in model_res
+            assert "path" in model_res
+            assert "chunk" not in model_res
+            assert model_res["name"] == name
+            assert model_res["path"] == path


 def test_save_in_subdirectory(contents_manager, tmp_path):
     cm = contents_manager
-    sub_dir = tmp_path / 'foo'
+    sub_dir = tmp_path / "foo"
     sub_dir.mkdir()
-    model = cm.new_untitled(path='/foo/', type='notebook')
-    path = model['path']
+    model = cm.new_untitled(path="/foo/", type="notebook")
+    path = model["path"]
     model = cm.get(path)

     # Save the notebook in the sub-directory (path unchanged)
     model = cm.save(model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == 'Untitled.ipynb'
-    assert model['path'] == 'foo/Untitled.ipynb'
\ No newline at end of file
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == "Untitled.ipynb"
+    assert model["path"] == "foo/Untitled.ipynb"
diff --git a/tests/services/contents/test_manager.py b/tests/services/contents/test_manager.py
index 526e53050e..63fde91234 100644
--- a/tests/services/contents/test_manager.py
+++ b/tests/services/contents/test_manager.py
@@ -18,6 +18,7 @@
 # contents_manager_atomic = pytest.fixture(lambda tmp_path: FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=True))
 # contents_manager_nonatomic = pytest.fixture(lambda tmp_path: FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=False))

+
 @pytest.fixture(params=[True, False])
 def contents_manager(request, tmp_path):
     return FileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param)
@@ -46,20 +47,22 @@ def symlink(contents_manager, src, dst):


 def add_code_cell(notebook):
-    output = nbformat.new_output("display_data", {'application/javascript': "alert('hi');"})
+    output = nbformat.new_output(
+        "display_data", {"application/javascript": "alert('hi');"}
+    )
     cell = nbformat.new_code_cell("print('hi')", outputs=[output])
     notebook.cells.append(cell)


 def new_notebook(contents_manager):
     cm = contents_manager
-    model = cm.new_untitled(type='notebook')
-    name = model['name']
-    path = model['path']
+    model = cm.new_untitled(type="notebook")
+    name = model["name"]
+    path = model["path"]

     full_model = cm.get(path)
-    nb = full_model['content']
-    nb['metadata']['counter'] = int(1e6 * time.time())
+    nb = full_model["content"]
+    nb["metadata"]["counter"] = int(1e6 * time.time())
     add_code_cell(nb)

     cm.save(full_model, path)
@@ -76,206 +79,209 @@ def make_populated_dir(contents_manager, api_path):

 def check_populated_dir_files(contents_manager, api_path):
     dir_model = contents_manager.get(api_path)

-    assert dir_model['path'] == api_path
-    assert dir_model['type'] == "directory"
+    assert dir_model["path"] == api_path
+    assert dir_model["type"] == "directory"

-    for entry in dir_model['content']:
-        if entry['type'] == "directory":
+    for entry in dir_model["content"]:
+        if entry["type"] == "directory":
             continue
-        elif entry['type'] == "file":
-            assert entry['name'] == "file.txt"
+        elif entry["type"] == "file":
+            assert entry["name"] == "file.txt"
             complete_path = "/".join([api_path, "file.txt"])
             assert entry["path"] == complete_path
-        elif entry['type'] == "notebook":
-            assert entry['name'] == "nb.ipynb"
+        elif entry["type"] == "notebook":
+            assert entry["name"] == "nb.ipynb"
             complete_path = "/".join([api_path, "nb.ipynb"])
             assert entry["path"] == complete_path

+
 # ----------------- Tests ----------------------------------

+
 def test_root_dir(tmp_path):
     fm = FileContentsManager(root_dir=str(tmp_path))
     assert fm.root_dir == str(tmp_path)


 def test_missing_root_dir(tmp_path):
-    root = tmp_path / 'notebook' / 'dir' / 'is' / 'missing'
+    root = tmp_path / "notebook" / "dir" / "is" / "missing"
     with pytest.raises(TraitError):
         FileContentsManager(root_dir=str(root))


 def test_invalid_root_dir(tmp_path):
-    temp_file = tmp_path / 'file.txt'
-    temp_file.write_text('')
+    temp_file = tmp_path / "file.txt"
+    temp_file.write_text("")
     with pytest.raises(TraitError):
         FileContentsManager(root_dir=str(temp_file))

+
 def test_get_os_path(tmp_path):
     fm = FileContentsManager(root_dir=str(tmp_path))
-    path = fm._get_os_path('/path/to/notebook/test.ipynb')
-    rel_path_list = '/path/to/notebook/test.ipynb'.split('/')
+    path = fm._get_os_path("/path/to/notebook/test.ipynb")
+    rel_path_list = "/path/to/notebook/test.ipynb".split("/")
     fs_path = os.path.join(fm.root_dir, *rel_path_list)
     assert path == fs_path

     fm = FileContentsManager(root_dir=str(tmp_path))
-    path = fm._get_os_path('test.ipynb')
-    fs_path = os.path.join(fm.root_dir, 'test.ipynb')
+    path = fm._get_os_path("test.ipynb")
+    fs_path = os.path.join(fm.root_dir, "test.ipynb")
     assert path == fs_path

     fm = FileContentsManager(root_dir=str(tmp_path))
-    path = fm._get_os_path('////test.ipynb')
-    fs_path = os.path.join(fm.root_dir, 'test.ipynb')
+    path = fm._get_os_path("////test.ipynb")
+    fs_path = os.path.join(fm.root_dir, "test.ipynb")
     assert path == fs_path


 def test_checkpoint_subdir(tmp_path):
-    subd = 'sub ∂ir'
-    cp_name = 'test-cp.ipynb'
+    subd = "sub ∂ir"
+    cp_name = "test-cp.ipynb"
     fm = FileContentsManager(root_dir=str(tmp_path))
     tmp_path.joinpath(subd).mkdir()
     cpm = fm.checkpoints
-    cp_dir = cpm.checkpoint_path('cp', 'test.ipynb')
-    cp_subdir = cpm.checkpoint_path('cp', '/%s/test.ipynb' % subd)
+    cp_dir = cpm.checkpoint_path("cp", "test.ipynb")
+    cp_subdir = cpm.checkpoint_path("cp", "/%s/test.ipynb" % subd)
     assert cp_dir != cp_subdir
     assert cp_dir == os.path.join(str(tmp_path), cpm.checkpoint_dir, cp_name)


 @pytest.mark.skipif(
-    sys.platform == 'win32' and sys.version_info[0] < 3,
-    reason="System platform is Windows, version < 3"
+    sys.platform == "win32" and sys.version_info[0] < 3,
+    reason="System platform is Windows, version < 3",
 )
 def test_bad_symlink(tmp_path):
     td = str(tmp_path)
     cm = FileContentsManager(root_dir=td)
-    path = 'test bad symlink'
+    path = "test bad symlink"
     _make_dir(cm, path)

-    file_model = cm.new_untitled(path=path, ext='.txt')
+    file_model = cm.new_untitled(path=path, ext=".txt")

     # create a broken symlink
-    symlink(cm, "target", '%s/%s' % (path, 'bad symlink'))
+    symlink(cm, "target", "%s/%s" % (path, "bad symlink"))
     model = cm.get(path)

-    contents = {
-        content['name']: content for content in model['content']
-    }
-    assert 'untitled.txt' in contents
-    assert contents['untitled.txt'] == file_model
-    assert 'bad symlink' in contents
+    contents = {content["name"]: content for content in model["content"]}
+    assert "untitled.txt" in contents
+    assert contents["untitled.txt"] == file_model
+    assert "bad symlink" in contents


 @pytest.mark.skipif(
-    sys.platform == 'win32' and sys.version_info[0] < 3,
-    reason="System platform is Windows, version < 3"
+    sys.platform == "win32" and sys.version_info[0] < 3,
+    reason="System platform is Windows, version < 3",
 )
 def test_good_symlink(tmp_path):
     td = str(tmp_path)
     cm = FileContentsManager(root_dir=td)
-    parent = 'test good symlink'
-    name = 'good symlink'
-    path = '{0}/{1}'.format(parent, name)
+    parent = "test good symlink"
+    name = "good symlink"
+    path = "{0}/{1}".format(parent, name)
     _make_dir(cm, parent)

-    file_model = cm.new(path=parent + '/zfoo.txt')
+    file_model = cm.new(path=parent + "/zfoo.txt")

     # create a good symlink
-    symlink(cm, file_model['path'], path)
+    symlink(cm, file_model["path"], path)
     symlink_model = cm.get(path, content=False)
     dir_model = cm.get(parent)
-    assert sorted(dir_model['content'], key=lambda x: x['name']) == [symlink_model, file_model]
+    assert sorted(dir_model["content"], key=lambda x: x["name"]) == [
+        symlink_model,
+        file_model,
+    ]


 def test_403(tmp_path):
-    if hasattr(os, 'getuid'):
+    if hasattr(os, "getuid"):
         if os.getuid() == 0:
             raise pytest.skip("Can't test permissions as root")
-    if sys.platform.startswith('win'):
+    if sys.platform.startswith("win"):
         raise pytest.skip("Can't test permissions on Windows")

     td = str(tmp_path)
     cm = FileContentsManager(root_dir=td)
-    model = cm.new_untitled(type='file')
-    os_path = cm._get_os_path(model['path'])
+    model = cm.new_untitled(type="file")
+    os_path = cm._get_os_path(model["path"])

     os.chmod(os_path, 0o400)
     try:
-        with cm.open(os_path, 'w') as f:
+        with cm.open(os_path, "w") as f:
             f.write(u"don't care")
     except HTTPError as e:
         assert e.status_code == 403

+
 def test_escape_root(tmp_path):
     td = str(tmp_path)
     cm = FileContentsManager(root_dir=td)
     # make foo, bar next to root
-    with open(os.path.join(cm.root_dir, '..', 'foo'), 'w') as f:
-        f.write('foo')
-    with open(os.path.join(cm.root_dir, '..', 'bar'), 'w') as f:
-        f.write('bar')
+    with open(os.path.join(cm.root_dir, "..", "foo"), "w") as f:
+        f.write("foo")
+    with open(os.path.join(cm.root_dir, "..", "bar"), "w") as f:
+        f.write("bar")

     with pytest.raises(HTTPError) as e:
-        cm.get('..')
+        cm.get("..")
     expected_http_error(e, 404)

     with pytest.raises(HTTPError) as e:
-        cm.get('foo/../../../bar')
+        cm.get("foo/../../../bar")
     expected_http_error(e, 404)

     with pytest.raises(HTTPError) as e:
-        cm.delete('../foo')
+        cm.delete("../foo")
     expected_http_error(e, 404)

     with pytest.raises(HTTPError) as e:
-        cm.rename('../foo', '../bar')
+        cm.rename("../foo", "../bar")
     expected_http_error(e, 404)

     with pytest.raises(HTTPError) as e:
-        cm.save(model={
-            'type': 'file',
-            'content': u'',
-            'format': 'text',
-        }, path='../foo')
+        cm.save(
+            model={"type": "file", "content": u"", "format": "text",}, path="../foo"
+        )
     expected_http_error(e, 404)


 def test_new_untitled(contents_manager):
     cm = contents_manager
     # Test in root directory
-    model = cm.new_untitled(type='notebook')
+    model = cm.new_untitled(type="notebook")
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert 'type' in model
-    assert model['type'] == 'notebook'
-    assert model['name'] == 'Untitled.ipynb'
-    assert model['path'] == 'Untitled.ipynb'
+    assert "name" in model
+    assert "path" in model
+    assert "type" in model
+    assert model["type"] == "notebook"
+    assert model["name"] == "Untitled.ipynb"
+    assert model["path"] == "Untitled.ipynb"

     # Test in sub-directory
-    model = cm.new_untitled(type='directory')
+    model = cm.new_untitled(type="directory")
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert 'type' in model
-    assert model['type'] == 'directory'
-    assert model['name'] == 'Untitled Folder'
-    assert model['path'] == 'Untitled Folder'
-    sub_dir = model['path']
+    assert "name" in model
+    assert "path" in model
+    assert "type" in model
+    assert model["type"] == "directory"
+    assert model["name"] == "Untitled Folder"
+    assert model["path"] == "Untitled Folder"
+    sub_dir = model["path"]

     model = cm.new_untitled(path=sub_dir)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert 'type' in model
-    assert model['type'] == 'file'
-    assert model['name'] == 'untitled'
-    assert model['path'] == '%s/untitled' % sub_dir
+    assert "name" in model
+    assert "path" in model
+    assert "type" in model
+    assert model["type"] == "file"
+    assert model["name"] == "untitled"
+    assert model["path"] == "%s/untitled" % sub_dir

     # Test with a compound extension
-    model = cm.new_untitled(path=sub_dir, ext='.foo.bar')
-    assert model['name'] == 'untitled.foo.bar'
-    model = cm.new_untitled(path=sub_dir, ext='.foo.bar')
-    assert model['name'] == 'untitled1.foo.bar'
+    model = cm.new_untitled(path=sub_dir, ext=".foo.bar")
+    assert model["name"] == "untitled.foo.bar"
+    model = cm.new_untitled(path=sub_dir, ext=".foo.bar")
+    assert model["name"] == "untitled1.foo.bar"


 def test_modified_date(contents_manager):
@@ -285,125 +291,124 @@ def test_modified_date(contents_manager):
     model = cm.get(path)

     # Add a cell and save.
-    add_code_cell(model['content'])
+    add_code_cell(model["content"])
     cm.save(model, path)

     # Reload notebook and verify that last_modified incremented.
     saved = cm.get(path)
-    assert saved['last_modified'] >= model['last_modified']
+    assert saved["last_modified"] >= model["last_modified"]

     # Move the notebook and verify that last_modified stayed the same.
     # (The frontend fires a warning if last_modified increases on the
     # renamed file.)
-    new_path = 'renamed.ipynb'
+    new_path = "renamed.ipynb"
     cm.rename(path, new_path)
     renamed = cm.get(new_path)
-    assert renamed['last_modified'] >= saved['last_modified']
+    assert renamed["last_modified"] >= saved["last_modified"]


 def test_get(contents_manager):
     cm = contents_manager
     # Create a notebook
-    model = cm.new_untitled(type='notebook')
-    name = model['name']
-    path = model['path']
+    model = cm.new_untitled(type="notebook")
+    name = model["name"]
+    path = model["path"]

     # Check that we 'get' on the notebook we just created
     model2 = cm.get(path)
     assert isinstance(model2, dict)
-    assert 'name' in model2
-    assert 'path' in model2
-    assert model['name'] == name
-    assert model['path'] == path
+    assert "name" in model2
+    assert "path" in model2
+    assert model["name"] == name
+    assert model["path"] == path

-    nb_as_file = cm.get(path, content=True, type='file')
-    assert nb_as_file['path'] == path
-    assert nb_as_file['type'] == 'file'
-    assert nb_as_file['format'] == 'text'
-    assert not isinstance(nb_as_file['content'], dict)
+    nb_as_file = cm.get(path, content=True, type="file")
+    assert nb_as_file["path"] == path
+    assert nb_as_file["type"] == "file"
+    assert nb_as_file["format"] == "text"
+    assert not isinstance(nb_as_file["content"], dict)

-    nb_as_bin_file = cm.get(path, content=True, type='file', format='base64')
-    assert nb_as_bin_file['format'] == 'base64'
+    nb_as_bin_file = cm.get(path, content=True, type="file", format="base64")
+    assert nb_as_bin_file["format"] == "base64"

     # Test in sub-directory
-    sub_dir = '/foo/'
-    _make_dir(cm, 'foo')
-    model = cm.new_untitled(path=sub_dir, ext='.ipynb')
+    sub_dir = "/foo/"
+    _make_dir(cm, "foo")
+    model = cm.new_untitled(path=sub_dir, ext=".ipynb")
     model2 = cm.get(sub_dir + name)
     assert isinstance(model2, dict)
-    assert 'name' in model2
-    assert 'path' in model2
-    assert 'content' in model2
-    assert model2['name'] == 'Untitled.ipynb'
-    assert model2['path'] == '{0}/{1}'.format(sub_dir.strip('/'), name)
-
+    assert "name" in model2
+    assert "path" in model2
+    assert "content" in model2
+    assert model2["name"] == "Untitled.ipynb"
+    assert model2["path"] == "{0}/{1}".format(sub_dir.strip("/"), name)

     # Test with a regular file.
-    file_model_path = cm.new_untitled(path=sub_dir, ext='.txt')['path']
+    file_model_path = cm.new_untitled(path=sub_dir, ext=".txt")["path"]
     file_model = cm.get(file_model_path)
     expected_model = {
-        'content': u'',
-        'format': u'text',
-        'mimetype': u'text/plain',
-        'name': u'untitled.txt',
-        'path': u'foo/untitled.txt',
-        'type': u'file',
-        'writable': True,
+        "content": u"",
+        "format": u"text",
+        "mimetype": u"text/plain",
+        "name": u"untitled.txt",
+        "path": u"foo/untitled.txt",
+        "type": u"file",
+        "writable": True,
     }

     # Assert expected model is in file_model
     for key, value in expected_model.items():
         assert file_model[key] == value

-    assert 'created' in file_model
-    assert 'last_modified' in file_model
+    assert "created" in file_model
+    assert "last_modified" in file_model

     # Create a sub-sub directory to test getting directory contents with a
     # subdir.
-    _make_dir(cm, 'foo/bar')
-    dirmodel = cm.get('foo')
-    assert dirmodel['type'] == 'directory'
-    assert isinstance(dirmodel['content'], list)
-    assert len(dirmodel['content']) == 3
-    assert dirmodel['path'] == 'foo'
-    assert dirmodel['name'] == 'foo'
+    _make_dir(cm, "foo/bar")
+    dirmodel = cm.get("foo")
+    assert dirmodel["type"] == "directory"
+    assert isinstance(dirmodel["content"], list)
+    assert len(dirmodel["content"]) == 3
+    assert dirmodel["path"] == "foo"
+    assert dirmodel["name"] == "foo"

     # Directory contents should match the contents of each individual entry
     # when requested with content=False.
     model2_no_content = cm.get(sub_dir + name, content=False)
-    file_model_no_content = cm.get(u'foo/untitled.txt', content=False)
-    sub_sub_dir_no_content = cm.get('foo/bar', content=False)
-    assert sub_sub_dir_no_content['path'] == 'foo/bar'
-    assert sub_sub_dir_no_content['name'] == 'bar'
+    file_model_no_content = cm.get(u"foo/untitled.txt", content=False)
+    sub_sub_dir_no_content = cm.get("foo/bar", content=False)
+    assert sub_sub_dir_no_content["path"] == "foo/bar"
+    assert sub_sub_dir_no_content["name"] == "bar"

-    for entry in dirmodel['content']:
+    for entry in dirmodel["content"]:
         # Order isn't guaranteed by the spec, so this is a hacky way of
         # verifying that all entries are matched.
-        if entry['path'] == sub_sub_dir_no_content['path']:
+        if entry["path"] == sub_sub_dir_no_content["path"]:
             assert entry == sub_sub_dir_no_content
-        elif entry['path'] == model2_no_content['path']:
+        elif entry["path"] == model2_no_content["path"]:
             assert entry == model2_no_content
-        elif entry['path'] == file_model_no_content['path']:
+        elif entry["path"] == file_model_no_content["path"]:
             assert entry == file_model_no_content
         else:
             assert False, "Unexpected directory entry: %s" % entry()

     with pytest.raises(HTTPError):
-        cm.get('foo', type='file')
+        cm.get("foo", type="file")


 def test_update(contents_manager):
     cm = contents_manager
     # Create a notebook.
-    model = cm.new_untitled(type='notebook')
-    name = model['name']
-    path = model['path']
+    model = cm.new_untitled(type="notebook")
+    name = model["name"]
+    path = model["path"]

     # Change the name in the model for rename
-    model['path'] = 'test.ipynb'
+    model["path"] = "test.ipynb"
     model = cm.update(model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == 'test.ipynb'
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == "test.ipynb"

     # Make sure the old name is gone
     with pytest.raises(HTTPError):
@@ -411,20 +416,20 @@ def test_update(contents_manager):

     # Test in sub-directory
     # Create a directory and notebook in that directory
-    sub_dir = '/foo/'
-    _make_dir(cm, 'foo')
-    model = cm.new_untitled(path=sub_dir, type='notebook')
-    path = model['path']
+    sub_dir = "/foo/"
+    _make_dir(cm, "foo")
+    model = cm.new_untitled(path=sub_dir, type="notebook")
+    path = model["path"]

     # Change the name in the model for rename
-    d = path.rsplit('/', 1)[0]
-    new_path = model['path'] = d + '/test_in_sub.ipynb'
+    d = path.rsplit("/", 1)[0]
+    new_path = model["path"] = d + "/test_in_sub.ipynb"
     model = cm.update(model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == 'test_in_sub.ipynb'
-    assert model['path'] == new_path
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == "test_in_sub.ipynb"
+    assert model["path"] == new_path

     # Make sure the old name is gone
     with pytest.raises(HTTPError):
@@ -434,9 +439,9 @@ def test_update(contents_manager):
 def test_save(contents_manager):
     cm = contents_manager
     # Create a notebook
-    model = cm.new_untitled(type='notebook')
-    name = model['name']
-    path = model['path']
+    model = cm.new_untitled(type="notebook")
+    name = model["name"]
+    path = model["path"]

     # Get the model with 'content'
     full_model = cm.get(path)
@@ -444,27 +449,27 @@ def test_save(contents_manager):
     # Save the notebook
     model = cm.save(full_model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == name
-    assert model['path'] == path
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == name
+    assert model["path"] == path

     # Test in sub-directory
     # Create a directory and notebook in that directory
-    sub_dir = '/foo/'
-    _make_dir(cm, 'foo')
-    model = cm.new_untitled(path=sub_dir, type='notebook')
-    name = model['name']
-    path = model['path']
+    sub_dir = "/foo/"
+    _make_dir(cm, "foo")
+    model = cm.new_untitled(path=sub_dir, type="notebook")
+    name = model["name"]
+    path = model["path"]
     model = cm.get(path)

     # Save the notebook in the sub-directory (path unchanged)
     model = cm.save(model, path)
     assert isinstance(model, dict)
-    assert 'name' in model
-    assert 'path' in model
-    assert model['name'] == 'Untitled.ipynb'
-    assert model['path'] == 'foo/Untitled.ipynb'
+    assert "name" in model
+    assert "path" in model
+    assert model["name"] == "Untitled.ipynb"
+    assert model["path"] == "foo/Untitled.ipynb"


 def test_delete(contents_manager):
@@ -499,7 +504,7 @@ def test_rename(contents_manager):
     assert isinstance(cm.get("changed_path"), dict)

     # Ported tests on nested directory renaming from pgcontents
-    all_dirs = ['foo', 'bar', 'foo/bar', 'foo/bar/foo', 'foo/bar/foo/bar']
+    all_dirs = ["foo", "bar", "foo/bar", "foo/bar/foo", "foo/bar/foo/bar"]
     unchanged_dirs = all_dirs[:2]
     changed_dirs = all_dirs[2:]

@@ -516,7 +521,7 @@ def test_rename(contents_manager):
     # Creating a notebook in a nonexistent directory should fail
     with pytest.raises(HTTPError) as e:
         cm.new_untitled("foo/bar_diff", ext=".ipynb")
-        assert expected_http_error(e, 404)
+        assert expected_http_error(e, 404)

     cm.rename("foo/bar", "foo/bar_diff")

@@ -539,30 +544,30 @@ def test_rename(contents_manager):
 def test_delete_root(contents_manager):
     cm = contents_manager
     with pytest.raises(HTTPError) as e:
-        cm.delete('')
+        cm.delete("")
     assert expected_http_error(e, 400)


 def test_copy(contents_manager):
     cm = contents_manager
-    parent = u'å b'
-    name = u'nb √.ipynb'
-    path = u'{0}/{1}'.format(parent, name)
+    parent = u"å b"
+    name = u"nb √.ipynb"
+    path = u"{0}/{1}".format(parent, name)
     _make_dir(cm, parent)

     orig = cm.new(path=path)
     # copy with unspecified name
     copy = cm.copy(path)
-    assert copy['name'] == orig['name'].replace('.ipynb', '-Copy1.ipynb')
+    assert copy["name"] == orig["name"].replace(".ipynb", "-Copy1.ipynb")

     # copy with specified name
-    copy2 = cm.copy(path, u'å b/copy 2.ipynb')
-    assert copy2['name'] == u'copy 2.ipynb'
-    assert copy2['path'] == u'å b/copy 2.ipynb'
+    copy2 = cm.copy(path, u"å b/copy 2.ipynb")
+    assert copy2["name"] == u"copy 2.ipynb"
+    assert copy2["path"] == u"å b/copy 2.ipynb"
     # copy with specified path
-    copy2 = cm.copy(path, u'/')
-    assert copy2['name'] == name
-    assert copy2['path'] == name
+    copy2 = cm.copy(path, u"/")
+    assert copy2["name"] == name
+    assert copy2["path"] == name


 def test_mark_trusted_cells(contents_manager):
@@ -571,13 +576,13 @@ def test_mark_trusted_cells(contents_manager):
     cm.mark_trusted_cells(nb, path)
     for cell in nb.cells:
-        if cell.cell_type == 'code':
+        if cell.cell_type == "code":
             assert not cell.metadata.trusted

     cm.trust_notebook(path)
-    nb = cm.get(path)['content']
+    nb = cm.get(path)["content"]
     for cell in nb.cells:
-        if cell.cell_type == 'code':
+        if cell.cell_type == "code":
             assert cell.metadata.trusted


@@ -590,7 +595,7 @@ def test_check_and_sign(contents_manager):
     assert not cm.notary.check_signature(nb)

     cm.trust_notebook(path)
-    nb = cm.get(path)['content']
+    nb = cm.get(path)["content"]
     cm.mark_trusted_cells(nb, path)
     cm.check_and_sign(nb, path)
     assert cm.notary.check_signature(nb)
diff --git a/tests/services/kernels/test_api.py b/tests/services/kernels/test_api.py
index cfdc6b80c1..35c2520220 100644
--- a/tests/services/kernels/test_api.py
+++ b/tests/services/kernels/test_api.py
@@ -16,105 +16,77 @@
 @pytest.fixture
 def ws_fetch(auth_header, http_port):
     """fetch fixture that handles auth, base_url, and path"""
+
     def client_fetch(*parts, headers={}, params={}, **kwargs):
         # Handle URL strings
         path = url_escape(url_path_join(*parts), plus=False)
-        urlparts = urllib.parse.urlparse('ws://localhost:{}'.format(http_port))
-        urlparts = urlparts._replace(
-            path=path,
-            query=urllib.parse.urlencode(params)
-        )
+        urlparts = urllib.parse.urlparse("ws://localhost:{}".format(http_port))
+        urlparts = urlparts._replace(path=path, query=urllib.parse.urlencode(params))
         url = urlparts.geturl()
         # Add auth keys to header
         headers.update(auth_header)
         # Make request.
         req = tornado.httpclient.HTTPRequest(
-            url,
-            headers=auth_header,
-            connect_timeout=120
+            url, headers=auth_header, connect_timeout=120
         )
         return tornado.websocket.websocket_connect(req)
+
     return client_fetch


 async def test_no_kernels(fetch):
-    r = await fetch(
-        'api', 'kernels',
-        method='GET'
-    )
+    r = await fetch("api", "kernels", method="GET")
     kernels = json.loads(r.body.decode())
     assert kernels == []


 async def test_default_kernels(fetch):
-    r = await fetch(
-        'api', 'kernels',
-        method='POST',
-        allow_nonstandard_methods=True
-    )
+    r = await fetch("api", "kernels", method="POST", allow_nonstandard_methods=True)
     kernel = json.loads(r.body.decode())
-    assert r.headers['location'] == '/api/kernels/' + kernel['id']
+    assert r.headers["location"] == "/api/kernels/" + kernel["id"]
     assert r.code == 201
     assert isinstance(kernel, dict)

-    report_uri = '/api/security/csp-report'
-    expected_csp = '; '.join([
-        "frame-ancestors 'self'",
-        'report-uri ' + report_uri,
-        "default-src 'none'"
-    ])
-    assert r.headers['Content-Security-Policy'] == expected_csp
+    report_uri = "/api/security/csp-report"
+    expected_csp = "; ".join(
+        ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"]
+    )
+    assert r.headers["Content-Security-Policy"] == expected_csp


 async def test_main_kernel_handler(fetch):
     # Start the first kernel
     r = await fetch(
-        'api', 'kernels',
-        method='POST',
-        body=json.dumps({
-            'name': NATIVE_KERNEL_NAME
-        })
+        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
     kernel1 = json.loads(r.body.decode())
-    assert r.headers['location'] == '/api/kernels/' + kernel1['id']
+    assert r.headers["location"] == "/api/kernels/" + kernel1["id"]
     assert r.code == 201
     assert isinstance(kernel1, dict)

-    report_uri = '/api/security/csp-report'
-    expected_csp = '; '.join([
-        "frame-ancestors 'self'",
-        'report-uri ' + report_uri,
-        "default-src 'none'"
-    ])
-    assert r.headers['Content-Security-Policy'] == expected_csp
+    report_uri = "/api/security/csp-report"
+    expected_csp = "; ".join(
+        ["frame-ancestors 'self'", "report-uri " + report_uri, "default-src 'none'"]
+    )
+    assert r.headers["Content-Security-Policy"] == expected_csp

     # Check that the kernel is found in the kernel list
-    r = await fetch(
-        'api', 'kernels',
-        method='GET'
-    )
+    r = await fetch("api", "kernels", method="GET")
     kernel_list = json.loads(r.body.decode())
     assert r.code == 200
     assert isinstance(kernel_list, list)
-    assert kernel_list[0]['id'] == kernel1['id']
-    assert kernel_list[0]['name'] == kernel1['name']
+    assert kernel_list[0]["id"] == kernel1["id"]
+    assert kernel_list[0]["name"] == kernel1["name"]

     # Start a second kernel
     r = await fetch(
-        'api', 'kernels',
-        method='POST',
-        body=json.dumps({
-            'name': NATIVE_KERNEL_NAME
-        })
+        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
     kernel2 = json.loads(r.body.decode())
     assert isinstance(kernel2, dict)

     # Get kernel list again
-    r = await fetch(
-        'api', 'kernels',
-        method='GET'
-    )
+    r = await fetch("api", "kernels", method="GET")
     kernel_list = json.loads(r.body.decode())
     assert r.code == 200
     assert isinstance(kernel_list, list)
@@ -122,133 +94,101 @@ async def test_main_kernel_handler(fetch):

     # Interrupt a kernel
     r = await fetch(
-        'api', 'kernels', kernel2['id'], 'interrupt',
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "kernels",
+        kernel2["id"],
+        "interrupt",
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     assert r.code == 204

     # Restart a kernel
     r = await fetch(
-        'api', 'kernels', kernel2['id'], 'restart',
-        method='POST',
-        allow_nonstandard_methods=True
+        "api",
+        "kernels",
+        kernel2["id"],
+        "restart",
+        method="POST",
+        allow_nonstandard_methods=True,
     )
     restarted_kernel = json.loads(r.body.decode())
-    assert restarted_kernel['id'] == kernel2['id']
-    assert restarted_kernel['name'] == kernel2['name']
+    assert restarted_kernel["id"] == kernel2["id"]
+    assert restarted_kernel["name"] == kernel2["name"]


 async def test_kernel_handler(fetch):
     # Create a kernel
     r = await fetch(
-        'api', 'kernels',
-        method='POST',
-        body=json.dumps({
-            'name': NATIVE_KERNEL_NAME
-        })
-    )
-    kernel_id = json.loads(r.body.decode())['id']
-    r = await fetch(
-        'api', 'kernels', kernel_id,
-        method='GET'
+        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
+    kernel_id = json.loads(r.body.decode())["id"]
+    r = await fetch("api", "kernels", kernel_id, method="GET")
     kernel = json.loads(r.body.decode())
     assert r.code == 200
     assert isinstance(kernel, dict)
-    assert 'id' in kernel
-    assert kernel['id'] == kernel_id
+    assert "id" in kernel
+    assert kernel["id"] == kernel_id

     # Requests a bad kernel id.
-    bad_id = '111-111-111-111-111'
+    bad_id = "111-111-111-111-111"
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        r = await fetch(
-            'api', 'kernels', bad_id,
-            method='GET'
-        )
+        r = await fetch("api", "kernels", bad_id, method="GET")
     assert expected_http_error(e, 404)

     # Delete kernel with id.
-    r = await fetch(
-        'api', 'kernels', kernel_id,
-        method='DELETE',
-    )
+    r = await fetch("api", "kernels", kernel_id, method="DELETE",)
     assert r.code == 204

     # Get list of kernels
-    r = await fetch(
-        'api', 'kernels',
-        method='GET'
-    )
+    r = await fetch("api", "kernels", method="GET")
     kernel_list = json.loads(r.body.decode())
     assert kernel_list == []

     # Request to delete a non-existent kernel id
-    bad_id = '111-111-111-111-111'
+    bad_id = "111-111-111-111-111"
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        r = await fetch(
-            'api', 'kernels', bad_id,
-            method='DELETE'
-        )
-    assert expected_http_error(e, 404, 'Kernel does not exist: ' + bad_id)
+        r = await fetch("api", "kernels", bad_id, method="DELETE")
+    assert expected_http_error(e, 404, "Kernel does not exist: " + bad_id)


 async def test_connection(fetch, ws_fetch, http_port, auth_header):
-    print('hello')
+    print("hello")
     # Create kernel
     r = await fetch(
-        'api', 'kernels',
-        method='POST',
-        body=json.dumps({
-            'name': NATIVE_KERNEL_NAME
-        })
+        "api", "kernels", method="POST", body=json.dumps({"name": NATIVE_KERNEL_NAME})
     )
-    kid = json.loads(r.body.decode())['id']
-
+    kid = json.loads(r.body.decode())["id"]
+
     # Get kernel info
-    r = await fetch(
-        'api', 'kernels', kid,
-        method='GET'
-    )
+    r = await fetch("api", "kernels", kid, method="GET")
     model = json.loads(r.body.decode())
-    assert model['connections'] == 0
+    assert model["connections"] == 0

     time.sleep(1)
     # Open a websocket connection.
-    ws = await ws_fetch(
-        'api', 'kernels', kid, 'channels'
-    )
-
+    ws = await ws_fetch("api", "kernels", kid, "channels")
+
     # Test that it was opened.
-    r = await fetch(
-        'api', 'kernels', kid,
-        method='GET'
-    )
+    r = await fetch("api", "kernels", kid, method="GET")
     model = json.loads(r.body.decode())
-    assert model['connections'] == 1
+    assert model["connections"] == 1

     # Close websocket
     ws.close()
     # give it some time to close on the other side:
     for i in range(10):
-        r = await fetch(
-            'api', 'kernels', kid,
-            method='GET'
-        )
+        r = await fetch("api", "kernels", kid, method="GET")
         model = json.loads(r.body.decode())
-        if model['connections'] > 0:
+        if model["connections"] > 0:
             time.sleep(0.1)
         else:
             break
-
-    r = await fetch(
-        'api', 'kernels', kid,
-        method='GET'
-    )
+
+    r = await fetch("api", "kernels", kid, method="GET")
     model = json.loads(r.body.decode())
-    assert model['connections'] == 0
+    assert model["connections"] == 0


 async def test_config2(serverapp):
     assert serverapp.kernel_manager.allowed_message_types == []
-
diff --git a/tests/services/kernels/test_config.py b/tests/services/kernels/test_config.py
index 051727fbad..5487d99d52 100644
--- a/tests/services/kernels/test_config.py
+++ b/tests/services/kernels/test_config.py
@@ -4,14 +4,16 @@

 @pytest.fixture
 def config():
-    return Config({
-        'ServerApp': {
-            'MappingKernelManager': {
-                'allowed_message_types': ['kernel_info_request']
+    return Config(
+        {
+            "ServerApp": {
+                "MappingKernelManager": {
+                    "allowed_message_types": ["kernel_info_request"]
+                }
             }
         }
-    })
+    )


 def test_config(serverapp):
-    assert serverapp.kernel_manager.allowed_message_types == ['kernel_info_request']
\ No newline at end of file
+    assert serverapp.kernel_manager.allowed_message_types == ["kernel_info_request"]
diff --git a/tests/services/kernelspecs/test_api.py b/tests/services/kernelspecs/test_api.py
index 0d3a2ba387..1572c089e7 100644
--- a/tests/services/kernelspecs/test_api.py
+++ b/tests/services/kernelspecs/test_api.py
@@ -9,116 +9,94 @@


 sample_kernel_json = {
-    'argv':['cat', '{connection_file}'],
-    'display_name':'Test kernel',
+    "argv": ["cat", "{connection_file}"],
+    "display_name": "Test kernel",
 }

 some_resource = u"The very model of a modern major general"


 @pytest.fixture
 def kernelspecs(data_dir):
-    spec_names = ['sample', 'sample 2']
+    spec_names = ["sample", "sample 2"]
     for name in spec_names:
-        sample_kernel_dir = data_dir.joinpath('kernels', name)
+        sample_kernel_dir = data_dir.joinpath("kernels", name)
         sample_kernel_dir.mkdir(parents=True)
         # Create kernel json file
-        sample_kernel_file = sample_kernel_dir.joinpath('kernel.json')
+        sample_kernel_file = sample_kernel_dir.joinpath("kernel.json")
         sample_kernel_file.write_text(json.dumps(sample_kernel_json))
         # Create resources text
-        sample_kernel_resources = sample_kernel_dir.joinpath('resource.txt')
+        sample_kernel_resources = sample_kernel_dir.joinpath("resource.txt")
         sample_kernel_resources.write_text(some_resource)


 async def test_list_kernelspecs_bad(fetch, kernelspecs, data_dir):
-    bad_kernel_dir = data_dir.joinpath(data_dir, 'kernels', 'bad')
+    bad_kernel_dir = data_dir.joinpath(data_dir, "kernels", "bad")
     bad_kernel_dir.mkdir(parents=True)
-    bad_kernel_json = bad_kernel_dir.joinpath('kernel.json')
-    bad_kernel_json.write_text('garbage')
+    bad_kernel_json = bad_kernel_dir.joinpath("kernel.json")
+    bad_kernel_json.write_text("garbage")

-    r = await fetch(
-        'api', 'kernelspecs',
-        method='GET'
-    )
+    r = await fetch("api", "kernelspecs", method="GET")
     model = json.loads(r.body.decode())
     assert isinstance(model, dict)
-    assert model['default'] == NATIVE_KERNEL_NAME
-    specs = model['kernelspecs']
+    assert model["default"] == NATIVE_KERNEL_NAME
+    specs = model["kernelspecs"]
     assert isinstance(specs, dict)
     assert len(specs) > 2


 async def test_list_kernelspecs(fetch, kernelspecs):
-    r = await fetch(
-        'api', 'kernelspecs',
-        method='GET'
-    )
+    r = await fetch("api", "kernelspecs", method="GET")
     model = json.loads(r.body.decode())
     assert isinstance(model, dict)
-    assert model['default'] == NATIVE_KERNEL_NAME
-    specs = model['kernelspecs']
+    assert model["default"] == NATIVE_KERNEL_NAME
+    specs = model["kernelspecs"]
     assert isinstance(specs, dict)
     assert len(specs) > 2

     def is_sample_kernelspec(s):
-        return s['name'] == 'sample' and s['spec']['display_name'] == 'Test kernel'
+        return s["name"] == "sample" and s["spec"]["display_name"] == "Test kernel"

     def is_default_kernelspec(s):
-        return s['name'] == NATIVE_KERNEL_NAME and s['spec']['display_name'].startswith("Python")
+        return s["name"] == NATIVE_KERNEL_NAME and s["spec"]["display_name"].startswith(
+            "Python"
+        )

     assert any(is_sample_kernelspec(s) for s in specs.values()), specs
     assert any(is_default_kernelspec(s) for s in specs.values()), specs


 async def test_get_kernelspecs(fetch, kernelspecs):
-    r = await fetch(
-        'api', 'kernelspecs', 'Sample',
-        method='GET'
-    )
+    r = await fetch("api", "kernelspecs", "Sample", method="GET")
     model = json.loads(r.body.decode())
-    assert model['name'].lower() == 'sample'
-    assert isinstance(model['spec'], dict)
-    assert model['spec']['display_name'] == 'Test kernel'
-    assert isinstance(model['resources'], dict)
+    assert model["name"].lower() == "sample"
+    assert isinstance(model["spec"], dict)
+    assert model["spec"]["display_name"] == "Test kernel"
+    assert isinstance(model["resources"], dict)


 async def test_get_kernelspec_spaces(fetch, kernelspecs):
-    r = await fetch(
-        'api', 'kernelspecs', 'sample%202',
-        method='GET'
-    )
+    r = await fetch("api", "kernelspecs", "sample%202", method="GET")
     model = json.loads(r.body.decode())
-    assert model['name'].lower() == 'sample 2'
+    assert model["name"].lower() == "sample 2"


 async def test_get_nonexistant_kernelspec(fetch, kernelspecs):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        await fetch(
-            'api', 'kernelspecs', 'nonexistant',
-            method='GET'
-        )
+        await fetch("api", "kernelspecs", "nonexistant", method="GET")
     assert expected_http_error(e, 404)


 async def test_get_kernel_resource_file(fetch, kernelspecs):
-    r = await fetch(
-        'kernelspecs', 'sAmple', 'resource.txt',
-        method='GET'
-    )
-    res = r.body.decode('utf-8')
+    r = await fetch("kernelspecs", "sAmple", "resource.txt", method="GET")
+    res = r.body.decode("utf-8")
     assert res == some_resource


 async def test_get_nonexistant_resource(fetch, kernelspecs):
     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        await fetch(
-            'kernelspecs', 'nonexistant', 'resource.txt',
-            method='GET'
-        )
+        await fetch("kernelspecs", "nonexistant", "resource.txt", method="GET")
     assert expected_http_error(e, 404)

     with pytest.raises(tornado.httpclient.HTTPClientError) as e:
-        await fetch(
-            'kernelspecs', 'sample', 'nonexistant.txt',
-            method='GET'
-        )
-    assert expected_http_error(e, 404)
\ No newline at end of file
+        await fetch("kernelspecs", "sample", "nonexistant.txt", method="GET")
+    assert expected_http_error(e, 404)
diff --git a/tests/services/nbconvert/test_api.py b/tests/services/nbconvert/test_api.py
index c0633a492a..a7afb97254 100644
--- a/tests/services/nbconvert/test_api.py
+++ b/tests/services/nbconvert/test_api.py
@@ -1,13 +1,11 @@
 import json

 import pytest

+
 async def test_list_formats(fetch):
-    r = await fetch(
-        'api', 'nbconvert',
-        method='GET'
-    )
+    r = await fetch("api", "nbconvert", method="GET")
     formats = json.loads(r.body.decode())
     assert isinstance(formats, dict)
-    assert 'python' in formats
-    assert 'html' in formats
-    assert formats['python']['output_mimetype'] == 'text/x-python'
\ No newline at end of file
+    assert "python" in formats
+    assert "html" in formats
+    assert formats["python"]["output_mimetype"] == "text/x-python"
diff --git a/tests/services/sessions/test_api.py b/tests/services/sessions/test_api.py
index 31b9ef5d87..a475bc537b 100644
--- a/tests/services/sessions/test_api.py
+++ b/tests/services/sessions/test_api.py
@@ -14,7 +14,6 @@


 class SessionClient:
-
     def __init__(self, fetch_callable):
         self.fetch = fetch_callable

@@ -23,7 +22,9 @@ async def _req(self, *args, method, body=None):
             body = json.dumps(body)

         r = await self.fetch(
-            'api', 'sessions', *args,
+            "api",
+            "sessions",
+            *args,
             method=method,
             body=body,
             allow_nonstandard_methods=True
@@ -31,82 +32,65 @@ async def _req(self, *args, method, body=None):
         return r

     async def list(self):
-        return await self._req(method='GET')
+        return await self._req(method="GET")

     async def get(self, id):
-        return await self._req(id, method='GET')
-
-    async def create(
-        self,
-        path,
-        type='notebook',
-        kernel_name='python',
-        kernel_id=None):
+        return await self._req(id, method="GET")
+
+    async def create(self, path, type="notebook", kernel_name="python", kernel_id=None):
         body = {
-            'path': path,
-            'type': type,
-            'kernel': {
-                'name': kernel_name,
-                'id': kernel_id
-            }
+            "path": path,
+            "type": type,
+            "kernel": {"name": kernel_name, "id": kernel_id},
         }
-        return await self._req(method='POST', body=body)
+        return await self._req(method="POST", body=body)

     def create_deprecated(self, path):
-        body = {
-            'notebook': {
-                'path': path
-            },
-            'kernel': {
-                'name': 'python',
-                'id': 'foo'
-            }
-        }
-        return self._req(method='POST', body=body)
+        body = {"notebook": {"path": path}, "kernel": {"name": "python", "id": "foo"}}
+        return self._req(method="POST", body=body)

     def modify_path(self, id, path):
-        body = {'path': path}
-        return self._req(id, method='PATCH', body=body)
+        body = {"path": path}
+        return self._req(id, method="PATCH", body=body)

     def modify_path_deprecated(self, id, path):
-        body = {'notebook': {'path': path}}
-        return self._req(id, method='PATCH', body=body)
+        body = {"notebook": {"path": path}}
+        return self._req(id, method="PATCH", body=body)

     def modify_type(self, id, type):
-        body = {'type': type}
-        return self._req(id, method='PATCH', body=body)
+        body = {"type": type}
+        return self._req(id, method="PATCH", body=body)

     def modify_kernel_name(self, id, kernel_name):
-        body = {'kernel': {'name': kernel_name}}
-        return self._req(id, method='PATCH', body=body)
+        body = {"kernel": {"name": kernel_name}}
+        return self._req(id, method="PATCH", body=body)

     def modify_kernel_id(self, id, kernel_id):
         # Also send a dummy name to show that id takes precedence.
-        body = {'kernel': {'id': kernel_id, 'name': 'foo'}}
-        return self._req(id, method='PATCH', body=body)
+        body = {"kernel": {"id": kernel_id, "name": "foo"}}
+        return self._req(id, method="PATCH", body=body)

     async def delete(self, id):
-        return await self._req(id, method='DELETE')
+        return await self._req(id, method="DELETE")

     async def cleanup(self):
         resp = await self.list()
         sessions = j(resp)
         for session in sessions:
-            await self.delete(session['id'])
+            await self.delete(session["id"])
         time.sleep(0.1)

-
 @pytest.fixture
 def session_client(root_dir, fetch):
-    subdir = root_dir.joinpath('foo')
+    subdir = root_dir.joinpath("foo")
     subdir.mkdir()

     # Write a notebook to subdir.
     nb = new_notebook()
     nb_str = writes(nb, version=4)
-    nbpath = subdir.joinpath('nb1.ipynb')
-    nbpath.write_text(nb_str, encoding='utf-8')
+    nbpath = subdir.joinpath("nb1.ipynb")
+    nbpath.write_text(nb_str, encoding="utf-8")

     # Yield a session client
     client = SessionClient(fetch)
@@ -114,7 +98,7 @@ def session_client(root_dir, fetch):

     # Remove subdir
     shutil.rmtree(str(subdir), ignore_errors=True)
-
+

 async def test_create(session_client):
     # Make sure no sessions exist.
@@ -123,13 +107,13 @@ async def test_create(session_client):
     assert len(sessions) == 0

     # Create a session.
-    resp = await session_client.create('foo/nb1.ipynb')
+    resp = await session_client.create("foo/nb1.ipynb")
     assert resp.code == 201
     new_session = j(resp)
-    assert 'id' in new_session
-    assert new_session['path'] == 'foo/nb1.ipynb'
-    assert new_session['type'] == 'notebook'
-    assert resp.headers['Location'] == '/api/sessions/' + new_session['id']
+    assert "id" in new_session
+    assert new_session["path"] == "foo/nb1.ipynb"
+    assert new_session["type"] == "notebook"
+    assert resp.headers["Location"] == "/api/sessions/" + new_session["id"]

     # Check that the new session appears in list.
     resp = await session_client.list()
@@ -137,7 +121,7 @@ async def test_create(session_client):
     assert sessions == [new_session]

     # Retrieve that session.
-    sid = new_session['id']
+    sid = new_session["id"]
     resp = await session_client.get(sid)
     got = j(resp)
     assert got == new_session
@@ -147,64 +131,65 @@ async def test_create(session_client):


 async def test_create_file_session(session_client):
-    resp = await session_client.create('foo/nb1.py', type='file')
+    resp = await session_client.create("foo/nb1.py", type="file")
     assert resp.code == 201
     newsession = j(resp)
-    assert newsession['path'] == 'foo/nb1.py'
-    assert newsession['type'] == 'file'
+    assert newsession["path"] == "foo/nb1.py"
+    assert newsession["type"] == "file"
     await session_client.cleanup()


 async def test_create_console_session(session_client):
-    resp = await session_client.create('foo/abc123', type='console')
+    resp = await session_client.create("foo/abc123", type="console")
     assert resp.code == 201
     newsession = j(resp)
-    assert newsession['path'] == 'foo/abc123'
-    assert newsession['type'] == 'console'
+    assert newsession["path"] == "foo/abc123"
+    assert newsession["type"] == "console"
     # Need to find a better solution to this.
     await session_client.cleanup()


 async def test_create_deprecated(session_client):
-    resp = await session_client.create_deprecated('foo/nb1.ipynb')
+    resp = await session_client.create_deprecated("foo/nb1.ipynb")
     assert resp.code == 201
     newsession = j(resp)
-    assert newsession['path'] == 'foo/nb1.ipynb'
-    assert newsession['type'] == 'notebook'
-    assert newsession['notebook']['path'] == 'foo/nb1.ipynb'
+    assert newsession["path"] == "foo/nb1.ipynb"
+    assert newsession["type"] == "notebook"
+    assert newsession["notebook"]["path"] == "foo/nb1.ipynb"
     # Need to find a better solution to this.
     await session_client.cleanup()


 async def test_create_with_kernel_id(session_client, fetch):
     # create a new kernel
-    resp = await fetch('api/kernels', method='POST', allow_nonstandard_methods=True)
+    resp = await fetch("api/kernels", method="POST", allow_nonstandard_methods=True)
     kernel = j(resp)
-    resp = await session_client.create('foo/nb1.ipynb', kernel_id=kernel['id'])
+    resp = await session_client.create("foo/nb1.ipynb", kernel_id=kernel["id"])
     assert resp.code == 201
     newsession = j(resp)
-    assert 'id' in newsession
-    assert newsession['path'] == 'foo/nb1.ipynb'
-    assert newsession['kernel']['id'] == kernel['id']
-    assert resp.headers['Location'] == '/api/sessions/{0}'.format(newsession['id'])
+    assert "id" in newsession
+    assert newsession["path"] == "foo/nb1.ipynb"
+    assert newsession["kernel"]["id"] == kernel["id"]
+    assert resp.headers["Location"] == "/api/sessions/{0}".format(newsession["id"])

     resp = await session_client.list()
     sessions = j(resp)
     assert sessions == [newsession]

     # Retrieve it
-    sid = newsession['id']
+    sid = newsession["id"]
     resp = await session_client.get(sid)
     got = j(resp)
     assert got == newsession
     # Need to find a better solution to this.
     await session_client.cleanup()

+
 async def test_delete(session_client):
-    resp = await session_client.create('foo/nb1.ipynb')
+    resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
-    sid = newsession['id']
+    sid = newsession["id"]

     resp = await session_client.delete(sid)
     assert resp.code == 204
@@ -219,89 +204,93 @@ async def test_delete(session_client):
     # Need to find a better solution to this.
     await session_client.cleanup()

+
 async def test_modify_path(session_client):
-    resp = await session_client.create('foo/nb1.ipynb')
+    resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
-    sid = newsession['id']
+    sid = newsession["id"]

-    resp = await session_client.modify_path(sid, 'nb2.ipynb')
+    resp = await session_client.modify_path(sid, "nb2.ipynb")
     changed = j(resp)
-    assert changed['id'] == sid
-    assert changed['path'] == 'nb2.ipynb'
+    assert changed["id"] == sid
+    assert changed["path"] == "nb2.ipynb"
     # Need to find a better solution to this.
     await session_client.cleanup()

+
 async def test_modify_path_deprecated(session_client):
-    resp = await session_client.create('foo/nb1.ipynb')
+    resp = await session_client.create("foo/nb1.ipynb")
     newsession = j(resp)
-    sid = newsession['id']
+    sid = newsession["id"]

-    resp = await session_client.modify_path_deprecated(sid, 'nb2.ipynb')
+    resp = await session_client.modify_path_deprecated(sid, "nb2.ipynb")
     changed = j(resp)
-    assert changed['id'] == sid
-    assert changed['notebook']['path'] == 'nb2.ipynb'
+    assert changed["id"] == sid
+    assert changed["notebook"]["path"] == "nb2.ipynb"
     # Need to find a better solution to this.
await session_client.cleanup() + async def test_modify_type(session_client): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") newsession = j(resp) - sid = newsession['id'] + sid = newsession["id"] - resp = await session_client.modify_type(sid, 'console') + resp = await session_client.modify_type(sid, "console") changed = j(resp) - assert changed['id'] == sid - assert changed['type'] == 'console' + assert changed["id"] == sid + assert changed["type"] == "console" # Need to find a better solution to this. await session_client.cleanup() + async def test_modify_kernel_name(session_client, fetch): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") before = j(resp) - sid = before['id'] + sid = before["id"] - resp = await session_client.modify_kernel_name(sid, before['kernel']['name']) + resp = await session_client.modify_kernel_name(sid, before["kernel"]["name"]) after = j(resp) - assert after['id'] == sid - assert after['path'] == before['path'] - assert after['type'] == before['type'] - assert after['kernel']['id'] != before['kernel']['id'] + assert after["id"] == sid + assert after["path"] == before["path"] + assert after["type"] == before["type"] + assert after["kernel"]["id"] != before["kernel"]["id"] # check kernel list, to be sure previous kernel was cleaned up - resp = await fetch('api/kernels', method='GET') + resp = await fetch("api/kernels", method="GET") kernel_list = j(resp) - after['kernel'].pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] - assert kernel_list == [after['kernel']] + after["kernel"].pop("last_activity") + [k.pop("last_activity") for k in kernel_list] + assert kernel_list == [after["kernel"]] # Need to find a better solution to this. await session_client.cleanup() async def test_modify_kernel_id(session_client, fetch): - resp = await session_client.create('foo/nb1.ipynb') + resp = await session_client.create("foo/nb1.ipynb") before = j(resp) - sid = before['id'] + sid = before["id"] # create a new kernel - resp = await fetch('api/kernels', method='POST', allow_nonstandard_methods=True) + resp = await fetch("api/kernels", method="POST", allow_nonstandard_methods=True) kernel = j(resp) # Attach our session to the existing kernel - resp = await session_client.modify_kernel_id(sid, kernel['id']) + resp = await session_client.modify_kernel_id(sid, kernel["id"]) after = j(resp) - assert after['id'] == sid - assert after['path'] == before['path'] - assert after['type'] == before['type'] - assert after['kernel']['id'] != before['kernel']['id'] - assert after['kernel']['id'] == kernel['id'] + assert after["id"] == sid + assert after["path"] == before["path"] + assert after["type"] == before["type"] + assert after["kernel"]["id"] != before["kernel"]["id"] + assert after["kernel"]["id"] == kernel["id"] # check kernel list, to be sure previous kernel was cleaned up - resp = await fetch('api/kernels', method='GET') + resp = await fetch("api/kernels", method="GET") kernel_list = j(resp) - kernel.pop('last_activity') - [ k.pop('last_activity') for k in kernel_list ] + kernel.pop("last_activity") + [k.pop("last_activity") for k in kernel_list] assert kernel_list == [kernel] # Need to find a better solution to this. 
- await session_client.cleanup() \ No newline at end of file + await session_client.cleanup() diff --git a/tests/services/sessions/test_manager.py b/tests/services/sessions/test_manager.py index a37a8c7f0a..37e9d02456 100644 --- a/tests/services/sessions/test_manager.py +++ b/tests/services/sessions/test_manager.py @@ -9,7 +9,7 @@ class DummyKernel(object): - def __init__(self, kernel_name='python'): + def __init__(self, kernel_name="python"): self.kernel_name = kernel_name @@ -19,19 +19,20 @@ def __init__(self, kernel_name='python'): class DummyMKM(MappingKernelManager): """MappingKernelManager interface that doesn't start kernels, for testing""" + def __init__(self, *args, **kwargs): super(DummyMKM, self).__init__(*args, **kwargs) - self.id_letters = iter(u'ABCDEFGHIJK') + self.id_letters = iter(u"ABCDEFGHIJK") def _new_id(self): return next(self.id_letters) - - def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs): + + def start_kernel(self, kernel_id=None, path=None, kernel_name="python", **kwargs): kernel_id = kernel_id or self._new_id() k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name) self._kernel_connections[kernel_id] = 0 k.last_activity = dummy_date - k.execution_state = 'idle' + k.execution_state = "idle" return kernel_id def shutdown_kernel(self, kernel_id, now=False): @@ -40,15 +41,13 @@ def shutdown_kernel(self, kernel_id, now=False): @pytest.fixture def session_manager(): - return SessionManager( - kernel_manager=DummyMKM(), - contents_manager=ContentsManager()) + return SessionManager(kernel_manager=DummyMKM(), contents_manager=ContentsManager()) async def create_multiple_sessions(session_manager, *kwargs_list): sessions = [] for kwargs in kwargs_list: - kwargs.setdefault('type', 'notebook') + kwargs.setdefault("type", "notebook") session = await session_manager.create_session(**kwargs) sessions.append(session) return sessions @@ -56,49 +55,43 @@ async def create_multiple_sessions(session_manager, *kwargs_list): async def test_get_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='bar', - type='notebook' + path="/path/to/test.ipynb", kernel_name="bar", type="notebook" ) - session_id = session['id'] + session_id = session["id"] model = await session_manager.get_session(session_id=session_id) expected = { - 'id':session_id, - 'path': u'/path/to/test.ipynb', - 'notebook': {'path': u'/path/to/test.ipynb', 'name': None}, - 'type': 'notebook', - 'name': None, - 'kernel': { - 'id': 'A', - 'name': 'bar', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + "id": session_id, + "path": u"/path/to/test.ipynb", + "notebook": {"path": u"/path/to/test.ipynb", "name": None}, + "type": "notebook", + "name": None, + "kernel": { + "id": "A", + "name": "bar", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, } assert model == expected async def test_bad_get_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='foo', - type='notebook' + path="/path/to/test.ipynb", kernel_name="foo", type="notebook" ) with pytest.raises(TypeError): - await session_manager.get_session(bad_id=session['id']) + await session_manager.get_session(bad_id=session["id"]) async def test_get_session_dead_kernel(session_manager): session = await session_manager.create_session( - path='/path/to/1/test1.ipynb', - kernel_name='python', - type='notebook' + 
path="/path/to/1/test1.ipynb", kernel_name="python", type="notebook" ) # Kill the kernel - session_manager.kernel_manager.shutdown_kernel(session['kernel']['id']) + session_manager.kernel_manager.shutdown_kernel(session["kernel"]["id"]) with pytest.raises(KeyError): - await session_manager.get_session(session_id=session['id']) + await session_manager.get_session(session_id=session["id"]) # no session left listed = await session_manager.list_sessions() assert listed == [] @@ -107,50 +100,52 @@ async def test_get_session_dead_kernel(session_manager): async def test_list_session(session_manager): sessions = await create_multiple_sessions( session_manager, - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.py', type='file', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), + dict(path="/path/to/1/test1.ipynb", kernel_name="python"), + dict(path="/path/to/2/test2.py", type="file", kernel_name="python"), + dict(path="/path/to/3", name="foo", type="console", kernel_name="python"), ) sessions = await session_manager.list_sessions() expected = [ { - 'id':sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', - 'type': 'notebook', - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, - 'name': None, - 'kernel': { - 'id': 'A', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id':sessions[1]['id'], - 'path': u'/path/to/2/test2.py', - 'type': 'file', - 'name': None, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id':sessions[2]['id'], - 'path': u'/path/to/3', - 'type': 'console', - 'name': 'foo', - 'kernel': { - 'id': 'C', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - } + "id": sessions[0]["id"], + "path": u"/path/to/1/test1.ipynb", + "type": "notebook", + "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None}, + "name": None, + "kernel": { + "id": "A", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, + { + "id": sessions[1]["id"], + "path": u"/path/to/2/test2.py", + "type": "file", + "name": None, + "kernel": { + "id": "B", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, + { + "id": sessions[2]["id"], + "path": u"/path/to/3", + "type": "console", + "name": "foo", + "kernel": { + "id": "C", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, ] assert sessions == expected @@ -158,26 +153,26 @@ async def test_list_session(session_manager): async def test_list_sessions_dead_kernel(session_manager): sessions = await create_multiple_sessions( session_manager, - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.ipynb', kernel_name='python'), + dict(path="/path/to/1/test1.ipynb", kernel_name="python"), + dict(path="/path/to/2/test2.ipynb", kernel_name="python"), ) # kill one of the kernels - session_manager.kernel_manager.shutdown_kernel(sessions[0]['kernel']['id']) + session_manager.kernel_manager.shutdown_kernel(sessions[0]["kernel"]["id"]) listed = await session_manager.list_sessions() expected = [ { - 'id': sessions[1]['id'], - 'path': u'/path/to/2/test2.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': 
None}, - 'kernel': { - 'id': 'B', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + "id": sessions[1]["id"], + "path": u"/path/to/2/test2.ipynb", + "type": "notebook", + "name": None, + "notebook": {"path": u"/path/to/2/test2.ipynb", "name": None}, + "kernel": { + "id": "B", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, } ] assert listed == expected @@ -185,25 +180,24 @@ async def test_list_sessions_dead_kernel(session_manager): async def test_update_session(session_manager): session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='julia', - type='notebook' + path="/path/to/test.ipynb", kernel_name="julia", type="notebook" ) - session_id = session['id'] - await session_manager.update_session(session_id, path='/path/to/new_name.ipynb') + session_id = session["id"] + await session_manager.update_session(session_id, path="/path/to/new_name.ipynb") model = await session_manager.get_session(session_id=session_id) - expected = {'id':session_id, - 'path': u'/path/to/new_name.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None}, - 'kernel': { - 'id': 'A', - 'name':'julia', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } + expected = { + "id": session_id, + "path": u"/path/to/new_name.ipynb", + "type": "notebook", + "name": None, + "notebook": {"path": u"/path/to/new_name.ipynb", "name": None}, + "kernel": { + "id": "A", + "name": "julia", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, } assert model == expected @@ -211,50 +205,52 @@ async def test_update_session(session_manager): async def test_bad_update_session(session_manager): # try to update a session with a bad keyword ~ raise error session = await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='ir', - type='notegbook' + path="/path/to/test.ipynb", kernel_name="ir", type="notegbook" ) - session_id = session['id'] + session_id = session["id"] with pytest.raises(TypeError): - await session_manager.update_session(session_id=session_id, bad_kw='test.ipynb') # Bad keyword + await session_manager.update_session( + session_id=session_id, bad_kw="test.ipynb" + ) # Bad keyword async def test_delete_session(session_manager): sessions = await create_multiple_sessions( session_manager, - dict(path='/path/to/1/test1.ipynb', kernel_name='python'), - dict(path='/path/to/2/test2.ipynb', kernel_name='python'), - dict(path='/path/to/3', name='foo', type='console', kernel_name='python'), + dict(path="/path/to/1/test1.ipynb", kernel_name="python"), + dict(path="/path/to/2/test2.ipynb", kernel_name="python"), + dict(path="/path/to/3", name="foo", type="console", kernel_name="python"), ) - await session_manager.delete_session(sessions[1]['id']) + await session_manager.delete_session(sessions[1]["id"]) new_sessions = await session_manager.list_sessions() - expected = [{ - 'id': sessions[0]['id'], - 'path': u'/path/to/1/test1.ipynb', - 'type': 'notebook', - 'name': None, - 'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None}, - 'kernel': { - 'id': 'A', - 'name':'python', - 'connections': 0, - 'last_activity': dummy_date_s, - 'execution_state': 'idle', - } - }, { - 'id': sessions[2]['id'], - 'type': 'console', - 'path': u'/path/to/3', - 'name': 'foo', - 'kernel': { - 'id': 'C', - 'name':'python', - 'connections': 0, - 'last_activity': 
dummy_date_s, - 'execution_state': 'idle', - } - } + expected = [ + { + "id": sessions[0]["id"], + "path": u"/path/to/1/test1.ipynb", + "type": "notebook", + "name": None, + "notebook": {"path": u"/path/to/1/test1.ipynb", "name": None}, + "kernel": { + "id": "A", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, + { + "id": sessions[2]["id"], + "type": "console", + "path": u"/path/to/3", + "name": "foo", + "kernel": { + "id": "C", + "name": "python", + "connections": 0, + "last_activity": dummy_date_s, + "execution_state": "idle", + }, + }, ] assert new_sessions == expected @@ -262,12 +258,9 @@ async def test_delete_session(session_manager): async def test_bad_delete_session(session_manager): # try to delete a session that doesn't exist ~ raise error await session_manager.create_session( - path='/path/to/test.ipynb', - kernel_name='python', - type='notebook' + path="/path/to/test.ipynb", kernel_name="python", type="notebook" ) with pytest.raises(TypeError): - await session_manager.delete_session(bad_kwarg='23424') # Bad keyword + await session_manager.delete_session(bad_kwarg="23424") # Bad keyword with pytest.raises(web.HTTPError): - await session_manager.delete_session(session_id='23424') # nonexistent - + await session_manager.delete_session(session_id="23424") # nonexistent diff --git a/tests/test_config_manager.py b/tests/test_config_manager.py index 4384f69999..26b68f0ad2 100644 --- a/tests/test_config_manager.py +++ b/tests/test_config_manager.py @@ -9,44 +9,46 @@ def test_json(tmp_path): tmpdir = str(tmp_path) - root_data = dict(a=1, x=2, nest={'a':1, 'x':2}) - with open(os.path.join(tmpdir, 'foo.json'), 'w') as f: + root_data = dict(a=1, x=2, nest={"a": 1, "x": 2}) + with open(os.path.join(tmpdir, "foo.json"), "w") as f: json.dump(root_data, f) # also make a foo.d/ directory with multiple json files - os.makedirs(os.path.join(tmpdir, 'foo.d')) - with open(os.path.join(tmpdir, 'foo.d', 'a.json'), 'w') as f: - json.dump(dict(a=2, b=1, nest={'a':2, 'b':1}), f) - with open(os.path.join(tmpdir, 'foo.d', 'b.json'), 'w') as f: - json.dump(dict(a=3, b=2, c=3, nest={'a':3, 'b':2, 'c':3}, only_in_b={'x':1}), f) + os.makedirs(os.path.join(tmpdir, "foo.d")) + with open(os.path.join(tmpdir, "foo.d", "a.json"), "w") as f: + json.dump(dict(a=2, b=1, nest={"a": 2, "b": 1}), f) + with open(os.path.join(tmpdir, "foo.d", "b.json"), "w") as f: + json.dump( + dict(a=3, b=2, c=3, nest={"a": 3, "b": 2, "c": 3}, only_in_b={"x": 1}), f + ) manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) - data = manager.get('foo') - assert 'a' in data - assert 'x' in data - assert 'b' not in data - assert 'c' not in data - assert data['a'] == 1 - assert 'x' in data['nest'] + data = manager.get("foo") + assert "a" in data + assert "x" in data + assert "b" not in data + assert "c" not in data + assert data["a"] == 1 + assert "x" in data["nest"] # if we write it out, it also shouldn't pick up the subdirectoy - manager.set('foo', data) - data = manager.get('foo') + manager.set("foo", data) + data = manager.get("foo") assert data == root_data manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=True) - data = manager.get('foo') - assert 'a' in data - assert 'b' in data - assert 'c' in data + data = manager.get("foo") + assert "a" in data + assert "b" in data + assert "c" in data # files should be read in order foo.d/a.json foo.d/b.json foo.json - assert data['a'] == 1 - assert data['b'] == 2 - assert data['c'] == 3 - assert 
data['nest']['a'] == 1 - assert data['nest']['b'] == 2 - assert data['nest']['c'] == 3 - assert data['nest']['x'] == 2 + assert data["a"] == 1 + assert data["b"] == 2 + assert data["c"] == 3 + assert data["nest"]["a"] == 1 + assert data["nest"]["b"] == 2 + assert data["nest"]["c"] == 3 + assert data["nest"]["x"] == 2 # when writing out, we don't want foo.d/*.json data to be included in the root foo.json - manager.set('foo', data) + manager.set("foo", data) manager = BaseJSONConfigManager(config_dir=tmpdir, read_directory=False) - data = manager.get('foo') + data = manager.get("foo") assert data == root_data diff --git a/tests/test_files.py b/tests/test_files.py index 0185c0e2fa..82ea10b06e 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -5,81 +5,59 @@ from .conftest import expected_http_error from nbformat import writes -from nbformat.v4 import (new_notebook, - new_markdown_cell, new_code_cell, - new_output) +from nbformat.v4 import new_notebook, new_markdown_cell, new_code_cell, new_output async def test_hidden_files(fetch, serverapp, root_dir): not_hidden = [ - u'å b', - u'å b/ç. d', + u"å b", + u"å b/ç. d", ] hidden = [ - u'.å b', - u'å b/.ç d', + u".å b", + u"å b/.ç d", ] dirs = not_hidden + hidden for d in dirs: - path = root_dir / d.replace('/', os.sep) + path = root_dir / d.replace("/", os.sep) path.mkdir(parents=True, exist_ok=True) - path.joinpath('foo').write_text('foo') - path.joinpath('.foo').write_text('.foo') - + path.joinpath("foo").write_text("foo") + path.joinpath(".foo").write_text(".foo") for d in not_hidden: - path = root_dir / d.replace('/', os.sep) + path = root_dir / d.replace("/", os.sep) - r = await fetch( - 'files', d, 'foo', - method='GET' - ) - assert r.body.decode() == 'foo' + r = await fetch("files", d, "foo", method="GET") + assert r.body.decode() == "foo" with pytest.raises(tornado.httpclient.HTTPClientError) as e: - r = await fetch( - 'files', d, '.foo', - method='GET' - ) + r = await fetch("files", d, ".foo", method="GET") assert expected_http_error(e, 404) - for d in hidden: - path = root_dir / d.replace('/', os.sep) - for foo in ('foo', '.foo'): + path = root_dir / d.replace("/", os.sep) + for foo in ("foo", ".foo"): with pytest.raises(tornado.httpclient.HTTPClientError) as e: - r = await fetch( - 'files', d, foo, - method='GET' - ) + r = await fetch("files", d, foo, method="GET") assert expected_http_error(e, 404) serverapp.contents_manager.allow_hidden = True for d in not_hidden: - path = root_dir / d.replace('/', os.sep) + path = root_dir / d.replace("/", os.sep) - r = await fetch( - 'files', d, 'foo', - method='GET' - ) - assert r.body.decode() == 'foo' + r = await fetch("files", d, "foo", method="GET") + assert r.body.decode() == "foo" - r = await fetch( - 'files', d, '.foo', - method='GET' - ) - assert r.body.decode() == '.foo' + r = await fetch("files", d, ".foo", method="GET") + assert r.body.decode() == ".foo" for d in hidden: - path = root_dir / d.replace('/', os.sep) + path = root_dir / d.replace("/", os.sep) - for foo in ('foo', '.foo'): - r = await fetch( - 'files', d, foo, - method='GET' - ) + for foo in ("foo", ".foo"): + r = await fetch("files", d, foo, method="GET") assert r.body.decode() == foo @@ -87,73 +65,56 @@ async def test_contents_manager(fetch, serverapp, root_dir): "make sure ContentsManager returns right files (ipynb, bin, txt)." 
nb = new_notebook( cells=[ - new_markdown_cell(u'Created by test ³'), - new_code_cell("print(2*6)", outputs=[ - new_output("stream", text="12"), - ]) + new_markdown_cell(u"Created by test ³"), + new_code_cell("print(2*6)", outputs=[new_output("stream", text="12"),]), ] ) - root_dir.joinpath('testnb.ipynb').write_text(writes(nb, version=4), encoding='utf-8') - root_dir.joinpath('test.bin').write_bytes(b'\xff' + os.urandom(5)) - root_dir.joinpath('test.txt').write_text('foobar') - - r = await fetch( - 'files/testnb.ipynb', - method='GET' + root_dir.joinpath("testnb.ipynb").write_text( + writes(nb, version=4), encoding="utf-8" ) + root_dir.joinpath("test.bin").write_bytes(b"\xff" + os.urandom(5)) + root_dir.joinpath("test.txt").write_text("foobar") + + r = await fetch("files/testnb.ipynb", method="GET") assert r.code == 200 - assert 'print(2*6)' in r.body.decode('utf-8') + assert "print(2*6)" in r.body.decode("utf-8") - r = await fetch( - 'files/test.bin', - method='GET' - ) + r = await fetch("files/test.bin", method="GET") assert r.code == 200 - assert r.headers['content-type'] == 'application/octet-stream' - assert r.body[:1] == b'\xff' + assert r.headers["content-type"] == "application/octet-stream" + assert r.body[:1] == b"\xff" assert len(r.body) == 6 - - r = await fetch( - 'files/test.txt', - method='GET' - ) + + r = await fetch("files/test.txt", method="GET") assert r.code == 200 - assert r.headers['content-type'] == 'text/plain; charset=UTF-8' - assert r.body.decode() == 'foobar' + assert r.headers["content-type"] == "text/plain; charset=UTF-8" + assert r.body.decode() == "foobar" async def test_download(fetch, serverapp, root_dir): - text = 'hello' - root_dir.joinpath('test.txt').write_text(text) + text = "hello" + root_dir.joinpath("test.txt").write_text(text) - r = await fetch( - 'files', 'test.txt', - method='GET' - ) - disposition = r.headers.get('Content-Disposition', '') - assert 'attachment' not in disposition + r = await fetch("files", "test.txt", method="GET") + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" not in disposition - r = await fetch( - 'files', 'test.txt', - method='GET', - params={'download': True} - ) - disposition = r.headers.get('Content-Disposition', '') - assert 'attachment' in disposition + r = await fetch("files", "test.txt", method="GET", params={"download": True}) + disposition = r.headers.get("Content-Disposition", "") + assert "attachment" in disposition assert "filename*=utf-8''test.txt" in disposition async def test_old_files_redirect(fetch, serverapp, root_dir): """pre-2.0 'files/' prefixed links are properly redirected""" - root_dir.joinpath('files').mkdir(parents=True, exist_ok=True) - root_dir.joinpath('sub', 'files').mkdir(parents=True, exist_ok=True) - + root_dir.joinpath("files").mkdir(parents=True, exist_ok=True) + root_dir.joinpath("sub", "files").mkdir(parents=True, exist_ok=True) - for prefix in ('', 'sub'): - root_dir.joinpath(prefix, 'files', 'f1.txt').write_text(prefix + '/files/f1') - root_dir.joinpath(prefix, 'files', 'f2.txt').write_text(prefix + '/files/f2') - root_dir.joinpath(prefix, 'f2.txt').write_text(prefix + '/f2') - root_dir.joinpath(prefix, 'f3.txt').write_text(prefix + '/f3') + for prefix in ("", "sub"): + root_dir.joinpath(prefix, "files", "f1.txt").write_text(prefix + "/files/f1") + root_dir.joinpath(prefix, "files", "f2.txt").write_text(prefix + "/files/f2") + root_dir.joinpath(prefix, "f2.txt").write_text(prefix + "/f2") + root_dir.joinpath(prefix, "f3.txt").write_text(prefix + "/f3") # 
These depend on the tree handlers # @@ -171,4 +132,4 @@ async def test_old_files_redirect(fetch, serverapp, root_dir): # r = self.request('GET', 'files/test.txt?download=1') # disposition = r.headers.get('Content-Disposition', '') # self.assertIn('attachment', disposition) - # self.assertIn("filename*=utf-8''test.txt", disposition) \ No newline at end of file + # self.assertIn("filename*=utf-8''test.txt", disposition) diff --git a/tests/test_gateway.py b/tests/test_gateway.py index 4ad4d71a68..944483f62a 100644 --- a/tests/test_gateway.py +++ b/tests/test_gateway.py @@ -18,14 +18,29 @@ def generate_kernelspec(name): - argv_stanza = ['python', '-m', 'ipykernel_launcher', '-f', '{connection_file}'] - spec_stanza = {'spec': {'argv': argv_stanza, 'env': {}, 'display_name': name, 'language': 'python', 'interrupt_mode': 'signal', 'metadata': {}}} - kernelspec_stanza = {'name': name, 'spec': spec_stanza, 'resources': {}} + argv_stanza = ["python", "-m", "ipykernel_launcher", "-f", "{connection_file}"] + spec_stanza = { + "spec": { + "argv": argv_stanza, + "env": {}, + "display_name": name, + "language": "python", + "interrupt_mode": "signal", + "metadata": {}, + } + } + kernelspec_stanza = {"name": name, "spec": spec_stanza, "resources": {}} return kernelspec_stanza # We'll mock up two kernelspecs - kspec_foo and kspec_bar -kernelspecs = {'default': 'kspec_foo', 'kernelspecs': {'kspec_foo': generate_kernelspec('kspec_foo'), 'kspec_bar': generate_kernelspec('kspec_bar')}} +kernelspecs = { + "default": "kspec_foo", + "kernelspecs": { + "kspec_foo": generate_kernelspec("kspec_foo"), + "kspec_bar": generate_kernelspec("kspec_bar"), + }, +} # maintain a dictionary of expected running kernels. Key = kernel_id, Value = model. @@ -34,102 +49,140 @@ def generate_kernelspec(name): def generate_model(name): """Generate a mocked kernel model. 
Caller is responsible for adding model to running_kernels dictionary.""" - dt = datetime.utcnow().isoformat() + 'Z' + dt = datetime.utcnow().isoformat() + "Z" kernel_id = str(uuid.uuid4()) - model = {'id': kernel_id, 'name': name, 'last_activity': str(dt), 'execution_state': 'idle', 'connections': 1} + model = { + "id": kernel_id, + "name": name, + "last_activity": str(dt), + "execution_state": "idle", + "connections": 1, + } return model async def mock_gateway_request(url, **kwargs): - method = 'GET' - if kwargs['method']: - method = kwargs['method'] + method = "GET" + if kwargs["method"]: + method = kwargs["method"] request = HTTPRequest(url=url, **kwargs) endpoint = str(url) # Fetch all kernelspecs - if endpoint.endswith('/api/kernelspecs') and method == 'GET': + if endpoint.endswith("/api/kernelspecs") and method == "GET": response_buf = StringIO(str_to_unicode(json.dumps(kernelspecs))) - response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + response = await gen.maybe_future( + HTTPResponse(request, 200, buffer=response_buf) + ) return response # Fetch named kernelspec - if endpoint.rfind('/api/kernelspecs/') >= 0 and method == 'GET': - requested_kernelspec = endpoint.rpartition('/')[2] - kspecs = kernelspecs.get('kernelspecs') + if endpoint.rfind("/api/kernelspecs/") >= 0 and method == "GET": + requested_kernelspec = endpoint.rpartition("/")[2] + kspecs = kernelspecs.get("kernelspecs") if requested_kernelspec in kspecs: - response_buf = StringIO(str_to_unicode(json.dumps(kspecs.get(requested_kernelspec)))) - response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + response_buf = StringIO( + str_to_unicode(json.dumps(kspecs.get(requested_kernelspec))) + ) + response = await gen.maybe_future( + HTTPResponse(request, 200, buffer=response_buf) + ) return response else: - raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) + raise HTTPError( + 404, message="Kernelspec does not exist: %s" % requested_kernelspec + ) # Create kernel - if endpoint.endswith('/api/kernels') and method == 'POST': - json_body = json.loads(kwargs['body']) - name = json_body.get('name') - env = json_body.get('env') - kspec_name = env.get('KERNEL_KSPEC_NAME') - assert name == kspec_name # Ensure that KERNEL_ env values get propagated + if endpoint.endswith("/api/kernels") and method == "POST": + json_body = json.loads(kwargs["body"]) + name = json_body.get("name") + env = json_body.get("env") + kspec_name = env.get("KERNEL_KSPEC_NAME") + assert name == kspec_name # Ensure that KERNEL_ env values get propagated model = generate_model(name) - running_kernels[model.get('id')] = model # Register model as a running kernel + running_kernels[model.get("id")] = model # Register model as a running kernel response_buf = StringIO(str_to_unicode(json.dumps(model))) - response = await gen.maybe_future(HTTPResponse(request, 201, buffer=response_buf)) + response = await gen.maybe_future( + HTTPResponse(request, 201, buffer=response_buf) + ) return response # Fetch list of running kernels - if endpoint.endswith('/api/kernels') and method == 'GET': + if endpoint.endswith("/api/kernels") and method == "GET": kernels = [] for kernel_id in running_kernels.keys(): model = running_kernels.get(kernel_id) kernels.append(model) response_buf = StringIO(str_to_unicode(json.dumps(kernels))) - response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + response = await gen.maybe_future( + HTTPResponse(request, 200, 
buffer=response_buf) + ) return response # Interrupt or restart existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'POST': - requested_kernel_id, sep, action = endpoint.rpartition('/api/kernels/')[2].rpartition('/') + if endpoint.rfind("/api/kernels/") >= 0 and method == "POST": + requested_kernel_id, sep, action = endpoint.rpartition("/api/kernels/")[ + 2 + ].rpartition("/") - if action == 'interrupt': + if action == "interrupt": if requested_kernel_id in running_kernels: response = await gen.maybe_future(HTTPResponse(request, 204)) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) - elif action == 'restart': + raise HTTPError( + 404, message="Kernel does not exist: %s" % requested_kernel_id + ) + elif action == "restart": if requested_kernel_id in running_kernels: - response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) - response = await gen.maybe_future(HTTPResponse(request, 204, buffer=response_buf)) + response_buf = StringIO( + str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id))) + ) + response = await gen.maybe_future( + HTTPResponse(request, 204, buffer=response_buf) + ) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError( + 404, message="Kernel does not exist: %s" % requested_kernel_id + ) else: - raise HTTPError(404, message='Bad action detected: %s' % action) + raise HTTPError(404, message="Bad action detected: %s" % action) # Shutdown existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': - requested_kernel_id = endpoint.rpartition('/')[2] - running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set + if endpoint.rfind("/api/kernels/") >= 0 and method == "DELETE": + requested_kernel_id = endpoint.rpartition("/")[2] + running_kernels.pop( + requested_kernel_id + ) # Simulate shutdown by removing kernel from running set response = await gen.maybe_future(HTTPResponse(request, 204)) return response # Fetch existing kernel - if endpoint.rfind('/api/kernels/') >= 0 and method == 'GET': - requested_kernel_id = endpoint.rpartition('/')[2] + if endpoint.rfind("/api/kernels/") >= 0 and method == "GET": + requested_kernel_id = endpoint.rpartition("/")[2] if requested_kernel_id in running_kernels: - response_buf = StringIO(str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id)))) - response = await gen.maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + response_buf = StringIO( + str_to_unicode(json.dumps(running_kernels.get(requested_kernel_id))) + ) + response = await gen.maybe_future( + HTTPResponse(request, 200, buffer=response_buf) + ) return response else: - raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) + raise HTTPError( + 404, message="Kernel does not exist: %s" % requested_kernel_id + ) -mocked_gateway = patch('jupyter_server.gateway.managers.gateway_request', mock_gateway_request) -mock_gateway_url = 'http://mock-gateway-server:8889' -mock_http_user = 'alice' +mocked_gateway = patch( + "jupyter_server.gateway.managers.gateway_request", mock_gateway_request +) +mock_gateway_url = "http://mock-gateway-server:8889" +mock_http_user = "alice" @pytest.fixture @@ -137,10 +190,10 @@ def init_gateway(monkeypatch): """Initializes the server for use as a gateway client. """ # Clear the singleton first since previous tests may not have used a gateway. 
GatewayClient.clear_instance() - monkeypatch.setenv('JUPYTER_GATEWAY_URL', mock_gateway_url) - monkeypatch.setenv('JUPYTER_GATEWAY_HTTP_USER', mock_http_user) - monkeypatch.setenv('JUPYTER_GATEWAY_REQUEST_TIMEOUT', '44.4') - monkeypatch.setenv('JUPYTER_GATEWAY_CONNECT_TIMEOUT', '44.4') + monkeypatch.setenv("JUPYTER_GATEWAY_URL", mock_gateway_url) + monkeypatch.setenv("JUPYTER_GATEWAY_HTTP_USER", mock_http_user) + monkeypatch.setenv("JUPYTER_GATEWAY_REQUEST_TIMEOUT", "44.4") + monkeypatch.setenv("JUPYTER_GATEWAY_CONNECT_TIMEOUT", "44.4") yield GatewayClient.clear_instance() @@ -149,7 +202,10 @@ async def test_gateway_env_options(init_gateway, serverapp): assert serverapp.gateway_config.gateway_enabled is True assert serverapp.gateway_config.url == mock_gateway_url assert serverapp.gateway_config.http_user == mock_http_user - assert serverapp.gateway_config.connect_timeout == serverapp.gateway_config.request_timeout + assert ( + serverapp.gateway_config.connect_timeout + == serverapp.gateway_config.request_timeout + ) assert serverapp.gateway_config.connect_timeout == 44.4 @@ -157,11 +213,10 @@ async def test_gateway_cli_options(configurable_serverapp): argv = [ "--gateway-url='" + mock_gateway_url + "'", "--GatewayClient.http_user='" + mock_http_user + "'", - '--GatewayClient.connect_timeout=44.4', - '--GatewayClient.request_timeout=44.4' + "--GatewayClient.connect_timeout=44.4", + "--GatewayClient.request_timeout=44.4", ] - GatewayClient.clear_instance() app = configurable_serverapp(argv=argv) @@ -175,41 +230,32 @@ async def test_gateway_cli_options(configurable_serverapp): async def test_gateway_class_mappings(init_gateway, serverapp): # Ensure appropriate class mappings are in place. - assert serverapp.kernel_manager_class.__name__ == 'GatewayKernelManager' - assert serverapp.session_manager_class.__name__ == 'GatewaySessionManager' - assert serverapp.kernel_spec_manager_class.__name__ == 'GatewayKernelSpecManager' + assert serverapp.kernel_manager_class.__name__ == "GatewayKernelManager" + assert serverapp.session_manager_class.__name__ == "GatewaySessionManager" + assert serverapp.kernel_spec_manager_class.__name__ == "GatewayKernelSpecManager" async def test_gateway_get_kernelspecs(init_gateway, fetch): # Validate that kernelspecs come from gateway. 
with mocked_gateway: - r = await fetch( - 'api', 'kernelspecs', - method='GET' - ) + r = await fetch("api", "kernelspecs", method="GET") assert r.code == 200 - content = json.loads(r.body.decode('utf-8')) - kspecs = content.get('kernelspecs') + content = json.loads(r.body.decode("utf-8")) + kspecs = content.get("kernelspecs") assert len(kspecs) == 2 - assert kspecs.get('kspec_bar').get('name') == 'kspec_bar' + assert kspecs.get("kspec_bar").get("name") == "kspec_bar" async def test_gateway_get_named_kernelspec(init_gateway, fetch): # Validate that a specific kernelspec can be retrieved from gateway (and an invalid spec can't) with mocked_gateway: - r = await fetch( - 'api', 'kernelspecs', 'kspec_foo', - method='GET' - ) + r = await fetch("api", "kernelspecs", "kspec_foo", method="GET") assert r.code == 200 - kspec_foo = json.loads(r.body.decode('utf-8')) - assert kspec_foo.get('name') == 'kspec_foo' + kspec_foo = json.loads(r.body.decode("utf-8")) + assert kspec_foo.get("name") == "kspec_foo" with pytest.raises(tornado.httpclient.HTTPClientError) as e: - await fetch( - 'api', 'kernelspecs', 'no_such_spec', - method='GET' - ) + await fetch("api", "kernelspecs", "no_such_spec", method="GET") assert expected_http_error(e, 404) @@ -217,7 +263,7 @@ async def test_gateway_session_lifecycle(init_gateway, root_dir, fetch): # Validate session lifecycle functions; create and delete. # create - session_id, kernel_id = await create_session(root_dir, fetch, 'kspec_foo') + session_id, kernel_id = await create_session(root_dir, fetch, "kspec_foo") # ensure kernel still considered running assert await is_kernel_running(fetch, kernel_id) is True @@ -243,7 +289,7 @@ async def test_gateway_kernel_lifecycle(init_gateway, fetch): # Validate kernel lifecycle functions; create, interrupt, restart and delete. # create - kernel_id = await create_kernel(fetch, 'kspec_bar') + kernel_id = await create_kernel(fetch, "kspec_bar") # ensure kernel still considered running assert await is_kernel_running(fetch, kernel_id) is True @@ -273,32 +319,28 @@ async def create_session(root_dir, fetch, kernel_name): which then uses the gateway for kernel management. """ with mocked_gateway: - nb_path = root_dir / 'testgw.ipynb' - body = json.dumps({'path': str(nb_path), - 'type': 'notebook', - 'kernel': {'name': kernel_name}}) + nb_path = root_dir / "testgw.ipynb" + body = json.dumps( + {"path": str(nb_path), "type": "notebook", "kernel": {"name": kernel_name}} + ) # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method - os.environ['KERNEL_KSPEC_NAME'] = kernel_name + os.environ["KERNEL_KSPEC_NAME"] = kernel_name # Create the kernel... (also tests get_kernel) - r = await fetch( - 'api', 'sessions', - method='POST', - body=body - ) + r = await fetch("api", "sessions", method="POST", body=body) assert r.code == 201 - model = json.loads(r.body.decode('utf-8')) - assert model.get('path') == str(nb_path) - kernel_id = model.get('kernel').get('id') + model = json.loads(r.body.decode("utf-8")) + assert model.get("path") == str(nb_path) + kernel_id = model.get("kernel").get("id") # ensure its in the running_kernels and name matches. 
running_kernel = running_kernels.get(kernel_id) - assert kernel_id == running_kernel.get('id') - assert model.get('kernel').get('name') == running_kernel.get('name') - session_id = model.get('id') + assert kernel_id == running_kernel.get("id") + assert model.get("kernel").get("name") == running_kernel.get("name") + session_id = model.get("id") # restore env - os.environ.pop('KERNEL_KSPEC_NAME') + os.environ.pop("KERNEL_KSPEC_NAME") return session_id, kernel_id @@ -307,12 +349,9 @@ async def delete_session(fetch, session_id): """ with mocked_gateway: # Delete the session (and kernel) - r = await fetch( - 'api', 'sessions', session_id, - method='DELETE' - ) + r = await fetch("api", "sessions", session_id, method="DELETE") assert r.code == 204 - assert r.reason == 'No Content' + assert r.reason == "No Content" async def is_kernel_running(fetch, kernel_id): @@ -320,15 +359,12 @@ async def is_kernel_running(fetch, kernel_id): """ with mocked_gateway: # Get list of running kernels - r = await fetch( - 'api', 'kernels', - method='GET' - ) + r = await fetch("api", "kernels", method="GET") assert r.code == 200 - kernels = json.loads(r.body.decode('utf-8')) + kernels = json.loads(r.body.decode("utf-8")) assert len(kernels) == len(running_kernels) for model in kernels: - if model.get('id') == kernel_id: + if model.get("id") == kernel_id: return True return False @@ -337,26 +373,22 @@ async def create_kernel(fetch, kernel_name): """Issues request to retart the given kernel """ with mocked_gateway: - body = json.dumps({'name': kernel_name}) + body = json.dumps({"name": kernel_name}) # add a KERNEL_ value to the current env and we'll ensure that that value exists in the mocked method - os.environ['KERNEL_KSPEC_NAME'] = kernel_name + os.environ["KERNEL_KSPEC_NAME"] = kernel_name - r = await fetch( - 'api', 'kernels', - method='POST', - body=body - ) + r = await fetch("api", "kernels", method="POST", body=body) assert r.code == 201 - model = json.loads(r.body.decode('utf-8')) - kernel_id = model.get('id') + model = json.loads(r.body.decode("utf-8")) + kernel_id = model.get("id") # ensure its in the running_kernels and name matches. running_kernel = running_kernels.get(kernel_id) - assert kernel_id == running_kernel.get('id') - assert model.get('name') == kernel_name + assert kernel_id == running_kernel.get("id") + assert model.get("name") == kernel_name # restore env - os.environ.pop('KERNEL_KSPEC_NAME') + os.environ.pop("KERNEL_KSPEC_NAME") return kernel_id @@ -365,12 +397,15 @@ async def interrupt_kernel(fetch, kernel_id): """ with mocked_gateway: r = await fetch( - 'api', 'kernels', kernel_id, 'interrupt', - method='POST', - allow_nonstandard_methods=True + "api", + "kernels", + kernel_id, + "interrupt", + method="POST", + allow_nonstandard_methods=True, ) assert r.code == 204 - assert r.reason == 'No Content' + assert r.reason == "No Content" async def restart_kernel(fetch, kernel_id): @@ -378,17 +413,20 @@ async def restart_kernel(fetch, kernel_id): """ with mocked_gateway: r = await fetch( - 'api', 'kernels', kernel_id, 'restart', - method='POST', - allow_nonstandard_methods=True + "api", + "kernels", + kernel_id, + "restart", + method="POST", + allow_nonstandard_methods=True, ) assert r.code == 200 - model = json.loads(r.body.decode('utf-8')) - restarted_kernel_id = model.get('id') + model = json.loads(r.body.decode("utf-8")) + restarted_kernel_id = model.get("id") # ensure its in the running_kernels and name matches. 
running_kernel = running_kernels.get(restarted_kernel_id) - assert restarted_kernel_id == running_kernel.get('id') - assert model.get('name') == running_kernel.get('name') + assert restarted_kernel_id == running_kernel.get("id") + assert model.get("name") == running_kernel.get("name") async def delete_kernel(fetch, kernel_id): @@ -396,9 +434,6 @@ async def delete_kernel(fetch, kernel_id): """ with mocked_gateway: # Delete the session (and kernel) - r = await fetch( - 'api', 'kernels', kernel_id, - method='DELETE' - ) + r = await fetch("api", "kernels", kernel_id, method="DELETE") assert r.code == 204 - assert r.reason == 'No Content' + assert r.reason == "No Content" diff --git a/tests/test_paths.py b/tests/test_paths.py index 45f60b2d23..774ab38c53 100644 --- a/tests/test_paths.py +++ b/tests/test_paths.py @@ -4,28 +4,30 @@ # build regexps that tornado uses: -path_pat = re.compile('^' + '/x%s' % path_regex + '$') +path_pat = re.compile("^" + "/x%s" % path_regex + "$") + def test_path_regex(): for path in ( - '/x', - '/x/', - '/x/foo', - '/x/foo.ipynb', - '/x/foo/bar', - '/x/foo/bar.txt', + "/x", + "/x/", + "/x/foo", + "/x/foo.ipynb", + "/x/foo/bar", + "/x/foo/bar.txt", ): assert re.match(path_pat, path) + def test_path_regex_bad(): for path in ( - '/xfoo', - '/xfoo/', - '/xfoo/bar', - '/xfoo/bar/', - '/x/foo/bar/', - '/x//foo', - '/y', - '/y/x/foo', + "/xfoo", + "/xfoo/", + "/xfoo/bar", + "/xfoo/bar/", + "/x/foo/bar/", + "/x//foo", + "/y", + "/y/x/foo", ): assert re.match(path_pat, path) is None diff --git a/tests/test_serialize.py b/tests/test_serialize.py index 07947dc549..da8d2dc0de 100644 --- a/tests/test_serialize.py +++ b/tests/test_serialize.py @@ -8,18 +8,19 @@ deserialize_binary_message, ) + def test_serialize_binary(): s = Session() - msg = s.msg('data_pub', content={'a': 'b'}) - msg['buffers'] = [ memoryview(os.urandom(3)) for i in range(3) ] + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(3)) for i in range(3)] bmsg = serialize_binary_message(msg) assert isinstance(bmsg, bytes) def test_deserialize_binary(): s = Session() - msg = s.msg('data_pub', content={'a': 'b'}) - msg['buffers'] = [ memoryview(os.urandom(2)) for i in range(3) ] + msg = s.msg("data_pub", content={"a": "b"}) + msg["buffers"] = [memoryview(os.urandom(2)) for i in range(3)] bmsg = serialize_binary_message(msg) msg2 = deserialize_binary_message(bmsg) - assert msg2 == msg \ No newline at end of file + assert msg2 == msg diff --git a/tests/test_serverapp.py b/tests/test_serverapp.py index 8b1ed09bd3..5521bbaad8 100644 --- a/tests/test_serverapp.py +++ b/tests/test_serverapp.py @@ -1,4 +1,3 @@ - import os import getpass import pathlib @@ -15,17 +14,17 @@ from jupyter_server.serverapp import ( - ServerApp, + ServerApp, list_running_servers, JupyterPasswordApp, - JupyterServerStopApp + JupyterServerStopApp, ) from jupyter_server.auth.security import passwd_check def test_help_output(): """jupyter server --help-all works""" - check_help_all_output('jupyter_server') + check_help_all_output("jupyter_server") def test_server_info_file(tmp_path, configurable_serverapp): @@ -35,12 +34,12 @@ def test_server_info_file(tmp_path, configurable_serverapp): servers = list(list_running_servers(app.runtime_dir)) assert len(servers) == 1 - sinfo = servers[0] - - assert sinfo['port'] == app.port - assert sinfo['url'] == app.connection_url - assert sinfo['version'] == app.version - + sinfo = servers[0] + + assert sinfo["port"] == app.port + assert sinfo["url"] == app.connection_url + assert 
sinfo["version"] == app.version + app.remove_server_info_file() assert list(list_running_servers(app.runtime_dir)) == [] @@ -54,17 +53,13 @@ def test_root_dir(tmp_path, configurable_serverapp): # Build a list of invalid paths @pytest.fixture( - params=[ - ('notebooks',), - ('root', 'dir', 'is', 'missing'), - ('test.txt',) - ] + params=[("notebooks",), ("root", "dir", "is", "missing"), ("test.txt",)] ) def invalid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) - # If the path is a file, create it. - if os.path.splitext(str(path))[1] != '': - path.write_text('') + # If the path is a file, create it. + if os.path.splitext(str(path))[1] != "": + path.write_text("") return str(path) @@ -73,13 +68,8 @@ def test_invalid_root_dir(invalid_root_dir, configurable_serverapp): with pytest.raises(TraitError): app.root_dir = invalid_root_dir -@pytest.fixture( - params=[ - ('/',), - ('first-level',), - ('first-level', 'second-level') - ] -) + +@pytest.fixture(params=[("/",), ("first-level",), ("first-level", "second-level")]) def valid_root_dir(tmp_path, request): path = tmp_path.joinpath(*request.param) if not path.exists(): @@ -87,39 +77,39 @@ def valid_root_dir(tmp_path, request): path.mkdir(parents=True) return str(path) + def test_valid_root_dir(valid_root_dir, configurable_serverapp): app = configurable_serverapp(root_dir=valid_root_dir) root_dir = valid_root_dir - # If nested path, the last slash should + # If nested path, the last slash should # be stripped by the root_dir trait. - if root_dir != '/': - root_dir = valid_root_dir.rstrip('/') + if root_dir != "/": + root_dir = valid_root_dir.rstrip("/") assert app.root_dir == root_dir def test_generate_config(tmp_path, configurable_serverapp): app = configurable_serverapp(config_dir=str(tmp_path)) - app.initialize(['--generate-config', '--allow-root']) + app.initialize(["--generate-config", "--allow-root"]) with pytest.raises(NoStart): app.start() - assert tmp_path.joinpath('jupyter_server_config.py').exists() + assert tmp_path.joinpath("jupyter_server_config.py").exists() def test_server_password(tmp_path, configurable_serverapp): - password = 'secret' - with patch.dict( - 'os.environ', {'JUPYTER_CONFIG_DIR': str(tmp_path)} - ), patch.object(getpass, 'getpass', return_value=password): + password = "secret" + with patch.dict("os.environ", {"JUPYTER_CONFIG_DIR": str(tmp_path)}), patch.object( + getpass, "getpass", return_value=password + ): app = JupyterPasswordApp(log_level=logging.ERROR) app.initialize([]) app.start() sv = configurable_serverapp() sv.load_config_file() - assert sv.password != '' + assert sv.password != "" passwd_check(sv.password, password) def test_list_running_servers(serverapp, app): servers = list(list_running_servers(serverapp.runtime_dir)) assert len(servers) >= 1 - diff --git a/tests/test_utils.py b/tests/test_utils.py index 039d86df33..fa994673e7 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,37 +4,36 @@ import pytest from traitlets.tests.utils import check_help_all_output -from jupyter_server.utils import url_escape, url_unescape, is_hidden, is_file_hidden, secure_write +from jupyter_server.utils import ( + url_escape, + url_unescape, + is_hidden, + is_file_hidden, + secure_write, +) from ipython_genutils.py3compat import cast_unicode from ipython_genutils.tempdir import TemporaryDirectory from ipython_genutils.testing.decorators import skip_if_not_win32, skip_win32 def test_help_output(): - check_help_all_output('jupyter_server') - + check_help_all_output("jupyter_server") 
@pytest.mark.parametrize( - 'unescaped,escaped', + "unescaped,escaped", [ + ("/this is a test/for spaces/", "/this%20is%20a%20test/for%20spaces/"), + ("notebook with space.ipynb", "notebook%20with%20space.ipynb"), ( - '/this is a test/for spaces/', - '/this%20is%20a%20test/for%20spaces/' + "/path with a/notebook and space.ipynb", + "/path%20with%20a/notebook%20and%20space.ipynb", ), ( - 'notebook with space.ipynb', - 'notebook%20with%20space.ipynb' + "/ !@$#%^&* / test %^ notebook @#$ name.ipynb", + "/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb", ), - ( - '/path with a/notebook and space.ipynb', - '/path%20with%20a/notebook%20and%20space.ipynb' - ), - ( - '/ !@$#%^&* / test %^ notebook @#$ name.ipynb', - '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb' - ) - ] + ], ) def test_url_escaping(unescaped, escaped): # Test escaping. @@ -47,25 +46,25 @@ def test_url_escaping(unescaped, escaped): def test_is_hidden(tmp_path): root = str(tmp_path) - subdir1_path = tmp_path / 'subdir' + subdir1_path = tmp_path / "subdir" subdir1_path.mkdir() subdir1 = str(subdir1_path) assert not is_hidden(subdir1, root) assert not is_file_hidden(subdir1) - subdir2_path = tmp_path / '.subdir2' + subdir2_path = tmp_path / ".subdir2" subdir2_path.mkdir() subdir2 = str(subdir2_path) assert is_hidden(subdir2, root) assert is_file_hidden(subdir2) - subdir34_path = tmp_path / 'subdir3' / '.subdir4' + subdir34_path = tmp_path / "subdir3" / ".subdir4" subdir34_path.mkdir(parents=True) subdir34 = str(subdir34_path) assert is_hidden(subdir34, root) assert is_hidden(subdir34) - subdir56_path = tmp_path / '.subdir5' / 'subdir6' + subdir56_path = tmp_path / ".subdir5" / "subdir6" subdir56_path.mkdir(parents=True) subdir56 = str(subdir56_path) assert is_hidden(subdir56, root) @@ -78,7 +77,7 @@ def test_is_hidden(tmp_path): def test_is_hidden_win32(tmp_path): root = str(tmp_path) root = cast_unicode(root) - subdir1 = tmp_path / 'subdir' + subdir1 = tmp_path / "subdir" subdir1.mkdir() assert not is_hidden(str(subdir1), root) ctypes.windll.kernel32.SetFileAttributesW(str(subdir1), 0x02) diff --git a/tests/test_version.py b/tests/test_version.py index 43f2db1cae..f30863fc38 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -4,38 +4,30 @@ from jupyter_server import __version__ -pep440re = re.compile('^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$') +pep440re = re.compile(r"^(\d+)\.(\d+)\.(\d+((a|b|rc)\d+)?)(\.post\d+)?(\.dev\d*)?$") + def raise_on_bad_version(version): if not pep440re.match(version): - raise ValueError("Versions String does apparently not match Pep 440 specification, " - "which might lead to sdist and wheel being seen as 2 different release. " - "E.g: do not use dots for beta/alpha/rc markers.") + raise ValueError( + "Versions String does apparently not match Pep 440 specification, " + "which might lead to sdist and wheel being seen as 2 different release. " + "E.g: do not use dots for beta/alpha/rc markers." 
+ ) + # --------- Meta test to test the versioning tests ------------- + @pytest.mark.parametrize( - 'version', - [ - '4.1.0.b1', - '4.1.b1', - '4.2', - 'X.y.z', - '1.2.3.dev1.post2', - ] + "version", ["4.1.0.b1", "4.1.b1", "4.2", "X.y.z", "1.2.3.dev1.post2",] ) def test_invalid_pep440_versions(version): with pytest.raises(ValueError): raise_on_bad_version(version) -@pytest.mark.parametrize( - 'version', - [ - '4.1.1', - '4.2.1b3', - ] -) +@pytest.mark.parametrize("version", ["4.1.1", "4.2.1b3",]) def test_valid_pep440_versions(version): assert raise_on_bad_version(version) is None @@ -43,6 +35,3 @@ # --------- Test current version -------------- def test_current_version(): raise_on_bad_version(__version__) - - - From cb1d424664905c760af1f3524b4bc1102b1206ef Mon Sep 17 00:00:00 2001 From: Zsailer Date: Thu, 19 Dec 2019 11:21:43 -0800 Subject: [PATCH 2/2] add changelog --- CHANGELOG.md | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..a2ea13a7e5 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,48 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + + +## [0.2.0] - 2019-12-19 + +### Added +- `extension` submodule ([#48](https://github.com/jupyter/jupyter_server/pull/48)) + - ExtensionApp - configurable JupyterApp-subclass for server extensions + - Most useful for Jupyter frontends, like Notebook, JupyterLab, nteract, voila etc. + - Launch with entrypoints + - Configure from file or CLI + - Add custom templates, static assets, handlers, etc. + - Static assets are served behind a `/static/<extension_name>` endpoint. + - Run server extensions in "standalone mode" ([#70](https://github.com/jupyter/jupyter_server/pull/70) and [#76](https://github.com/jupyter/jupyter_server/pull/76)) + - ExtensionHandler - tornado handlers for extensions. + - Finds static assets at `/static/<extension_name>` + +### Changed +- `jupyter serverextension <command>` entrypoint has been changed to `jupyter server extension <command>`. +- `toggle_jupyter_server` and `validate_jupyter_server` functions no longer take a Logger object as an argument.
+- Changed testing framework from nosetests to pytest ([#152](https://github.com/jupyter/jupyter_server/pull/152)) + - Depend on the pytest-tornasync plugin for handling the tornado/asyncio event loop + - Depend on pytest-console-scripts for testing CLI entrypoints +- Added GitHub Actions as a testing framework alongside Travis and Azure ([#146](https://github.com/jupyter/jupyter_server/pull/146)) + +### Removed +- Removed the option to update the `root_dir` trait in FileContentsManager and MappingKernelManager from ServerApp ([#135](https://github.com/jupyter/jupyter_server/pull/135)) + +### Fixed +- Synced Jupyter Server with Notebook PRs in batches (ended on 2019-09-27) + - [Batch 1](https://github.com/jupyter/jupyter_server/pull/95) + - [Batch 2](https://github.com/jupyter/jupyter_server/pull/97) + - [Batch 3](https://github.com/jupyter/jupyter_server/pull/98) + - [Batch 4](https://github.com/jupyter/jupyter_server/pull/99) + - [Batch 5](https://github.com/jupyter/jupyter_server/pull/103) + - [Batch 6](https://github.com/jupyter/jupyter_server/pull/104) + - [Batch 7](https://github.com/jupyter/jupyter_server/pull/105) + - [Batch 8](https://github.com/jupyter/jupyter_server/pull/106) + +### Security +- Added a `secure_write` function for cookie/token saves ([#77](https://github.com/jupyter/jupyter_server/pull/77)) \ No newline at end of file
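
---

Reviewer note on the Security entry above: `secure_write` is imported from `jupyter_server.utils` in the reformatted `tests/test_utils.py` in this patch. A minimal usage sketch follows, assuming `secure_write(path)` is a context manager that yields a writable file handle after restricting the file to owner-only (0o600) permissions; the permission assertion below applies to POSIX systems, and the exact signature should be verified against PR #77.

```python
# Sketch only: illustrates the intended use of secure_write for
# cookie-secret/token files. Assumes secure_write(path) yields an open
# text-mode handle on a file created with 0o600 permissions (POSIX).
import os
import stat
import tempfile

from jupyter_server.utils import secure_write

with tempfile.TemporaryDirectory() as td:
    secret_path = os.path.join(td, "cookie_secret")

    with secure_write(secret_path) as f:
        f.write("example-token-material")  # never readable by group/other

    # The file should be readable/writable by the owner only.
    mode = stat.S_IMODE(os.stat(secret_path).st_mode)
    assert mode == 0o600, "unexpected permissions: %s" % oct(mode)
```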