From ece2bb55cf35d51dbbe63ab8d879ee03413a3dd5 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Mon, 7 Jul 2025 17:14:19 -0600 Subject: [PATCH 01/16] Improve subkernel shutdown --- beaker_kernel/kernel.py | 2 +- beaker_kernel/lib/subkernel.py | 42 +++++++++++++++++++++------------- 2 files changed, 27 insertions(+), 17 deletions(-) diff --git a/beaker_kernel/kernel.py b/beaker_kernel/kernel.py index 27e2ac1c..d448b106 100644 --- a/beaker_kernel/kernel.py +++ b/beaker_kernel/kernel.py @@ -427,7 +427,7 @@ def soft_interrupt(self, signal, frame): def _interrupt(self, interrupt_subkernel=True): if interrupt_subkernel: try: - subkernel_id = self.context.subkernel.jupyter_id + subkernel_id = self.context.subkernel.kernel_id print(f"Interrupting connected subkernel: {subkernel_id}") requests.post( f"{self.context.beaker_kernel.jupyter_server}/api/kernels/{subkernel_id}/interrupt", diff --git a/beaker_kernel/lib/subkernel.py b/beaker_kernel/lib/subkernel.py index 85fdee48..de94a8b2 100644 --- a/beaker_kernel/lib/subkernel.py +++ b/beaker_kernel/lib/subkernel.py @@ -1,7 +1,7 @@ import abc import asyncio import json -from typing import Any, Callable, TYPE_CHECKING +from typing import Any, Callable, TYPE_CHECKING, ClassVar import hashlib import shutil from tempfile import mkdtemp @@ -353,6 +353,8 @@ class BeakerSubkernel(abc.ABC): FETCH_STATE_CODE: str = "" + tasks: ClassVar[set[asyncio.Task]] = set() + @classmethod @abc.abstractmethod def parse_subkernel_return(cls, execution_result) -> Any: @@ -363,7 +365,7 @@ def tools(self): return [tool for tool, condition in self.TOOLS if condition()] def __init__(self, jupyter_id: str, subkernel_configuration: dict, context: BeakerContext): - self.jupyter_id = jupyter_id + self.kernel_id = jupyter_id self.connected_kernel = ProxyKernelClient(subkernel_configuration, session_id=context.beaker_kernel.session_id) self.context = context @@ -374,21 +376,29 @@ def get_treesitter_language(self) -> "TreeSitterLanguage": async def 
lint_code(self, cells: AnalysisCodeCells): pass + async def shutdown(self, kernel_id) -> bool: + try: + logger.info(f"Shutting down connected subkernel {kernel_id}") + res = requests.delete( + f"{self.context.beaker_kernel.jupyter_server}/api/kernels/{kernel_id}", + headers={"Authorization": f"token {config.jupyter_token}"}, + ) + if res.status_code == 204: + return True + except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as err: + return False def cleanup(self): - if self.jupyter_id is not None: - try: - print(f"Shutting down connected subkernel {self.jupyter_id}") - res = requests.delete( - f"{self.context.beaker_kernel.jupyter_server}/api/kernels/{self.jupyter_id}", - headers={"Authorization": f"token {config.jupyter_token}"}, - timeout=0.5, - ) - if res.status_code == 204: - self.jupyter_id = None - except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as err: - message = f"Error while shutting down subkernel: {err}\n Subkernel or server may have already been shut down." 
- logger.error(message, exc_info=err) + def finish_cleanup(task: asyncio.Task): + success = task.result() + if success: + self.kernel_id = None + self.tasks.discard(task) + + if self.kernel_id is not None: + task = asyncio.create_task(self.shutdown(self.kernel_id)) + self.tasks.add(task) + task.add_done_callback(finish_cleanup) def format_kernel_state(self, state: dict) -> dict: return state @@ -407,7 +417,7 @@ class CheckpointableBeakerSubkernel(BeakerSubkernel): def __init__(self, jupyter_id: str, subkernel_configuration: dict, context): super().__init__(jupyter_id, subkernel_configuration, context) self.checkpoints_enabled = is_checkpointing_enabled() - self.storage_prefix = os.path.join(config.checkpoint_storage_path, self.jupyter_id) + self.storage_prefix = os.path.join(config.checkpoint_storage_path, self.kernel_id) self.checkpoints : list[Checkpoint] = [] if self.checkpoints_enabled: os.makedirs(self.storage_prefix, exist_ok=True, mode=0o777) From c413f2f49bade3be0ca71d9710648faf8acfd8b3 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Wed, 10 Sep 2025 16:03:14 -0600 Subject: [PATCH 02/16] Track language used by Jupyter when defining subkernels --- beaker_kernel/lib/subkernel.py | 1 + beaker_kernel/subkernels/julia.py | 1 + beaker_kernel/subkernels/python.py | 1 + beaker_kernel/subkernels/rlang.py | 1 + 4 files changed, 4 insertions(+) diff --git a/beaker_kernel/lib/subkernel.py b/beaker_kernel/lib/subkernel.py index de94a8b2..ed6d4870 100644 --- a/beaker_kernel/lib/subkernel.py +++ b/beaker_kernel/lib/subkernel.py @@ -339,6 +339,7 @@ class BeakerSubkernel(abc.ABC): DISPLAY_NAME: str SLUG: str KERNEL_NAME: str + JUPYTER_LANGUAGE: str WEIGHT: int = 50 # Used for auto-sorting in drop-downs, etc. Lower weights are listed earlier. 
diff --git a/beaker_kernel/subkernels/julia.py b/beaker_kernel/subkernels/julia.py index e7d973ad..e05c5f79 100644 --- a/beaker_kernel/subkernels/julia.py +++ b/beaker_kernel/subkernels/julia.py @@ -27,6 +27,7 @@ class JuliaSubkernel(BeakerSubkernel): """ DISPLAY_NAME = "Julia" SLUG = "julia" + JUPYTER_LANGUAGE = "julia" KERNEL_NAME = get_kernel_name() # varinfo / filter / display diff --git a/beaker_kernel/subkernels/python.py b/beaker_kernel/subkernels/python.py index 6d34cae9..0e48ab87 100644 --- a/beaker_kernel/subkernels/python.py +++ b/beaker_kernel/subkernels/python.py @@ -18,6 +18,7 @@ class PythonSubkernel(CheckpointableBeakerSubkernel): """ DISPLAY_NAME = "Python 3" SLUG = "python3" + JUPYTER_LANGUAGE = "python" KERNEL_NAME = "python3" WEIGHT = 20 diff --git a/beaker_kernel/subkernels/rlang.py b/beaker_kernel/subkernels/rlang.py index 1f38a6f7..ef60bcab 100644 --- a/beaker_kernel/subkernels/rlang.py +++ b/beaker_kernel/subkernels/rlang.py @@ -18,6 +18,7 @@ class RSubkernel(BeakerSubkernel): DISPLAY_NAME = "R" SLUG = "rlang" KERNEL_NAME = "ir" + JUPYTER_LANGUAGE = "R" DATAFRAME_TYPE_NAME = "data.frame" WEIGHT = 60 From dd313c074365119f536a3ee7a1748790fa93c1e0 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Wed, 10 Sep 2025 16:04:25 -0600 Subject: [PATCH 03/16] Allow sessions to use default kernel if one is not specified. 
--- beaker-ts/src/session.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/beaker-ts/src/session.ts b/beaker-ts/src/session.ts index 28362028..8e940aeb 100644 --- a/beaker-ts/src/session.ts +++ b/beaker-ts/src/session.ts @@ -18,7 +18,7 @@ import { BeakerRenderer, IBeakerRendererOptions } from './render'; export interface IBeakerSessionOptions { settings: any; name: string; - kernelName: string; + kernelName?: string; sessionId?: string; rendererOptions?: IBeakerRendererOptions; messageHandler?: Slot; @@ -80,9 +80,9 @@ export class BeakerSession { specsManager: this._services.kernelspecs, name: options?.name, path: options?.sessionId, - kernelPreference: { + kernelPreference: options?.kernelName ? { name: options?.kernelName, - }, + } : undefined, }); // Track all messages from kernels. The disconnect on newValue is in case the kernel connection is reused, to From b161ebfb9232bcd282e4be7ebe47c2f233af11d1 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Fri, 19 Sep 2025 14:00:42 -0600 Subject: [PATCH 04/16] Batch of server updates - Refactor authentication structure by moving from single auth.py to auth/ module - Add multiuser service support and extension system - Update Vite configuration for improved proxy handling and CORS settings - Generate comprehensive sample configuration file with all available options - Implement server lifecycle management with list, start, stop, and config generation CLI commands --- beaker-vue/vite.config.ts | 23 +- beaker_config_sample.py | 2631 +++++++++++++++++++++ beaker_kernel/builder/beaker.py | 69 +- beaker_kernel/cli/config.py | 4 +- beaker_kernel/cli/main.py | 40 +- beaker_kernel/cli/running.py | 14 +- beaker_kernel/cli/server.py | 210 ++ beaker_kernel/kernel.py | 37 +- beaker_kernel/lib/autodiscovery.py | 67 +- beaker_kernel/lib/context.py | 66 +- beaker_kernel/lib/extension.py | 53 + beaker_kernel/service/api/integrations.py | 5 +- beaker_kernel/service/auth.py | 119 - 
beaker_kernel/service/auth/__init__.py | 164 ++ beaker_kernel/service/auth/dummy.py | 33 + beaker_kernel/service/auth/notebook.py | 22 + beaker_kernel/service/base.py | 642 ++++- beaker_kernel/service/handlers.py | 118 +- beaker_kernel/service/multiuser.py | 12 + beaker_kernel/service/notebook.py | 14 +- beaker_kernel/service/server.py | 19 +- 21 files changed, 3950 insertions(+), 412 deletions(-) create mode 100644 beaker_config_sample.py create mode 100644 beaker_kernel/cli/server.py create mode 100644 beaker_kernel/lib/extension.py delete mode 100644 beaker_kernel/service/auth.py create mode 100644 beaker_kernel/service/auth/__init__.py create mode 100644 beaker_kernel/service/auth/dummy.py create mode 100644 beaker_kernel/service/auth/notebook.py create mode 100644 beaker_kernel/service/multiuser.py diff --git a/beaker-vue/vite.config.ts b/beaker-vue/vite.config.ts index 1293b7b1..06a0a8df 100644 --- a/beaker-vue/vite.config.ts +++ b/beaker-vue/vite.config.ts @@ -9,6 +9,11 @@ import topLevelAwait from 'vite-plugin-top-level-await'; let chunkNum: number = 0; const ProxyHost = `${process.env.PROXY || 'http://localhost:8888'}`; +const proxyConfig = { + target: `${ProxyHost}/`, + xfwd: true, + changeOrigin: false, +} // https://vite.dev/config/ export default defineConfig({ @@ -16,19 +21,19 @@ export default defineConfig({ server: { host: '0.0.0.0', port: 8080, + proxy: { '/api': { - target: `${ProxyHost}/`, + ...proxyConfig, ws: true, - xfwd: true, - rewriteWsOrigin: true, + rewriteWsOrigin: false, }, - '/beaker': `${ProxyHost}/`, - '/appconfig.js': `${ProxyHost}/`, - '/files': `${ProxyHost}/`, - '/config': `${ProxyHost}/`, - '/contexts': `${ProxyHost}/`, - '/assets': `${ProxyHost}/`, + '/beaker': proxyConfig, + '/appconfig.js': proxyConfig, + '/files': proxyConfig, + '/config': proxyConfig, + '/contexts': proxyConfig, + '/assets': proxyConfig, }, fs: { allow: [".."] diff --git a/beaker_config_sample.py b/beaker_config_sample.py new file mode 100644 index 
00000000..04b4633a --- /dev/null +++ b/beaker_config_sample.py @@ -0,0 +1,2631 @@ +# =========================================== +# Beaker Notebook Service Configuration File +# =========================================== +# This file demonstrates all configurable traitlets in the Beaker Notebook service. +# Copy this file to jupyter_server_config.py or beaker_config.py in your Jupyter config directory. +# Uncomment and modify values as needed for your deployment. + +c = get_config() # noqa + +#------------------------------------------------------------------------------ +# BaseBeakerApp(ServerApp) configuration +#------------------------------------------------------------------------------ +## Customizable ServerApp for use with Beaker + +## Username for the Beaker kernel agent process +# Default: '' +# c.BaseBeakerApp.agent_user = '' + +## Set the Access-Control-Allow-Credentials: true header +# See also: ServerApp.allow_credentials +# c.BaseBeakerApp.allow_credentials = False + +## Whether or not to allow external kernels, whose connection files are placed in +# external_connection_dir. +# See also: ServerApp.allow_external_kernels +# c.BaseBeakerApp.allow_external_kernels = False + +# Default: '*' +# c.BaseBeakerApp.allow_origin = '*' + +## Use a regular expression for the Access-Control-Allow-Origin header +# See also: ServerApp.allow_origin_pat +# c.BaseBeakerApp.allow_origin_pat = '' + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change +# See also: ServerApp.allow_password_change +# c.BaseBeakerApp.allow_password_change = True + +## Allow requests where the Host header doesn't point to a local server +# See also: ServerApp.allow_remote_access +# c.BaseBeakerApp.allow_remote_access = False + +## Whether to allow the user to run the server as root. +# See also: ServerApp.allow_root +# c.BaseBeakerApp.allow_root = False + +## Allow unauthenticated access to endpoints without authentication rule. 
+# See also: ServerApp.allow_unauthenticated_access +# c.BaseBeakerApp.allow_unauthenticated_access = True + +## Answer yes to any prompts. +# See also: JupyterApp.answer_yes +# c.BaseBeakerApp.answer_yes = False + +# Default: '' +# c.BaseBeakerApp.app_slug = '' + +## " +# See also: ServerApp.authenticate_prometheus +# c.BaseBeakerApp.authenticate_prometheus = True + +## The authorizer class to use. +# See also: ServerApp.authorizer_class +# c.BaseBeakerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' + +## Reload the webapp when changes are made to any Python src files. +# See also: ServerApp.autoreload +# c.BaseBeakerApp.autoreload = False + +## The base URL for the Jupyter server. +# See also: ServerApp.base_url +# c.BaseBeakerApp.base_url = '/' + +# Default: traitlets.Undefined +# c.BaseBeakerApp.beaker_config_path = traitlets.Undefined + +# Default: {} +# c.BaseBeakerApp.beaker_extension_app = {} + +## Specify what command to use to invoke a web +# See also: ServerApp.browser +# c.BaseBeakerApp.browser = '' + +## The full path to an SSL/TLS certificate file. +# See also: ServerApp.certfile +# c.BaseBeakerApp.certfile = '' + +## The full path to a certificate authority certificate for SSL/TLS client +# authentication. +# See also: ServerApp.client_ca +# c.BaseBeakerApp.client_ca = '' + +## Full path of a config file. +# See also: JupyterApp.config_file +# c.BaseBeakerApp.config_file = '' + +# Default: '' +# c.BaseBeakerApp.config_file_name = '' + +## The config manager class to use +# See also: ServerApp.config_manager_class +# c.BaseBeakerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' + +# Default: '' +# c.BaseBeakerApp.connection_dir = '' + +## DEPRECATED. Use IdentityProvider.cookie_options +# See also: ServerApp.cookie_options +# c.BaseBeakerApp.cookie_options = {} + +## The random bytes used to secure cookies. 
+# See also: ServerApp.cookie_secret +# c.BaseBeakerApp.cookie_secret = b'' + +## The file where the cookie secret is stored. +# See also: ServerApp.cookie_secret_file +# c.BaseBeakerApp.cookie_secret_file = '' + +## Override URL shown to users. +# See also: ServerApp.custom_display_url +# c.BaseBeakerApp.custom_display_url = '' + +## The default URL to redirect to from `/` +# See also: ServerApp.default_url +# c.BaseBeakerApp.default_url = '/' + +# Default: '/' +# c.BaseBeakerApp.extension_url = '/' + +## The directory to look at for external kernel connection files, if +# allow_external_kernels is True. Defaults to Jupyter +# runtime_dir/external_kernels. Make sure that this directory is not filled with +# left-over connection files, that could result in unnecessary kernel manager +# creations. +# See also: ServerApp.external_connection_dir +# c.BaseBeakerApp.external_connection_dir = None + +## handlers that should be loaded at higher priority than the default services +# See also: ServerApp.extra_services +# c.BaseBeakerApp.extra_services = [] + +## Extra paths to search for serving static files. +# See also: ServerApp.extra_static_paths +# c.BaseBeakerApp.extra_static_paths = [] + +## Extra paths to search for serving jinja templates. +# See also: ServerApp.extra_template_paths +# c.BaseBeakerApp.extra_template_paths = [] + +## Open the named file when the application is launched. +# See also: ServerApp.file_to_run +# c.BaseBeakerApp.file_to_run = '' + +## The URL prefix where files are opened directly. +# See also: ServerApp.file_url_prefix +# c.BaseBeakerApp.file_url_prefix = 'notebooks' + +## Generate default config file. +# See also: JupyterApp.generate_config +# c.BaseBeakerApp.generate_config = False + +## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs +# See also: ServerApp.get_secure_cookie_kwargs +# c.BaseBeakerApp.get_secure_cookie_kwargs = {} + +## The identity provider class to use. 
+# See also: ServerApp.identity_provider_class +# c.BaseBeakerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit +# See also: ServerApp.iopub_data_rate_limit +# c.BaseBeakerApp.iopub_data_rate_limit = 0.0 + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit +# See also: ServerApp.iopub_msg_rate_limit +# c.BaseBeakerApp.iopub_msg_rate_limit = 0.0 + +## The IP address the Jupyter server will listen on. +# See also: ServerApp.ip +# c.BaseBeakerApp.ip = 'localhost' + +## Supply extra arguments that will be passed to Jinja environment. +# See also: ServerApp.jinja_environment_options +# c.BaseBeakerApp.jinja_environment_options = {} + +## Extra variables to supply to jinja templates when rendering. +# See also: ServerApp.jinja_template_vars +# c.BaseBeakerApp.jinja_template_vars = {} + +## Dict of Python modules to load as Jupyter server extensions.Entry values can +# be used to enable and disable the loading ofthe extensions. The extensions +# will be loaded in alphabetical order. +# See also: ServerApp.jpserver_extensions +# c.BaseBeakerApp.jpserver_extensions = {} + +## Include local kernel specs +# Default: True +# c.BaseBeakerApp.kernel_spec_include_local = True + +## Kernel specification managers indexed by extension name +# Default: {} +# c.BaseBeakerApp.kernel_spec_managers = {} + +## The kernel websocket connection class to use. +# See also: ServerApp.kernel_websocket_connection_class +# c.BaseBeakerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol +# See also: ServerApp.kernel_ws_protocol +# c.BaseBeakerApp.kernel_ws_protocol = '' + +## The full path to a private key file for usage with SSL/TLS. +# See also: ServerApp.keyfile +# c.BaseBeakerApp.keyfile = '' + +## DEPRECATED. 
Use ZMQChannelsWebsocketConnection.limit_rate +# See also: ServerApp.limit_rate +# c.BaseBeakerApp.limit_rate = False + +## Hostnames to allow as local when allow_remote_access is False. +# See also: ServerApp.local_hostnames +# c.BaseBeakerApp.local_hostnames = ['localhost'] + +## The date format used by logging formatters for %(asctime)s +# See also: Application.log_datefmt +# c.BaseBeakerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# See also: Application.log_format +# c.BaseBeakerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# See also: Application.log_level +# c.BaseBeakerApp.log_level = 30 + +## Enable request logging +# Default: False +# c.BaseBeakerApp.log_requests = False + +## +# See also: Application.logging_config +# c.BaseBeakerApp.logging_config = {} + +## The login handler class to use. +# See also: ServerApp.login_handler_class +# c.BaseBeakerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' + +## The logout handler class to use. +# See also: ServerApp.logout_handler_class +# c.BaseBeakerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## +# See also: ServerApp.max_body_size +# c.BaseBeakerApp.max_body_size = 536870912 + +## +# See also: ServerApp.max_buffer_size +# c.BaseBeakerApp.max_buffer_size = 536870912 + +## +# See also: ServerApp.min_open_files_limit +# c.BaseBeakerApp.min_open_files_limit = 0 + +# Default: 'beaker' +# c.BaseBeakerApp.name = 'beaker' + +## DEPRECATED, use root_dir. +# See also: ServerApp.notebook_dir +# c.BaseBeakerApp.notebook_dir = '' + +# Default: False +# c.BaseBeakerApp.open_browser = False + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password +# See also: ServerApp.password +# c.BaseBeakerApp.password = '' + +## DEPRECATED in 2.0. 
Use PasswordIdentityProvider.password_required +# See also: ServerApp.password_required +# c.BaseBeakerApp.password_required = False + +## The port the server will listen on (env: JUPYTER_PORT). +# See also: ServerApp.port +# c.BaseBeakerApp.port = 0 + +## The number of additional ports to try if the specified port is not available +# (env: JUPYTER_PORT_RETRIES). +# See also: ServerApp.port_retries +# c.BaseBeakerApp.port_retries = 50 + +## Preferred starting directory to use for notebooks and kernels. +# ServerApp.preferred_dir is deprecated in jupyter-server 2.0. Use +# FileContentsManager.preferred_dir instead +# See also: ServerApp.preferred_dir +# c.BaseBeakerApp.preferred_dir = '' + +## +# See also: ServerApp.pylab +# c.BaseBeakerApp.pylab = 'disabled' + +## If True, display controls to shut down the Jupyter server, such as menu items +# or buttons. +# See also: ServerApp.quit_button +# c.BaseBeakerApp.quit_button = True + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window +# See also: ServerApp.rate_limit_window +# c.BaseBeakerApp.rate_limit_window = 0.0 + +## The directory to use for notebooks and kernels. +# See also: ServerApp.root_dir +# c.BaseBeakerApp.root_dir = '' + +## Username under which the Beaker service is running +# Default: '' +# c.BaseBeakerApp.service_user = '' + +## Instead of starting the Application, dump configuration to stdout +# See also: Application.show_config +# c.BaseBeakerApp.show_config = False + +## Instead of starting the Application, dump configuration to stdout (as JSON) +# See also: Application.show_config_json +# c.BaseBeakerApp.show_config_json = False + +## Shut down the server after N seconds with no kernelsrunning and no activity. +# This can be used together with culling idle kernels +# (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when +# it's not in use. This is not precisely timed: it may shut down up to a minute +# later. 0 (the default) disables this automatic shutdown. 
+# See also: ServerApp.shutdown_no_activity_timeout +# c.BaseBeakerApp.shutdown_no_activity_timeout = 0 + +## The UNIX socket the Jupyter server will listen on. +# See also: ServerApp.sock +# c.BaseBeakerApp.sock = '' + +## The permissions mode for UNIX socket creation (default: 0600). +# See also: ServerApp.sock_mode +# c.BaseBeakerApp.sock_mode = '0600' + +## Supply SSL options for the tornado HTTPServer. +# See also: ServerApp.ssl_options +# c.BaseBeakerApp.ssl_options = {} + +## +# See also: ServerApp.static_immutable_cache +# c.BaseBeakerApp.static_immutable_cache = [] + +## Username under which subkernels (Python, R, etc.) are executed +# Default: '' +# c.BaseBeakerApp.subkernel_user = '' + +## Supply overrides for terminado. Currently only supports "shell_command". +# See also: ServerApp.terminado_settings +# c.BaseBeakerApp.terminado_settings = {} + +## Set to False to disable terminals. +# See also: ServerApp.terminals_enabled +# c.BaseBeakerApp.terminals_enabled = False + +## DEPRECATED. Use IdentityProvider.token +# See also: ServerApp.token +# c.BaseBeakerApp.token = '' + +## Supply overrides for the tornado.web.Application that the Jupyter server uses. +# See also: ServerApp.tornado_settings +# c.BaseBeakerApp.tornado_settings = {} + +## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- +# For headerssent by the upstream reverse proxy. Necessary if the proxy handles +# SSL +# See also: ServerApp.trust_xheaders +# c.BaseBeakerApp.trust_xheaders = False + +## Working directory for kernel execution and file operations +# Default: '' +# c.BaseBeakerApp.ui_path = '' + +## Disable launching browser by redirect file +# See also: ServerApp.use_redirect_file +# c.BaseBeakerApp.use_redirect_file = True + +## Specify where to open the server on startup. 
This is the +# See also: ServerApp.webbrowser_open_new +# c.BaseBeakerApp.webbrowser_open_new = 2 + +## +# See also: ServerApp.websocket_compression_options +# c.BaseBeakerApp.websocket_compression_options = None + +## +# See also: ServerApp.websocket_ping_interval +# c.BaseBeakerApp.websocket_ping_interval = 0 + +## +# See also: ServerApp.websocket_ping_timeout +# c.BaseBeakerApp.websocket_ping_timeout = 0 + +## The base URL for websockets, +# See also: ServerApp.websocket_url +# c.BaseBeakerApp.websocket_url = '' + +## Working directory for kernel execution and file operations +# Default: '' +# c.BaseBeakerApp.working_dir = '' + +#------------------------------------------------------------------------------ +# BeakerIdentityProvider(IdentityProvider) configuration +#------------------------------------------------------------------------------ +## Header name for Beaker kernel authentication +# Default: 'X-AUTH-BEAKER' +# c.BeakerIdentityProvider.beaker_kernel_header = 'X-AUTH-BEAKER' + +## Name of the cookie to set for persisting login. Default: username-${Host}. +# See also: IdentityProvider.cookie_name +# c.BeakerIdentityProvider.cookie_name = '' + +## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +# See also: IdentityProvider.cookie_options +# c.BeakerIdentityProvider.cookie_options = {} + +## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's +# get_secure_cookie docs for details. +# See also: IdentityProvider.get_secure_cookie_kwargs +# c.BeakerIdentityProvider.get_secure_cookie_kwargs = {} + +## The login handler class to use, if any. +# See also: IdentityProvider.login_handler_class +# c.BeakerIdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' + +## The logout handler class to use. 
+# See also: IdentityProvider.logout_handler_class +# c.BeakerIdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## Specify whether login cookie should have the `secure` property (HTTPS- +# only).Only needed when protocol-detection gives the wrong answer due to +# proxies. +# See also: IdentityProvider.secure_cookie +# c.BeakerIdentityProvider.secure_cookie = None + +## Token used for authenticating first-time connections to the server. +# See also: IdentityProvider.token +# c.BeakerIdentityProvider.token = '' + +#------------------------------------------------------------------------------ +# NotebookIdentityProvider(BeakerIdentityProvider, IdentityProvider) configuration +#------------------------------------------------------------------------------ +## Header name for Beaker kernel authentication +# See also: BeakerIdentityProvider.beaker_kernel_header +# c.NotebookIdentityProvider.beaker_kernel_header = 'X-AUTH-BEAKER' + +## Name of the cookie to set for persisting login. Default: username-${Host}. +# See also: IdentityProvider.cookie_name +# c.NotebookIdentityProvider.cookie_name = '' + +## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +# See also: IdentityProvider.cookie_options +# c.NotebookIdentityProvider.cookie_options = {} + +## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's +# get_secure_cookie docs for details. +# See also: IdentityProvider.get_secure_cookie_kwargs +# c.NotebookIdentityProvider.get_secure_cookie_kwargs = {} + +## The login handler class to use, if any. +# See also: IdentityProvider.login_handler_class +# c.NotebookIdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' + +## The logout handler class to use. 
+# See also: IdentityProvider.logout_handler_class +# c.NotebookIdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## Specify whether login cookie should have the `secure` property (HTTPS- +# only).Only needed when protocol-detection gives the wrong answer due to +# proxies. +# See also: IdentityProvider.secure_cookie +# c.NotebookIdentityProvider.secure_cookie = None + +## Token used for authenticating first-time connections to the server. +# See also: IdentityProvider.token +# c.NotebookIdentityProvider.token = '' + +#------------------------------------------------------------------------------ +# BeakerContentsManager(AsyncLargeFileManager) configuration +#------------------------------------------------------------------------------ +## Allow access to hidden files +# See also: ContentsManager.allow_hidden +# c.BeakerContentsManager.allow_hidden = False + +## If True, deleting a non-empty directory will always be allowed. +# See also: FileContentsManager.always_delete_dir +# c.BeakerContentsManager.always_delete_dir = False + +# See also: AsyncContentsManager.checkpoints +# c.BeakerContentsManager.checkpoints = None + +# See also: AsyncContentsManager.checkpoints_class +# c.BeakerContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' + +# See also: AsyncContentsManager.checkpoints_kwargs +# c.BeakerContentsManager.checkpoints_kwargs = {} + +## If True (default), deleting files will send them to the +# See also: FileContentsManager.delete_to_trash +# c.BeakerContentsManager.delete_to_trash = True + +# See also: ContentsManager.event_logger +# c.BeakerContentsManager.event_logger = None + +## handler class to use when serving raw file requests. +# See also: ContentsManager.files_handler_class +# c.BeakerContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. 
+# See also: ContentsManager.files_handler_params +# c.BeakerContentsManager.files_handler_params = {} + +## Hash algorithm to use for file content, support by hashlib +# See also: FileManagerMixin.hash_algorithm +# c.BeakerContentsManager.hash_algorithm = 'sha256' + +## +# See also: ContentsManager.hide_globs +# c.BeakerContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## The max folder size that can be copied +# See also: FileContentsManager.max_copy_folder_size_mb +# c.BeakerContentsManager.max_copy_folder_size_mb = 500 + +## Python callable or importstring thereof +# See also: ContentsManager.post_save_hook +# c.BeakerContentsManager.post_save_hook = None + +## Python callable or importstring thereof +# See also: ContentsManager.pre_save_hook +# c.BeakerContentsManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. This is an API path (`/` +# separated, relative to root dir) +# See also: ContentsManager.preferred_dir +# c.BeakerContentsManager.preferred_dir = '' + +# See also: FileContentsManager.root_dir +# c.BeakerContentsManager.root_dir = '' + +## The base name used when creating untitled directories. +# See also: ContentsManager.untitled_directory +# c.BeakerContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# See also: ContentsManager.untitled_file +# c.BeakerContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# See also: ContentsManager.untitled_notebook +# c.BeakerContentsManager.untitled_notebook = 'Untitled' + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. 
+# See also: FileManagerMixin.use_atomic_writing +# c.BeakerContentsManager.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# BeakerKernelMappingManager(AsyncMappingKernelManager) configuration +#------------------------------------------------------------------------------ +## Whether to send tracebacks to clients on exceptions. +# See also: MappingKernelManager.allow_tracebacks +# c.BeakerKernelMappingManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# See also: MappingKernelManager.allowed_message_types +# c.BeakerKernelMappingManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# See also: MappingKernelManager.buffer_offline_messages +# c.BeakerKernelMappingManager.buffer_offline_messages = True + +## Directory for kernel connection files +# Default: '/home/matt/.local/share/beaker/runtime/kernelfiles' +# c.BeakerKernelMappingManager.connection_dir = '/home/matt/.local/share/beaker/runtime/kernelfiles' + +## Whether to consider culling kernels which are busy. +# See also: MappingKernelManager.cull_busy +# c.BeakerKernelMappingManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# See also: MappingKernelManager.cull_connected +# c.BeakerKernelMappingManager.cull_connected = False + +## Timeout in seconds for culling idle kernels +# Default: 0 +# c.BeakerKernelMappingManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# See also: MappingKernelManager.cull_interval +# c.BeakerKernelMappingManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.BeakerKernelMappingManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). 
+# See also: MappingKernelManager.kernel_info_timeout +# c.BeakerKernelMappingManager.kernel_info_timeout = 60 + +# See also: MappingKernelManager.root_dir +# c.BeakerKernelMappingManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.BeakerKernelMappingManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# See also: MappingKernelManager.traceback_replacement_message +# c.BeakerKernelMappingManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. +# See also: MappingKernelManager.untracked_message_types +# c.BeakerKernelMappingManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +## Whether to make kernels available before the process has started. The +# See also: AsyncMultiKernelManager.use_pending_kernels +# c.BeakerKernelMappingManager.use_pending_kernels = False + +#------------------------------------------------------------------------------ +# BeakerKernelSpecManager(KernelSpecManager) configuration +#------------------------------------------------------------------------------ +## List of allowed kernel names. +# See also: KernelSpecManager.allowed_kernelspecs +# c.BeakerKernelSpecManager.allowed_kernelspecs = set() + +## If there is no Python kernelspec registered and the IPython +# See also: KernelSpecManager.ensure_native_kernel +# c.BeakerKernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. 
This is configurable to allow +# See also: KernelSpecManager.kernel_spec_class +# c.BeakerKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Deprecated, use `KernelSpecManager.allowed_kernelspecs` +# See also: KernelSpecManager.whitelist +# c.BeakerKernelSpecManager.whitelist = set() + +#------------------------------------------------------------------------------ +# BeakerSessionManager(SessionManager) configuration +#------------------------------------------------------------------------------ +## The filesystem path to SQLite Database file (e.g. +# /path/to/session_database.db). By default, the session database is stored in- +# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the +# current Jupyter Server shuts down. +# See also: SessionManager.database_filepath +# c.BeakerSessionManager.database_filepath = ':memory:' + +#------------------------------------------------------------------------------ +# ConnectionFileMixin(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## Mixin for configurable classes that work with connection files + +## JSON file in which to store connection info [default: kernel-.json] +# +# This file will contain the IP, ports, and authentication key needed to connect +# clients to this kernel. By default, this file will be created in the security dir +# of the current profile, but can be specified by absolute path. +# Default: '' +# c.ConnectionFileMixin.connection_file = '' + +## set the control (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.control_port = 0 + +## set the heartbeat port [default: random] +# Default: 0 +# c.ConnectionFileMixin.hb_port = 0 + +## set the iopub (PUB) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.iopub_port = 0 + +## Set the kernel's IP address [default localhost]. 
+# If the IP address is something other than localhost, then +# Consoles on other machines will be able to connect +# to the Kernel, so be careful! +# Default: '' +# c.ConnectionFileMixin.ip = '' + +## set the shell (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.shell_port = 0 + +## set the stdin (ROUTER) port [default: random] +# Default: 0 +# c.ConnectionFileMixin.stdin_port = 0 + +# Choices: any of ['tcp', 'ipc'] (case-insensitive) +# Default: 'tcp' +# c.ConnectionFileMixin.transport = 'tcp' + +#------------------------------------------------------------------------------ +# KernelSpecManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## A manager for kernel specs. + +## List of allowed kernel names. +# +# By default, all installed kernels are allowed. +# Default: set() +# c.KernelSpecManager.allowed_kernelspecs = set() + +## If there is no Python kernelspec registered and the IPython +# kernel is available, ensure it is added to the spec list. +# Default: True +# c.KernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. This is configurable to allow +# subclassing of the KernelSpecManager for customized behavior. +# Default: 'jupyter_client.kernelspec.KernelSpec' +# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Deprecated, use `KernelSpecManager.allowed_kernelspecs` +# Default: set() +# c.KernelSpecManager.whitelist = set() + +#------------------------------------------------------------------------------ +# KernelManager(ConnectionFileMixin) configuration +#------------------------------------------------------------------------------ +## Manages a single kernel in a subprocess on this host. +# +# This version starts kernels with Popen. + +## Should we autorestart the kernel if it dies. 
+# Default: True
+# c.KernelManager.autorestart = True
+
+## True if the MultiKernelManager should cache ports for this KernelManager
+# instance
+# Default: False
+# c.KernelManager.cache_ports = False
+
+## JSON file in which to store connection info [default: kernel-.json]
+# See also: ConnectionFileMixin.connection_file
+# c.KernelManager.connection_file = ''
+
+## set the control (ROUTER) port [default: random]
+# See also: ConnectionFileMixin.control_port
+# c.KernelManager.control_port = 0
+
+## set the heartbeat port [default: random]
+# See also: ConnectionFileMixin.hb_port
+# c.KernelManager.hb_port = 0
+
+## set the iopub (PUB) port [default: random]
+# See also: ConnectionFileMixin.iopub_port
+# c.KernelManager.iopub_port = 0
+
+## Set the kernel's IP address [default localhost].
+# See also: ConnectionFileMixin.ip
+# c.KernelManager.ip = ''
+
+## set the shell (ROUTER) port [default: random]
+# See also: ConnectionFileMixin.shell_port
+# c.KernelManager.shell_port = 0
+
+## Time to wait for a kernel to terminate before killing it, in seconds. When a
+# shutdown request is initiated, the kernel will be immediately sent an
+# interrupt (SIGINT), followed by a shutdown_request message, after 1/2 of
+# `shutdown_wait_time` it will be sent a terminate (SIGTERM) request, and finally
+# at the end of `shutdown_wait_time` will be killed (SIGKILL). terminate and
+# kill may be equivalent on windows. Note that this value can be overridden by
+# the in-use kernel provisioner since shutdown times may vary by provisioned
+# environment. 
+# Default: 5.0 +# c.KernelManager.shutdown_wait_time = 5.0 + +## set the stdin (ROUTER) port [default: random] +# See also: ConnectionFileMixin.stdin_port +# c.KernelManager.stdin_port = 0 + +# See also: ConnectionFileMixin.transport +# c.KernelManager.transport = 'tcp' + +#------------------------------------------------------------------------------ +# AsyncMultiKernelManager(MultiKernelManager) configuration +#------------------------------------------------------------------------------ +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.AsyncMultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow +# subclassing of the AsyncKernelManager for customized behavior. +# Default: 'jupyter_client.ioloop.AsyncIOLoopKernelManager' +# c.AsyncMultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.AsyncMultiKernelManager.shared_context = True + +## Whether to make kernels available before the process has started. The +# kernel has a `.ready` future which can be awaited before connecting +# Default: False +# c.AsyncMultiKernelManager.use_pending_kernels = False + +#------------------------------------------------------------------------------ +# MultiKernelManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## A class for managing multiple kernels. + +## The name of the default kernel to start +# Default: 'python3' +# c.MultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow +# subclassing of the KernelManager for customized behavior. 
+# Default: 'jupyter_client.ioloop.IOLoopKernelManager' +# c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +## Share a single zmq.Context to talk to all my kernels +# Default: True +# c.MultiKernelManager.shared_context = True + +#------------------------------------------------------------------------------ +# Session(Configurable) configuration +#------------------------------------------------------------------------------ +## Object for handling serialization and sending of messages. +# +# The Session object handles building messages and sending them with ZMQ sockets +# or ZMQStream objects. Objects can communicate with each other over the +# network via Session objects, and only need to work with the dict-based IPython +# message spec. The Session will handle serialization/deserialization, security, +# and metadata. +# +# Sessions support configurable serialization via packer/unpacker traits, and +# signing with HMAC digests via the key/keyfile traits. +# +# Parameters ---------- +# +# debug : bool +# whether to trigger extra debugging statements +# packer/unpacker : str : 'json', 'pickle' or import_string +# importstrings for methods to serialize message parts. If just +# 'json' or 'pickle', predefined JSON and pickle packers will be used. +# Otherwise, the entire importstring must be used. +# +# The functions must accept at least valid JSON input, and output *bytes*. +# +# For example, to use msgpack: +# packer = 'msgpack.packb', unpacker='msgpack.unpackb' +# pack/unpack : callables +# You can also set the pack/unpack callables for serialization directly. +# session : bytes +# the ID of this Session object. The default is to generate a new UUID. +# username : unicode +# username added to message headers. The default is to ask the OS. +# key : bytes +# The key used to initialize an HMAC signature. If unset, messages +# will not be signed or checked. +# keyfile : filepath +# The file containing a key. 
If this is set, `key` will be initialized +# to the contents of the file. + +## Threshold (in bytes) beyond which an object's buffer should be extracted to +# avoid pickling. +# Default: 1024 +# c.Session.buffer_threshold = 1024 + +## Whether to check PID to protect against calls after fork. +# +# This check can be disabled if fork-safety is handled elsewhere. +# Default: True +# c.Session.check_pid = True + +## Threshold (in bytes) beyond which a buffer should be sent without copying. +# Default: 65536 +# c.Session.copy_threshold = 65536 + +## Debug output in the Session +# Default: False +# c.Session.debug = False + +## The maximum number of digests to remember. +# +# The digest history will be culled when it exceeds this value. +# Default: 65536 +# c.Session.digest_history_size = 65536 + +## The maximum number of items for a container to be introspected for custom serialization. +# Containers larger than this are pickled outright. +# Default: 64 +# c.Session.item_threshold = 64 + +## execution key, for signing messages. +# Default: b'' +# c.Session.key = b'' + +## path to file containing execution key. +# Default: '' +# c.Session.keyfile = '' + +## Metadata dictionary, which serves as the default top-level metadata dict for +# each message. +# Default: {} +# c.Session.metadata = {} + +## The name of the packer for serializing messages. +# Should be one of 'json', 'pickle', or an import name +# for a custom callable serializer. +# Default: 'json' +# c.Session.packer = 'json' + +## The UUID identifying this session. +# Default: '' +# c.Session.session = '' + +## The digest scheme used to construct the message signatures. +# Must have the form 'hmac-HASH'. +# Default: 'hmac-sha256' +# c.Session.signature_scheme = 'hmac-sha256' + +## The name of the unpacker for unserializing messages. +# Only used with custom functions for `packer`. +# Default: 'json' +# c.Session.unpacker = 'json' + +## Username for the Session. Default is your system username. 
+# Default: 'matt' +# c.Session.username = 'matt' + +#------------------------------------------------------------------------------ +# JupyterApp(Application) configuration +#------------------------------------------------------------------------------ +## Base class for Jupyter applications + +## Answer yes to any prompts. +# Default: False +# c.JupyterApp.answer_yes = False + +## Full path of a config file. +# Default: '' +# c.JupyterApp.config_file = '' + +## Specify a config file to load. +# Default: '' +# c.JupyterApp.config_file_name = '' + +## Generate default config file. +# Default: False +# c.JupyterApp.generate_config = False + +## The date format used by logging formatters for %(asctime)s +# See also: Application.log_datefmt +# c.JupyterApp.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# See also: Application.log_format +# c.JupyterApp.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# See also: Application.log_level +# c.JupyterApp.log_level = 30 + +## +# See also: Application.logging_config +# c.JupyterApp.logging_config = {} + +## Instead of starting the Application, dump configuration to stdout +# See also: Application.show_config +# c.JupyterApp.show_config = False + +## Instead of starting the Application, dump configuration to stdout (as JSON) +# See also: Application.show_config_json +# c.JupyterApp.show_config_json = False + +#------------------------------------------------------------------------------ +# EventLogger(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## An Event logger for emitting structured events. +# +# Event schemas must be registered with the EventLogger using the +# `register_schema` or `register_schema_file` methods. Every schema will be +# validated against Jupyter Event's metaschema. + +## A list of logging.Handler instances to send events to. 
+# +# When set to None (the default), all events are discarded. +# Default: None +# c.EventLogger.handlers = None + +#------------------------------------------------------------------------------ +# IdentityProvider(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## Interface for providing identity management and authentication. +# +# Two principle methods: +# +# - :meth:`~jupyter_server.auth.IdentityProvider.get_user` returns a :class:`~.User` object +# for successful authentication, or None for no-identity-found. +# - :meth:`~jupyter_server.auth.IdentityProvider.identity_model` turns a :class:`~jupyter_server.auth.User` into a JSONable dict. +# The default is to use :py:meth:`dataclasses.asdict`, +# and usually shouldn't need override. +# +# Additional methods can customize authentication. +# +# .. versionadded:: 2.0 + +## Name of the cookie to set for persisting login. Default: username-${Host}. +# Default: '' +# c.IdentityProvider.cookie_name = '' + +## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +# Default: {} +# c.IdentityProvider.cookie_options = {} + +## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's +# get_secure_cookie docs for details. +# Default: {} +# c.IdentityProvider.get_secure_cookie_kwargs = {} + +## The login handler class to use, if any. +# Default: 'jupyter_server.auth.login.LoginFormHandler' +# c.IdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' + +## The logout handler class to use. +# Default: 'jupyter_server.auth.logout.LogoutHandler' +# c.IdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## Specify whether login cookie should have the `secure` property (HTTPS- +# only).Only needed when protocol-detection gives the wrong answer due to +# proxies. 
+# Default: None
+# c.IdentityProvider.secure_cookie = None
+
+## Token used for authenticating first-time connections to the server.
+#
+# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly
+# with the JUPYTER_TOKEN environment variable.
+#
+# When no password is enabled,
+# the default is to generate a new, random token.
+#
+# Setting to an empty string disables authentication altogether, which
+# is NOT RECOMMENDED.
+#
+# Prior to 2.0: configured as ServerApp.token
+# Default: ''
+# c.IdentityProvider.token = ''
+
+#------------------------------------------------------------------------------
+# GatewayWebSocketConnection(BaseKernelWebsocketConnection) configuration
+#------------------------------------------------------------------------------
+## Web socket connection that proxies to a kernel/enterprise gateway.
+
+# Default: ''
+# c.GatewayWebSocketConnection.kernel_ws_protocol = ''
+
+# See also: BaseKernelWebsocketConnection.session
+# c.GatewayWebSocketConnection.session = None
+
+#------------------------------------------------------------------------------
+# GatewayClient(SingletonConfigurable) configuration
+#------------------------------------------------------------------------------
+## This class manages the configuration. It's its own singleton class so that we
+# can share these values across all objects. It also contains some
+# helper methods to build request arguments out of the various config options.
+
+## Accept and manage cookies sent by the service side. This is often useful
+# for load balancers to decide which backend node to use.
+# (JUPYTER_GATEWAY_ACCEPT_COOKIES env var)
+# Default: False
+# c.GatewayClient.accept_cookies = False
+
+## A comma-separated list of environment variable names that will be included,
+# along with their values, in the kernel startup request. 
The corresponding +# `client_envs` configuration value must also be set on the Gateway server - +# since that configuration value indicates which environmental values to make +# available to the kernel. (JUPYTER_GATEWAY_ALLOWED_ENVS env var) +# Default: '' +# c.GatewayClient.allowed_envs = '' + +## The authorization header's key name (typically 'Authorization') used in the +# HTTP headers. The header will be formatted as:: +# +# {'{auth_header_key}': '{auth_scheme} {auth_token}'} +# +# If the authorization header key takes a single value, `auth_scheme` should be +# set to None and 'auth_token' should be configured to use the appropriate +# value. +# +# (JUPYTER_GATEWAY_AUTH_HEADER_KEY env var) +# Default: '' +# c.GatewayClient.auth_header_key = '' + +## The auth scheme, added as a prefix to the authorization token used in the HTTP +# headers. (JUPYTER_GATEWAY_AUTH_SCHEME env var) +# Default: '' +# c.GatewayClient.auth_scheme = '' + +## The authorization token used in the HTTP headers. The header will be formatted +# as:: +# +# {'{auth_header_key}': '{auth_scheme} {auth_token}'} +# +# (JUPYTER_GATEWAY_AUTH_TOKEN env var) +# Default: None +# c.GatewayClient.auth_token = None + +## The filename of CA certificates or None to use defaults. +# (JUPYTER_GATEWAY_CA_CERTS env var) +# Default: None +# c.GatewayClient.ca_certs = None + +## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT +# env var) +# Default: None +# c.GatewayClient.client_cert = None + +## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) +# Default: None +# c.GatewayClient.client_key = None + +## The time allowed for HTTP connection establishment with the Gateway server. 
+# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) +# Default: 40.0 +# c.GatewayClient.connect_timeout = 40.0 + +## Deprecated, use `GatewayClient.allowed_envs` +# Default: '' +# c.GatewayClient.env_whitelist = '' + +# Default: None +# c.GatewayClient.event_logger = None + +## The time allowed for HTTP reconnection with the Gateway server for the first +# time. Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor +# of numbers of retries but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. +# (JUPYTER_GATEWAY_RETRY_INTERVAL env var) +# Default: 1.0 +# c.GatewayClient.gateway_retry_interval = 1.0 + +## The maximum time allowed for HTTP reconnection retry with the Gateway server. +# (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var) +# Default: 30.0 +# c.GatewayClient.gateway_retry_interval_max = 30.0 + +## The maximum retries allowed for HTTP reconnection with the Gateway server. +# (JUPYTER_GATEWAY_RETRY_MAX env var) +# Default: 5 +# c.GatewayClient.gateway_retry_max = 5 + +## The class to use for Gateway token renewal. +# (JUPYTER_GATEWAY_TOKEN_RENEWER_CLASS env var) +# Default: 'jupyter_server.gateway.gateway_client.GatewayTokenRenewerBase' +# c.GatewayClient.gateway_token_renewer_class = 'jupyter_server.gateway.gateway_client.GatewayTokenRenewerBase' + +## Additional HTTP headers to pass on the request. This value will be converted to a dict. +# (JUPYTER_GATEWAY_HEADERS env var) +# Default: '{}' +# c.GatewayClient.headers = '{}' + +## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) +# Default: None +# c.GatewayClient.http_pwd = None + +## The username for HTTP authentication. 
(JUPYTER_GATEWAY_HTTP_USER env var) +# Default: None +# c.GatewayClient.http_user = None + +## The gateway API endpoint for accessing kernel resources +# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) +# Default: '/api/kernels' +# c.GatewayClient.kernels_endpoint = '/api/kernels' + +## The gateway API endpoint for accessing kernelspecs +# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) +# Default: '/api/kernelspecs' +# c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs' + +## The gateway endpoint for accessing kernelspecs resources +# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) +# Default: '/kernelspecs' +# c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs' + +## Timeout pad to be ensured between KERNEL_LAUNCH_TIMEOUT and request_timeout +# such that request_timeout >= KERNEL_LAUNCH_TIMEOUT + launch_timeout_pad. +# (JUPYTER_GATEWAY_LAUNCH_TIMEOUT_PAD env var) +# Default: 2.0 +# c.GatewayClient.launch_timeout_pad = 2.0 + +## The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT +# env var) +# Default: 42.0 +# c.GatewayClient.request_timeout = 42.0 + +## The url of the Kernel or Enterprise Gateway server where kernel specifications +# are defined and kernel management takes place. If defined, this Notebook +# server acts as a proxy for all kernel management and kernel specification +# retrieval. (JUPYTER_GATEWAY_URL env var) +# Default: None +# c.GatewayClient.url = None + +## For HTTPS requests, determines if server's certificate should be validated or +# not. (JUPYTER_GATEWAY_VALIDATE_CERT env var) +# Default: True +# c.GatewayClient.validate_cert = True + +## The websocket url of the Kernel or Enterprise Gateway server. If not +# provided, this value will correspond to the value of the Gateway url with 'ws' +# in place of 'http'. 
(JUPYTER_GATEWAY_WS_URL env var) +# Default: None +# c.GatewayClient.ws_url = None + +#------------------------------------------------------------------------------ +# GatewayKernelSpecManager(KernelSpecManager) configuration +#------------------------------------------------------------------------------ +## A gateway kernel spec manager. + +## List of allowed kernel names. +# See also: KernelSpecManager.allowed_kernelspecs +# c.GatewayKernelSpecManager.allowed_kernelspecs = set() + +## If there is no Python kernelspec registered and the IPython +# See also: KernelSpecManager.ensure_native_kernel +# c.GatewayKernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. This is configurable to allow +# See also: KernelSpecManager.kernel_spec_class +# c.GatewayKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Deprecated, use `KernelSpecManager.allowed_kernelspecs` +# See also: KernelSpecManager.whitelist +# c.GatewayKernelSpecManager.whitelist = set() + +#------------------------------------------------------------------------------ +# GatewayMappingKernelManager(AsyncMappingKernelManager) configuration +#------------------------------------------------------------------------------ +## Kernel manager that supports remote kernels hosted by Jupyter Kernel or +# Enterprise Gateway. + +## Whether to send tracebacks to clients on exceptions. +# See also: MappingKernelManager.allow_tracebacks +# c.GatewayMappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# See also: MappingKernelManager.allowed_message_types +# c.GatewayMappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# See also: MappingKernelManager.buffer_offline_messages +# c.GatewayMappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. 
+# See also: MappingKernelManager.cull_busy +# c.GatewayMappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# See also: MappingKernelManager.cull_connected +# c.GatewayMappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. +# See also: MappingKernelManager.cull_idle_timeout +# c.GatewayMappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# See also: MappingKernelManager.cull_interval +# c.GatewayMappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.GatewayMappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# See also: MappingKernelManager.kernel_info_timeout +# c.GatewayMappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: AsyncMultiKernelManager.kernel_manager_class +# c.GatewayMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +# See also: MappingKernelManager.root_dir +# c.GatewayMappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.GatewayMappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# See also: MappingKernelManager.traceback_replacement_message +# c.GatewayMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. 
+# See also: MappingKernelManager.untracked_message_types +# c.GatewayMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +## Whether to make kernels available before the process has started. The +# See also: AsyncMultiKernelManager.use_pending_kernels +# c.GatewayMappingKernelManager.use_pending_kernels = False + +#------------------------------------------------------------------------------ +# GatewaySessionManager(SessionManager) configuration +#------------------------------------------------------------------------------ +## A gateway session manager. + +## The filesystem path to SQLite Database file (e.g. +# /path/to/session_database.db). By default, the session database is stored in- +# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the +# current Jupyter Server shuts down. +# See also: SessionManager.database_filepath +# c.GatewaySessionManager.database_filepath = ':memory:' + +#------------------------------------------------------------------------------ +# ServerApp(JupyterApp) configuration +#------------------------------------------------------------------------------ +## The Jupyter Server application class. + +## Set the Access-Control-Allow-Credentials: true header +# Default: False +# c.ServerApp.allow_credentials = False + +## Whether or not to allow external kernels, whose connection files are placed in +# external_connection_dir. +# Default: False +# c.ServerApp.allow_external_kernels = False + +## Set the Access-Control-Allow-Origin header +# +# Use '*' to allow any origin to access your server. +# +# Takes precedence over allow_origin_pat. 
+# Default: '' +# c.ServerApp.allow_origin = '' + +## Use a regular expression for the Access-Control-Allow-Origin header +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +# Default: '' +# c.ServerApp.allow_origin_pat = '' + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change +# Default: True +# c.ServerApp.allow_password_change = True + +## Allow requests where the Host header doesn't point to a local server +# +# By default, requests get a 403 forbidden response if the 'Host' header +# shows that the browser thinks it's on a non-local domain. +# Setting this option to True disables this check. +# +# This protects against 'DNS rebinding' attacks, where a remote web server +# serves you a page and then changes its DNS to send later requests to a +# local IP, bypassing same-origin checks. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, +# along with hostnames configured in local_hostnames. +# Default: False +# c.ServerApp.allow_remote_access = False + +## Whether to allow the user to run the server as root. +# Default: False +# c.ServerApp.allow_root = False + +## Allow unauthenticated access to endpoints without authentication rule. +# +# When set to `True` (default in jupyter-server 2.0, subject to change +# in the future), any request to an endpoint without an authentication rule +# (either `@tornado.web.authenticated`, or `@allow_unauthenticated`) +# will be permitted, regardless of whether user has logged in or not. +# +# When set to `False`, logging in will be required for access to each endpoint, +# excluding the endpoints marked with `@allow_unauthenticated` decorator. 
+# +# This option can be configured using `JUPYTER_SERVER_ALLOW_UNAUTHENTICATED_ACCESS` +# environment variable: any non-empty value other than "true" and "yes" will +# prevent unauthenticated access to endpoints without `@allow_unauthenticated`. +# Default: True +# c.ServerApp.allow_unauthenticated_access = True + +## Answer yes to any prompts. +# See also: JupyterApp.answer_yes +# c.ServerApp.answer_yes = False + +## " +# Require authentication to access prometheus metrics. +# Default: True +# c.ServerApp.authenticate_prometheus = True + +## The authorizer class to use. +# Default: 'jupyter_server.auth.authorizer.AllowAllAuthorizer' +# c.ServerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' + +## Reload the webapp when changes are made to any Python src files. +# Default: False +# c.ServerApp.autoreload = False + +## The base URL for the Jupyter server. +# +# Leading and trailing slashes can be omitted, +# and will automatically be added. +# Default: '/' +# c.ServerApp.base_url = '/' + +## Specify what command to use to invoke a web +# browser when starting the server. If not specified, the +# default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the +# BROWSER environment variable to override it. +# Default: '' +# c.ServerApp.browser = '' + +## The full path to an SSL/TLS certificate file. +# Default: '' +# c.ServerApp.certfile = '' + +## The full path to a certificate authority certificate for SSL/TLS client +# authentication. +# Default: '' +# c.ServerApp.client_ca = '' + +## Full path of a config file. +# See also: JupyterApp.config_file +# c.ServerApp.config_file = '' + +## Specify a config file to load. 
+# See also: JupyterApp.config_file_name +# c.ServerApp.config_file_name = '' + +## The config manager class to use +# Default: 'jupyter_server.services.config.manager.ConfigManager' +# c.ServerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' + +## The content manager class to use. +# Default: 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' +# c.ServerApp.contents_manager_class = 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' + +## DEPRECATED. Use IdentityProvider.cookie_options +# Default: {} +# c.ServerApp.cookie_options = {} + +## The random bytes used to secure cookies. +# By default this is generated on first start of the server and persisted across server +# sessions by writing the cookie secret into the `cookie_secret_file` file. +# When using an executable config file you can override this to be random at each server restart. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +# Default: b'' +# c.ServerApp.cookie_secret = b'' + +## The file where the cookie secret is stored. +# Default: '' +# c.ServerApp.cookie_secret_file = '' + +## Override URL shown to users. +# +# Replace actual URL, including protocol, address, port and base URL, +# with the given value when displaying URL to the users. Do not change +# the actual connection URL. If authentication token is enabled, the +# token is added to the custom URL automatically. +# +# This option is intended to be used when the URL to display to the user +# cannot be determined reliably by the Jupyter server (proxified +# or containerized setups for example). 
+# Default: '' +# c.ServerApp.custom_display_url = '' + +## The default URL to redirect to from `/` +# Default: '/' +# c.ServerApp.default_url = '/' + +## Disable cross-site-request-forgery protection +# +# Jupyter server includes protection from cross-site request forgeries, +# requiring API requests to either: +# +# - originate from pages served by this server (validated with XSRF cookie and token), or +# - authenticate with a token +# +# Some anonymous compute resources still desire the ability to run code, +# completely without authentication. +# These services can disable all authentication and security checks, +# with the full knowledge of what that implies. +# Default: False +# c.ServerApp.disable_check_xsrf = False + +## The directory to look at for external kernel connection files, if +# allow_external_kernels is True. Defaults to Jupyter +# runtime_dir/external_kernels. Make sure that this directory is not filled with +# left-over connection files, that could result in unnecessary kernel manager +# creations. +# Default: None +# c.ServerApp.external_connection_dir = None + +## handlers that should be loaded at higher priority than the default services +# Default: [] +# c.ServerApp.extra_services = [] + +## Extra paths to search for serving static files. +# +# This allows adding javascript/css to be available from the Jupyter server machine, +# or overriding individual files in the IPython +# Default: [] +# c.ServerApp.extra_static_paths = [] + +## Extra paths to search for serving jinja templates. +# +# Can be used to override templates from jupyter_server.templates. +# Default: [] +# c.ServerApp.extra_template_paths = [] + +## Open the named file when the application is launched. +# Default: '' +# c.ServerApp.file_to_run = '' + +## The URL prefix where files are opened directly. +# Default: 'notebooks' +# c.ServerApp.file_url_prefix = 'notebooks' + +## Generate default config file. 
+# See also: JupyterApp.generate_config
+# c.ServerApp.generate_config = False
+
+## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs
+# Default: {}
+# c.ServerApp.get_secure_cookie_kwargs = {}
+
+## The identity provider class to use.
+# Default: 'jupyter_server.auth.identity.PasswordIdentityProvider'
+# c.ServerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider'
+
+## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit
+# Default: 0.0
+# c.ServerApp.iopub_data_rate_limit = 0.0
+
+## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit
+# Default: 0.0
+# c.ServerApp.iopub_msg_rate_limit = 0.0
+
+## The IP address the Jupyter server will listen on.
+# Default: 'localhost'
+# c.ServerApp.ip = 'localhost'
+
+## Supply extra arguments that will be passed to Jinja environment.
+# Default: {}
+# c.ServerApp.jinja_environment_options = {}
+
+## Extra variables to supply to jinja templates when rendering.
+# Default: {}
+# c.ServerApp.jinja_template_vars = {}
+
+## Dict of Python modules to load as Jupyter server extensions. Entry values can
+# be used to enable and disable the loading of the extensions. The extensions
+# will be loaded in alphabetical order.
+# Default: {}
+# c.ServerApp.jpserver_extensions = {}
+
+## The kernel manager class to use.
+# Default: 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager'
+# c.ServerApp.kernel_manager_class = 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager'
+
+## The kernel spec manager class to use. Should be a subclass of
+# `jupyter_client.kernelspec.KernelSpecManager`.
+#
+# The Api of KernelSpecManager is provisional and might change without warning
+# between this version of Jupyter and the next stable one.
+# Default: 'builtins.object'
+# c.ServerApp.kernel_spec_manager_class = 'builtins.object'
+
+## The kernel websocket connection class to use.
+# Default: 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' +# c.ServerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol +# Default: '' +# c.ServerApp.kernel_ws_protocol = '' + +## The full path to a private key file for usage with SSL/TLS. +# Default: '' +# c.ServerApp.keyfile = '' + +## DEPRECATED. Use ZMQChannelsWebsocketConnection.limit_rate +# Default: False +# c.ServerApp.limit_rate = False + +## Hostnames to allow as local when allow_remote_access is False. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted +# as local as well. +# Default: ['localhost'] +# c.ServerApp.local_hostnames = ['localhost'] + +## The date format used by logging formatters for %(asctime)s +# See also: Application.log_datefmt +# c.ServerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# See also: Application.log_format +# c.ServerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# See also: Application.log_level +# c.ServerApp.log_level = 30 + +## +# See also: Application.logging_config +# c.ServerApp.logging_config = {} + +## The login handler class to use. +# Default: 'jupyter_server.auth.login.LegacyLoginHandler' +# c.ServerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' + +## The logout handler class to use. +# Default: 'jupyter_server.auth.logout.LogoutHandler' +# c.ServerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' + +## Sets the maximum allowed size of the client request body, specified in the +# Content-Length request header field. If the size in a request exceeds the +# configured value, a malformed HTTP message is returned to the client. +# +# Note: max_body_size is applied even in streaming mode. 
+# Default: 536870912 +# c.ServerApp.max_body_size = 536870912 + +## Gets or sets the maximum amount of memory, in bytes, that is allocated for use +# by the buffer manager. +# Default: 536870912 +# c.ServerApp.max_buffer_size = 536870912 + +## Gets or sets a lower bound on the open file handles process resource limit. +# This may need to be increased if you run into an OSError: [Errno 24] Too many +# open files. This is not applicable when running on Windows. +# Default: 0 +# c.ServerApp.min_open_files_limit = 0 + +## DEPRECATED, use root_dir. +# Default: '' +# c.ServerApp.notebook_dir = '' + +## Whether to open in a browser after starting. +# The specific browser used is platform dependent and +# determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser +# (ServerApp.browser) configuration option. +# Default: False +# c.ServerApp.open_browser = False + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password +# Default: '' +# c.ServerApp.password = '' + +## DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required +# Default: False +# c.ServerApp.password_required = False + +## The port the server will listen on (env: JUPYTER_PORT). +# Default: 0 +# c.ServerApp.port = 0 + +## The number of additional ports to try if the specified port is not available +# (env: JUPYTER_PORT_RETRIES). +# Default: 50 +# c.ServerApp.port_retries = 50 + +## Preferred starting directory to use for notebooks and kernels. +# ServerApp.preferred_dir is deprecated in jupyter-server 2.0. Use +# FileContentsManager.preferred_dir instead +# Default: '' +# c.ServerApp.preferred_dir = '' + +## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. +# Default: 'disabled' +# c.ServerApp.pylab = 'disabled' + +## If True, display controls to shut down the Jupyter server, such as menu items +# or buttons. +# Default: True +# c.ServerApp.quit_button = True + +## DEPRECATED. 
Use ZMQChannelsWebsocketConnection.rate_limit_window
+# Default: 0.0
+# c.ServerApp.rate_limit_window = 0.0
+
+## Reraise exceptions encountered loading server extensions?
+# Default: False
+# c.ServerApp.reraise_server_extension_failures = False
+
+## The directory to use for notebooks and kernels.
+# Default: ''
+# c.ServerApp.root_dir = ''
+
+## The session manager class to use.
+# Default: 'builtins.object'
+# c.ServerApp.session_manager_class = 'builtins.object'
+
+## Instead of starting the Application, dump configuration to stdout
+# See also: Application.show_config
+# c.ServerApp.show_config = False
+
+## Instead of starting the Application, dump configuration to stdout (as JSON)
+# See also: Application.show_config_json
+# c.ServerApp.show_config_json = False
+
+## Shut down the server after N seconds with no kernels running and no activity.
+# This can be used together with culling idle kernels
+# (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when
+# it's not in use. This is not precisely timed: it may shut down up to a minute
+# later. 0 (the default) disables this automatic shutdown.
+# Default: 0
+# c.ServerApp.shutdown_no_activity_timeout = 0
+
+## The UNIX socket the Jupyter server will listen on.
+# Default: ''
+# c.ServerApp.sock = ''
+
+## The permissions mode for UNIX socket creation (default: 0600).
+# Default: '0600'
+# c.ServerApp.sock_mode = '0600'
+
+## Supply SSL options for the tornado HTTPServer.
+# See the tornado docs for details.
+# Default: {}
+# c.ServerApp.ssl_options = {}
+
+## Paths to set up static files as immutable.
+#
+# This allows setting up the cache control of static files as immutable. It
+# should be used for static files named with a hash, for instance.
+# Default: []
+# c.ServerApp.static_immutable_cache = []
+
+## Supply overrides for terminado. Currently only supports "shell_command".
+# Default: {}
+# c.ServerApp.terminado_settings = {}
+
+## Set to False to disable terminals.
+#
+# This does *not* make the server more secure by itself.
+# Anything the user can do in a terminal, they can also do in a notebook.
+#
+# Terminals may also be automatically disabled if the terminado package
+# is not available.
+# Default: False
+# c.ServerApp.terminals_enabled = False
+
+## DEPRECATED. Use IdentityProvider.token
+# Default: ''
+# c.ServerApp.token = ''
+
+## Supply overrides for the tornado.web.Application that the Jupyter server uses.
+# Default: {}
+# c.ServerApp.tornado_settings = {}
+
+## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
+# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
+# SSL
+# Default: False
+# c.ServerApp.trust_xheaders = False
+
+## Disable launching browser by redirect file
+# For versions of notebook > 5.7.2, a security measure was added that
+# prevented the authentication token used to launch the browser from being visible.
+# This feature makes it difficult for other users on a multi-user system to
+# run code in your Jupyter session as you.
+# However, in some environments (like Windows Subsystem for Linux (WSL) and Chromebooks),
+# launching a browser using a redirect file can lead to the browser failing to load.
+# This is because of the difference in file structures/paths between the runtime and
+# the browser.
+#
+# Setting this to False will disable this behavior, allowing the browser
+# to launch by using a URL and visible token (as before).
+# Default: True
+# c.ServerApp.use_redirect_file = True
+
+## Specify where to open the server on startup. This is the
+# `new` argument passed to the standard library method `webbrowser.open`.
+# The behaviour is not guaranteed, but depends on browser support. Valid
+# values are:
+#
+# - 2 opens a new tab,
+# - 1 opens a new window,
+# - 0 opens in an existing window.
+#
+# See the `webbrowser.open` documentation for details.
+# Default: 2 +# c.ServerApp.webbrowser_open_new = 2 + +## Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +# Default: None +# c.ServerApp.websocket_compression_options = None + +## Configure the websocket ping interval in seconds. +# +# Websockets are long-lived connections that are used by some Jupyter Server +# extensions. +# +# Periodic pings help to detect disconnected clients and keep the connection +# active. If this is set to None, then no pings will be performed. +# +# When a ping is sent, the client has ``websocket_ping_timeout`` seconds to +# respond. If no response is received within this period, the connection will be +# closed from the server side. +# Default: 0 +# c.ServerApp.websocket_ping_interval = 0 + +## Configure the websocket ping timeout in seconds. +# +# See ``websocket_ping_interval`` for details. +# Default: 0 +# c.ServerApp.websocket_ping_timeout = 0 + +## The base URL for websockets, +# if it differs from the HTTP server (hint: it almost certainly doesn't). +# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +# Default: '' +# c.ServerApp.websocket_url = '' + +#------------------------------------------------------------------------------ +# ConfigManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## Config Manager used for storing frontend config + +## Name of the config directory. 
+# Default: 'serverconfig' +# c.ConfigManager.config_dir_name = 'serverconfig' + +#------------------------------------------------------------------------------ +# AsyncFileManagerMixin(FileManagerMixin) configuration +#------------------------------------------------------------------------------ +## Mixin for ContentsAPI classes that interact with the filesystem +# asynchronously. + +## Hash algorithm to use for file content, support by hashlib +# See also: FileManagerMixin.hash_algorithm +# c.AsyncFileManagerMixin.hash_algorithm = 'sha256' + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. +# See also: FileManagerMixin.use_atomic_writing +# c.AsyncFileManagerMixin.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# FileManagerMixin(LoggingConfigurable, Configurable) configuration +#------------------------------------------------------------------------------ +## Mixin for ContentsAPI classes that interact with the filesystem. +# +# Provides facilities for reading, writing, and copying files. +# +# Shared by FileContentsManager and FileCheckpoints. +# +# Note ---- Classes using this mixin must provide the following attributes: +# +# root_dir : unicode +# A directory against against which API-style paths are to be resolved. +# +# log : logging.Logger + +## Hash algorithm to use for file content, support by hashlib +# Choices: any of ['sm3', 'sha256', 'shake_128', 'blake2b', 'sha512', 'shake_256', 'sha3_512', 'md5-sha1', 'sha384', 'sha224', 'sha512_224', 'sha3_384', 'md5', 'sha1', 'blake2s', 'sha3_256', 'ripemd160', 'sha512_256', 'sha3_224'] +# Default: 'sha256' +# c.FileManagerMixin.hash_algorithm = 'sha256' + +## By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones. 
+# This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs). +# If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) +# Default: True +# c.FileManagerMixin.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager) configuration +#------------------------------------------------------------------------------ +## An async file contents manager. + +## Allow access to hidden files +# See also: ContentsManager.allow_hidden +# c.AsyncFileContentsManager.allow_hidden = False + +## If True, deleting a non-empty directory will always be allowed. +# See also: FileContentsManager.always_delete_dir +# c.AsyncFileContentsManager.always_delete_dir = False + +# See also: AsyncContentsManager.checkpoints +# c.AsyncFileContentsManager.checkpoints = None + +# See also: AsyncContentsManager.checkpoints_class +# c.AsyncFileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' + +# See also: AsyncContentsManager.checkpoints_kwargs +# c.AsyncFileContentsManager.checkpoints_kwargs = {} + +## If True (default), deleting files will send them to the +# See also: FileContentsManager.delete_to_trash +# c.AsyncFileContentsManager.delete_to_trash = True + +# See also: ContentsManager.event_logger +# c.AsyncFileContentsManager.event_logger = None + +## handler class to use when serving raw file requests. +# See also: ContentsManager.files_handler_class +# c.AsyncFileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. 
+# See also: ContentsManager.files_handler_params +# c.AsyncFileContentsManager.files_handler_params = {} + +## Hash algorithm to use for file content, support by hashlib +# See also: FileManagerMixin.hash_algorithm +# c.AsyncFileContentsManager.hash_algorithm = 'sha256' + +## +# See also: ContentsManager.hide_globs +# c.AsyncFileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## The max folder size that can be copied +# See also: FileContentsManager.max_copy_folder_size_mb +# c.AsyncFileContentsManager.max_copy_folder_size_mb = 500 + +## Python callable or importstring thereof +# See also: ContentsManager.post_save_hook +# c.AsyncFileContentsManager.post_save_hook = None + +## Python callable or importstring thereof +# See also: ContentsManager.pre_save_hook +# c.AsyncFileContentsManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. This is an API path (`/` +# separated, relative to root dir) +# See also: ContentsManager.preferred_dir +# c.AsyncFileContentsManager.preferred_dir = '' + +# See also: FileContentsManager.root_dir +# c.AsyncFileContentsManager.root_dir = '' + +## The base name used when creating untitled directories. +# See also: ContentsManager.untitled_directory +# c.AsyncFileContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# See also: ContentsManager.untitled_file +# c.AsyncFileContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# See also: ContentsManager.untitled_notebook +# c.AsyncFileContentsManager.untitled_notebook = 'Untitled' + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. 
+# See also: FileManagerMixin.use_atomic_writing +# c.AsyncFileContentsManager.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# FileContentsManager(FileManagerMixin, ContentsManager) configuration +#------------------------------------------------------------------------------ +## A file contents manager. + +## Allow access to hidden files +# See also: ContentsManager.allow_hidden +# c.FileContentsManager.allow_hidden = False + +## If True, deleting a non-empty directory will always be allowed. +# WARNING this may result in files being permanently removed; e.g. on Windows, +# if the data size is too big for the trash/recycle bin the directory will be permanently +# deleted. If False (default), the non-empty directory will be sent to the trash only +# if safe. And if ``delete_to_trash`` is True, the directory won't be deleted. +# Default: False +# c.FileContentsManager.always_delete_dir = False + +# See also: ContentsManager.checkpoints +# c.FileContentsManager.checkpoints = None + +# See also: ContentsManager.checkpoints_class +# c.FileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints' + +# See also: ContentsManager.checkpoints_kwargs +# c.FileContentsManager.checkpoints_kwargs = {} + +## If True (default), deleting files will send them to the +# platform's trash/recycle bin, where they can be recovered. If False, +# deleting files really deletes them. +# Default: True +# c.FileContentsManager.delete_to_trash = True + +# See also: ContentsManager.event_logger +# c.FileContentsManager.event_logger = None + +## handler class to use when serving raw file requests. +# See also: ContentsManager.files_handler_class +# c.FileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. 
+# See also: ContentsManager.files_handler_params +# c.FileContentsManager.files_handler_params = {} + +## Hash algorithm to use for file content, support by hashlib +# See also: FileManagerMixin.hash_algorithm +# c.FileContentsManager.hash_algorithm = 'sha256' + +## +# See also: ContentsManager.hide_globs +# c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## The max folder size that can be copied +# Default: 500 +# c.FileContentsManager.max_copy_folder_size_mb = 500 + +## Python callable or importstring thereof +# See also: ContentsManager.post_save_hook +# c.FileContentsManager.post_save_hook = None + +## Python callable or importstring thereof +# See also: ContentsManager.pre_save_hook +# c.FileContentsManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. This is an API path (`/` +# separated, relative to root dir) +# See also: ContentsManager.preferred_dir +# c.FileContentsManager.preferred_dir = '' + +# Default: '' +# c.FileContentsManager.root_dir = '' + +## The base name used when creating untitled directories. +# See also: ContentsManager.untitled_directory +# c.FileContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# See also: ContentsManager.untitled_file +# c.FileContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# See also: ContentsManager.untitled_notebook +# c.FileContentsManager.untitled_notebook = 'Untitled' + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. 
+# See also: FileManagerMixin.use_atomic_writing +# c.FileContentsManager.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# AsyncLargeFileManager(AsyncFileContentsManager) configuration +#------------------------------------------------------------------------------ +## Handle large file upload asynchronously + +## Allow access to hidden files +# See also: ContentsManager.allow_hidden +# c.AsyncLargeFileManager.allow_hidden = False + +## If True, deleting a non-empty directory will always be allowed. +# See also: FileContentsManager.always_delete_dir +# c.AsyncLargeFileManager.always_delete_dir = False + +# See also: AsyncContentsManager.checkpoints +# c.AsyncLargeFileManager.checkpoints = None + +# See also: AsyncContentsManager.checkpoints_class +# c.AsyncLargeFileManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' + +# See also: AsyncContentsManager.checkpoints_kwargs +# c.AsyncLargeFileManager.checkpoints_kwargs = {} + +## If True (default), deleting files will send them to the +# See also: FileContentsManager.delete_to_trash +# c.AsyncLargeFileManager.delete_to_trash = True + +# See also: ContentsManager.event_logger +# c.AsyncLargeFileManager.event_logger = None + +## handler class to use when serving raw file requests. +# See also: ContentsManager.files_handler_class +# c.AsyncLargeFileManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. 
+# See also: ContentsManager.files_handler_params +# c.AsyncLargeFileManager.files_handler_params = {} + +## Hash algorithm to use for file content, support by hashlib +# See also: FileManagerMixin.hash_algorithm +# c.AsyncLargeFileManager.hash_algorithm = 'sha256' + +## +# See also: ContentsManager.hide_globs +# c.AsyncLargeFileManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## The max folder size that can be copied +# See also: FileContentsManager.max_copy_folder_size_mb +# c.AsyncLargeFileManager.max_copy_folder_size_mb = 500 + +## Python callable or importstring thereof +# See also: ContentsManager.post_save_hook +# c.AsyncLargeFileManager.post_save_hook = None + +## Python callable or importstring thereof +# See also: ContentsManager.pre_save_hook +# c.AsyncLargeFileManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. This is an API path (`/` +# separated, relative to root dir) +# See also: ContentsManager.preferred_dir +# c.AsyncLargeFileManager.preferred_dir = '' + +# See also: FileContentsManager.root_dir +# c.AsyncLargeFileManager.root_dir = '' + +## The base name used when creating untitled directories. +# See also: ContentsManager.untitled_directory +# c.AsyncLargeFileManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# See also: ContentsManager.untitled_file +# c.AsyncLargeFileManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# See also: ContentsManager.untitled_notebook +# c.AsyncLargeFileManager.untitled_notebook = 'Untitled' + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. 
+# See also: FileManagerMixin.use_atomic_writing +# c.AsyncLargeFileManager.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# AsyncContentsManager(ContentsManager) configuration +#------------------------------------------------------------------------------ +## Base class for serving files and directories asynchronously. + +## Allow access to hidden files +# See also: ContentsManager.allow_hidden +# c.AsyncContentsManager.allow_hidden = False + +# Default: None +# c.AsyncContentsManager.checkpoints = None + +# Default: 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' +# c.AsyncContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' + +# Default: {} +# c.AsyncContentsManager.checkpoints_kwargs = {} + +# See also: ContentsManager.event_logger +# c.AsyncContentsManager.event_logger = None + +## handler class to use when serving raw file requests. +# See also: ContentsManager.files_handler_class +# c.AsyncContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. +# See also: ContentsManager.files_handler_params +# c.AsyncContentsManager.files_handler_params = {} + +## +# See also: ContentsManager.hide_globs +# c.AsyncContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## Python callable or importstring thereof +# See also: ContentsManager.post_save_hook +# c.AsyncContentsManager.post_save_hook = None + +## Python callable or importstring thereof +# See also: ContentsManager.pre_save_hook +# c.AsyncContentsManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. 
This is an API path (`/` +# separated, relative to root dir) +# See also: ContentsManager.preferred_dir +# c.AsyncContentsManager.preferred_dir = '' + +# See also: ContentsManager.root_dir +# c.AsyncContentsManager.root_dir = '/' + +## The base name used when creating untitled directories. +# See also: ContentsManager.untitled_directory +# c.AsyncContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# See also: ContentsManager.untitled_file +# c.AsyncContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# See also: ContentsManager.untitled_notebook +# c.AsyncContentsManager.untitled_notebook = 'Untitled' + +#------------------------------------------------------------------------------ +# ContentsManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## Base class for serving files and directories. +# +# This serves any text or binary file, as well as directories, with special +# handling for JSON notebook documents. +# +# Most APIs take a path argument, which is always an API-style unicode path, and +# always refers to a directory. +# +# - unicode, not url-escaped +# - '/'-separated +# - leading and trailing '/' will be stripped +# - if unspecified, path defaults to '', +# indicating the root path. + +## Allow access to hidden files +# Default: False +# c.ContentsManager.allow_hidden = False + +# Default: None +# c.ContentsManager.checkpoints = None + +# Default: 'jupyter_server.services.contents.checkpoints.Checkpoints' +# c.ContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints' + +# Default: {} +# c.ContentsManager.checkpoints_kwargs = {} + +# Default: None +# c.ContentsManager.event_logger = None + +## handler class to use when serving raw file requests. 
+# +# Default is a fallback that talks to the ContentsManager API, +# which may be inefficient, especially for large files. +# +# Local files-based ContentsManagers can use a StaticFileHandler subclass, +# which will be much more efficient. +# +# Access to these files should be Authenticated. +# Default: 'jupyter_server.files.handlers.FilesHandler' +# c.ContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. +# +# For example, StaticFileHandlers generally expect a `path` argument +# specifying the root directory from which to serve files. +# Default: {} +# c.ContentsManager.files_handler_params = {} + +## Glob patterns to hide in file and directory listings. +# Default: ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] +# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] + +## Python callable or importstring thereof +# +# to be called on the path of a file just saved. +# +# This can be used to process the file on disk, +# such as converting the notebook to a script or HTML via nbconvert. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(os_path=os_path, model=model, contents_manager=instance) +# +# - path: the filesystem path to the file just written +# - model: the model representing the file +# - contents_manager: this ContentsManager instance +# Default: None +# c.ContentsManager.post_save_hook = None + +## Python callable or importstring thereof +# +# To be called on a contents model prior to save. +# +# This can be used to process the structure, +# such as removing notebook outputs or other side effects that +# should not be saved. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(path=path, model=model, contents_manager=self) +# +# - model: the model to be saved. Includes file contents. +# Modifying this dict will affect the file that is stored. 
+# - path: the API path of the save destination +# - contents_manager: this ContentsManager instance +# Default: None +# c.ContentsManager.pre_save_hook = None + +## Preferred starting directory to use for notebooks. This is an API path (`/` +# separated, relative to root dir) +# Default: '' +# c.ContentsManager.preferred_dir = '' + +# Default: '/' +# c.ContentsManager.root_dir = '/' + +## The base name used when creating untitled directories. +# Default: 'Untitled Folder' +# c.ContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# Default: 'untitled' +# c.ContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# Default: 'Untitled' +# c.ContentsManager.untitled_notebook = 'Untitled' + +#------------------------------------------------------------------------------ +# BaseKernelWebsocketConnection(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## A configurable base class for connecting Kernel WebSockets to ZMQ sockets. + +## Preferred kernel message protocol over websocket to use (default: None). If an +# empty string is passed, select the legacy protocol. If None, the selected +# protocol will depend on what the front-end supports (usually the most recent +# protocol supported by the back-end and the front-end). +# Default: None +# c.BaseKernelWebsocketConnection.kernel_ws_protocol = None + +# Default: None +# c.BaseKernelWebsocketConnection.session = None + +#------------------------------------------------------------------------------ +# ZMQChannelsWebsocketConnection(BaseKernelWebsocketConnection) configuration +#------------------------------------------------------------------------------ +## A Jupyter Server Websocket Connection + +## (bytes/sec) +# Maximum rate at which stream output can be sent on iopub before they are +# limited. 
+# Default: 1000000 +# c.ZMQChannelsWebsocketConnection.iopub_data_rate_limit = 1000000 + +## (msgs/sec) +# Maximum rate at which messages can be sent on iopub before they are +# limited. +# Default: 1000 +# c.ZMQChannelsWebsocketConnection.iopub_msg_rate_limit = 1000 + +## Preferred kernel message protocol over websocket to use (default: None). If an +# empty string is passed, select the legacy protocol. If None, the selected +# protocol will depend on what the front-end supports (usually the most recent +# protocol supported by the back-end and the front-end). +# See also: BaseKernelWebsocketConnection.kernel_ws_protocol +# c.ZMQChannelsWebsocketConnection.kernel_ws_protocol = None + +## Whether to limit the rate of IOPub messages (default: True). If True, use +# iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window to tune +# the rate. +# Default: True +# c.ZMQChannelsWebsocketConnection.limit_rate = True + +## (sec) Time window used to +# check the message and data rate limits. +# Default: 3 +# c.ZMQChannelsWebsocketConnection.rate_limit_window = 3 + +# See also: BaseKernelWebsocketConnection.session +# c.ZMQChannelsWebsocketConnection.session = None + +#------------------------------------------------------------------------------ +# AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager) configuration +#------------------------------------------------------------------------------ +## An asynchronous mapping kernel manager. + +## Whether to send tracebacks to clients on exceptions. +# See also: MappingKernelManager.allow_tracebacks +# c.AsyncMappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# See also: MappingKernelManager.allowed_message_types +# c.AsyncMappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. 
+# See also: MappingKernelManager.buffer_offline_messages +# c.AsyncMappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. +# See also: MappingKernelManager.cull_busy +# c.AsyncMappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# See also: MappingKernelManager.cull_connected +# c.AsyncMappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. +# See also: MappingKernelManager.cull_idle_timeout +# c.AsyncMappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# See also: MappingKernelManager.cull_interval +# c.AsyncMappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.AsyncMappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# See also: MappingKernelManager.kernel_info_timeout +# c.AsyncMappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: AsyncMultiKernelManager.kernel_manager_class +# c.AsyncMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' + +# See also: MappingKernelManager.root_dir +# c.AsyncMappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.AsyncMappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# See also: MappingKernelManager.traceback_replacement_message +# c.AsyncMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' 
+ +## List of kernel message types excluded from user activity tracking. +# See also: MappingKernelManager.untracked_message_types +# c.AsyncMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +## Whether to make kernels available before the process has started. The +# See also: AsyncMultiKernelManager.use_pending_kernels +# c.AsyncMappingKernelManager.use_pending_kernels = False + +#------------------------------------------------------------------------------ +# MappingKernelManager(MultiKernelManager) configuration +#------------------------------------------------------------------------------ +## A KernelManager that handles - File mapping - HTTP error handling - Kernel +# message filtering + +## Whether to send tracebacks to clients on exceptions. +# Default: True +# c.MappingKernelManager.allow_tracebacks = True + +## White list of allowed kernel message types. +# When the list is empty, all message types are allowed. +# Default: [] +# c.MappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# +# When True (default), messages are buffered and replayed on reconnect, +# avoiding lost messages due to interrupted connectivity. +# +# Disable if long-running kernels will produce too much output while +# no frontends are connected. +# Default: True +# c.MappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. +# Only effective if cull_idle_timeout > 0. 
+# Default: False +# c.MappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. +# Only effective if cull_idle_timeout > 0. +# Default: False +# c.MappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be culled. +# Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled +# for users with poor network connections. +# Default: 0 +# c.MappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +# Default: 300 +# c.MappingKernelManager.cull_interval = 300 + +## The name of the default kernel to start +# See also: MultiKernelManager.default_kernel_name +# c.MappingKernelManager.default_kernel_name = 'python3' + +## Timeout for giving up on a kernel (in seconds). +# +# On starting and restarting kernels, we check whether the +# kernel is running and responsive by sending kernel_info_requests. +# This sets the timeout in seconds for how long the kernel can take +# before being presumed dead. +# This affects the MappingKernelManager (which handles kernel restarts) +# and the ZMQChannelsHandler (which handles the startup). +# Default: 60 +# c.MappingKernelManager.kernel_info_timeout = 60 + +## The kernel manager class. This is configurable to allow +# See also: MultiKernelManager.kernel_manager_class +# c.MappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +# Default: '' +# c.MappingKernelManager.root_dir = '' + +## Share a single zmq.Context to talk to all my kernels +# See also: MultiKernelManager.shared_context +# c.MappingKernelManager.shared_context = True + +## Message to print when allow_tracebacks is False, and an exception occurs +# Default: 'An exception occurred at runtime, which is not shown due to security reasons.' 
+# c.MappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' + +## List of kernel message types excluded from user activity tracking. +# +# This should be a superset of the message types sent on any channel other +# than the shell channel. +# Default: ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] +# c.MappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] + +#------------------------------------------------------------------------------ +# SessionManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## A session manager. + +## The filesystem path to SQLite Database file (e.g. +# /path/to/session_database.db). By default, the session database is stored in- +# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the +# current Jupyter Server shuts down. +# Default: ':memory:' +# c.SessionManager.database_filepath = ':memory:' + +#------------------------------------------------------------------------------ +# NotebookNotary(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## A class for computing and verifying notebook signatures. 
+ +## The hashing algorithm used to sign notebooks. +# Choices: any of ['sha3_512', 'sha1', 'blake2b', 'sha256', 'sha3_384', 'sha512', 'blake2s', 'sha3_256', 'md5', 'sha3_224', 'sha384', 'sha224'] +# Default: 'sha256' +# c.NotebookNotary.algorithm = 'sha256' + +## The storage directory for notary secret and database. +# Default: '' +# c.NotebookNotary.data_dir = '' + +## The sqlite file in which to store notebook signatures. +# By default, this will be in your Jupyter data directory. +# You can set it to ':memory:' to disable sqlite writing to the filesystem. +# Default: '' +# c.NotebookNotary.db_file = '' + +## The secret key with which notebooks are signed. +# Default: b'' +# c.NotebookNotary.secret = b'' + +## The file where the secret key is stored. +# Default: '' +# c.NotebookNotary.secret_file = '' + +## A callable returning the storage backend for notebook signatures. +# The default uses an SQLite database. +# Default: traitlets.Undefined +# c.NotebookNotary.store_factory = traitlets.Undefined + +#------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +#------------------------------------------------------------------------------ +## This is an application. + +## The date format used by logging formatters for %(asctime)s +# Default: '%Y-%m-%d %H:%M:%S' +# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# Default: '[%(name)s]%(highlevel)s %(message)s' +# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'] +# Default: 30 +# c.Application.log_level = 30 + +## Configure additional log handlers. +# +# The default stderr logs handler is configured by the log_level, log_datefmt +# and log_format settings. +# +# This configuration can be used to configure additional handlers (e.g. 
to +# output the log to a file) or for finer control over the default handlers. +# +# If provided this should be a logging configuration dictionary, for more +# information see: +# https://docs.python.org/3/library/logging.config.html#logging-config- +# dictschema +# +# This dictionary is merged with the base logging configuration which defines +# the following: +# +# * A logging formatter intended for interactive use called +# ``console``. +# * A logging handler that writes to stderr called +# ``console`` which uses the formatter ``console``. +# * A logger with the name of this application set to ``DEBUG`` +# level. +# +# This example adds a new handler that writes to a file: +# +# .. code-block:: python +# +# c.Application.logging_config = { +# "handlers": { +# "file": { +# "class": "logging.FileHandler", +# "level": "DEBUG", +# "filename": "", +# } +# }, +# "loggers": { +# "": { +# "level": "DEBUG", +# # NOTE: if you don't list the default "console" +# # handler here then it will be disabled +# "handlers": ["console", "file"], +# }, +# }, +# } +# Default: {} +# c.Application.logging_config = {} + +## Instead of starting the Application, dump configuration to stdout +# Default: False +# c.Application.show_config = False + +## Instead of starting the Application, dump configuration to stdout (as JSON) +# Default: False +# c.Application.show_config_json = False diff --git a/beaker_kernel/builder/beaker.py b/beaker_kernel/builder/beaker.py index 15f7adc2..8f756dd4 100644 --- a/beaker_kernel/builder/beaker.py +++ b/beaker_kernel/builder/beaker.py @@ -147,59 +147,58 @@ def initialize(self, version, build_data): from beaker_kernel.lib.integrations.base import BaseIntegrationProvider from beaker_kernel.lib.context import BeakerContext from beaker_kernel.lib.subkernel import BeakerSubkernel + from beaker_kernel.lib.extension import BeakerExtension, BeakerCLICommands dest = os.path.join(self.root, "build", "data_share_beaker") - search_paths = self.build_config.packages 
or [] + + # MappingType = typing.Literal["contexts", "subkernels", "apps", "commands"] + type_map = { + "contexts": BeakerContext, + "subkernels": BeakerSubkernel, + "apps": BeakerApp, + "integrations": BaseIntegrationProvider, + "commands": BeakerCLICommands, + "extensions": BeakerExtension, + } + maps = {} self.add_packages_to_path() - context_class_defs = self.find_slugged_subclasses_of(BeakerContext) - subkernel_class_defs = self.find_slugged_subclasses_of(BeakerSubkernel) - app_class_defs = self.find_slugged_subclasses_of(BeakerApp) - integration_provider_class_defs = self.find_slugged_subclasses_of(BaseIntegrationProvider) - integration_provider_classes = [class_def.cls for class_def in integration_provider_class_defs.values()] + for type, cls in type_map.items(): + maps[type] = self.find_slugged_subclasses_of(cls) + integration_provider_classes = [class_def.cls for class_def in maps["integrations"].values()] integration_data = self.find_integration_data_files(integration_provider_classes) self.remove_packages_from_path() - if context_class_defs: - print( "Found the following contexts:") - for slug, class_def in context_class_defs.items(): - print(f" '{slug}': {class_def.class_name} in package {class_def.mod_str}") - print() - if subkernel_class_defs: - print( "Found the following subkernels:") - for slug, class_def in subkernel_class_defs.items(): - print(f" '{slug}': {class_def.class_name} in package {class_def.mod_str}") + for type, classes in maps.items(): + if not classes: + continue + entry_point_name = f"beaker.{type}" + if entry_point_name in self.metadata.core.entry_points: + entry_point_map = self.metadata.core.entry_points[entry_point_name] + else: + entry_point_map = {} + self.metadata.core.entry_points[entry_point_name] = entry_point_map + print(f"Found the following {type}:") + for slug, (pkg, cls, _) in classes.items(): + target = f"{pkg}:{cls}" + if slug not in entry_point_map: + print(f" '{slug}': {target}") + entry_point_map[slug] = target + 
else: + print(f" Skipping '{slug}' ({target}) because it has already been defined as '{entry_point_map[slug]}") print() - if app_class_defs: - print("Found app: ") - for slug, class_def in app_class_defs.items(): - print(f" '{slug}': {class_def.class_name} in package {class_def.mod_str}") - if integration_provider_class_defs: - print("Found integration providers: ") - for slug, class_def in integration_provider_class_defs.items(): - print(f" '{slug}': {class_def.class_name} in package {class_def.mod_str}") + if integration_data: print("Found integration data: ") for dest_path, data_path in integration_data.items(): print(f" '{dest_path}': {data_path}") + print() # Recreate the destination directory, clearing any existing build artifacts if os.path.exists(dest): shutil.rmtree(dest) os.makedirs(dest) - # Write out mappings for each context and subkernel to an individual json file - for typename, src in [("contexts", context_class_defs), ("subkernels", subkernel_class_defs), ("apps", app_class_defs), ("integrations", integration_provider_class_defs)]: - dest_dir = os.path.join(dest, typename) - os.makedirs(dest_dir, exist_ok=True) - # for slug, (package_name, class_name) in src.items(): - for slug, class_def in src.items(): - dest_file = os.path.join(dest_dir, f"{slug}.json") - with open(dest_file, "w") as f: - json.dump({"slug": slug, "package": class_def.mod_str, "class_name": class_def.class_name}, f, indent=2) - # Add shared-data mappings for each file so it is installed to the correct location - self.build_config.shared_data[dest_file] = f"share/beaker/{typename}/{slug}.json" - # Copy data files to proper location in build directory and update configuration for integration_data_path, integration_data_source in integration_data.items(): integration_data_dest = os.path.join(dest, "data", integration_data_path) diff --git a/beaker_kernel/cli/config.py b/beaker_kernel/cli/config.py index eb90ce89..cdbd638e 100644 --- a/beaker_kernel/cli/config.py +++ 
b/beaker_kernel/cli/config.py @@ -1,3 +1,5 @@ +import importlib +import inspect import os import toml from collections import deque @@ -8,7 +10,7 @@ import click from beaker_kernel.lib.config import locate_config, config, ConfigClass, Table, recursiveOptionalUpdate, Choice -from beaker_kernel.lib.autodiscovery import LIB_LOCATIONS +from beaker_kernel.lib.utils import import_dotted_class SENSITIVE_STR_REPR = "*" * 8 diff --git a/beaker_kernel/cli/main.py b/beaker_kernel/cli/main.py index 31101c4b..acc96bf0 100644 --- a/beaker_kernel/cli/main.py +++ b/beaker_kernel/cli/main.py @@ -1,7 +1,7 @@ import click import importlib -from beaker_kernel.lib.autodiscovery import find_mappings +from beaker_kernel.lib.autodiscovery import find_mappings, autodiscover class BeakerCli(click.Group): @@ -14,36 +14,16 @@ def __init__(self, *args, **kwargs) -> None: self.subcommands = {} self.apps = {} - for _, command_info in find_mappings("commands"): - group_name = command_info["group_name"] - module = command_info["module"] - entry_point = command_info.get("entry_point", "cli_commands") - try: - module = importlib.import_module(module) - except ImportError: - click.echo(f"Unable to load item {entry_point} from module {module}. Skipping...", err=True) - continue - entry = getattr(module, entry_point, None) - if not entry: - click.echo(f"Unable to load item {entry_point} from module {module}. Skipping...", err=True) - continue - if not isinstance(entry, (click.Command, click.Group)): - click.echo(f"Entry point {entry_point} in module {module} is not a click Group or Command class. 
Skipping...", err=True) - self.subcommands[group_name] = entry + # Register commands from extensions + for group_name, entry in autodiscover("commands").items(): + group = entry.as_group() + self.subcommands[group_name] = group - for _, app_info in find_mappings("apps"): - app_name = app_info["slug"] - package = app_info["package"] - class_name = app_info["class_name"] - app_import_str = f"{package}.{class_name}" - try: - module = importlib.import_module(package) - cls = getattr(module, class_name, None) - except ImportError: - cls = None - if cls is None: + # Register Beaker app commands + for app_name, entry in autodiscover("apps").items(): + if entry is None: continue - self.apps[app_name] = app_import_str + self.apps[app_name] = f"{entry.__module__}.{entry.__name__}" def list_commands(self, ctx): commands = super().list_commands(ctx) @@ -72,6 +52,7 @@ def cli(): from .project import project from .config import config_group from .running import dev, notebook +from .server import server from .context import context from .subkernel import subkernel from .app import app @@ -80,6 +61,7 @@ def cli(): cli.add_command(config_group) cli.add_command(context) cli.add_command(dev) +cli.add_command(server) cli.add_command(notebook) cli.add_command(subkernel) cli.add_command(app) diff --git a/beaker_kernel/cli/running.py b/beaker_kernel/cli/running.py index 1a04ebf0..b651003a 100644 --- a/beaker_kernel/cli/running.py +++ b/beaker_kernel/cli/running.py @@ -23,9 +23,14 @@ def notebook(ctx, extra_args, beakerapp_cls=None): Start Beaker in local mode and opens a notebook. 
""" from beaker_kernel.service.notebook import BeakerNotebookApp + from beaker_kernel.lib.config import config + from jupyter_core.utils import ensure_event_loop app = None + loop = ensure_event_loop() try: - app = BeakerNotebookApp.initialize_server(argv=extra_args) + app = BeakerNotebookApp.instance(**{"IdentityProvider.token": config.jupyter_token}) + app.initialize(argv=extra_args) + config.jupyter_server = app.connection_url set_config_from_app(app) app.start() except (InterruptedError, KeyboardInterrupt, EOFError) as err: @@ -33,6 +38,7 @@ def notebook(ctx, extra_args, beakerapp_cls=None): finally: if app: app.stop() + loop.close() @click.group(name="dev", invoke_without_command=True) @@ -55,16 +61,20 @@ def dev(ctx: click.Context, no_open_notebook): @click.argument("extra_args", nargs=-1, type=click.UNPROCESSED) @click.option("--open-notebook", "-n", is_flag=True, default=False, type=bool, help="Open a notebook in a webbrowser.") def serve(open_notebook, extra_args): + from jupyter_core.utils import ensure_event_loop from beaker_kernel.service.dev import BeakerNotebookApp + loop = ensure_event_loop() try: - app = BeakerNotebookApp.initialize_server(argv=extra_args) + app = BeakerNotebookApp.instance() set_config_from_app(app) + app.initialize(argv=extra_args) if open_notebook: webbrowser.open(app.public_url) app.start() finally: app.stop() + loop.close() @dev.command(context_settings={"ignore_unknown_options": True, "allow_extra_args": True}) diff --git a/beaker_kernel/cli/server.py b/beaker_kernel/cli/server.py new file mode 100644 index 00000000..038f7b8d --- /dev/null +++ b/beaker_kernel/cli/server.py @@ -0,0 +1,210 @@ +import importlib +import inspect +import pkgutil +import subprocess +import sys +import tempfile +from pathlib import Path + +import click +import psutil + +from beaker_kernel import service +from beaker_kernel.service.base import BaseBeakerApp + + +TEMP_DIR = Path(tempfile.gettempdir()) + + +@click.group(name="server") +def server(): + 
""" + Options for finding, configuring, and running Beaker Servers + """ + pass + + +@server.command() +def list_types(): + """ + List all available Beaker server types. + """ + service_types = [] + + # Find all modules in beaker_kernel.service package + for finder, name, ispkg in pkgutil.iter_modules(service.__path__, service.__name__ + "."): + if not ispkg: # Only include modules, not subpackages + module_name = name.split('.')[-1] # Get just the module name + # Skip internal modules + if not module_name.startswith('_') and module_name not in ['handlers', 'admin_utils']: + service_types.append(module_name) + + click.echo("Available Beaker server types:") + for service_type in sorted(service_types): + click.echo(f" {service_type}") + + +@server.command(context_settings={"ignore_unknown_options": True, "allow_extra_args": True}) +@click.argument("server_type", type=click.STRING, default="server") +@click.option("--force", is_flag=True, help="Force start even if server is already running") +@click.option("--port", type=int, default=8888, help="Port to run server on") +@click.option("--daemon", "-d", is_flag=True, help="Run server in daemon mode") +@click.pass_context +def start(ctx, server_type, force, port, daemon): + """ + Start a Beaker Server instance + """ + # Check if server is already running + pidfile = TEMP_DIR / f"beaker_{server_type}_{port}.pid" + + if pidfile.exists() and not force: + try: + with open(pidfile) as f: + pid = int(f.read().strip()) + if psutil.pid_exists(pid): + proc = psutil.Process(pid) + if any('beaker_kernel.service' in ' '.join(cmd) for cmd in [proc.cmdline()]): + click.echo(f"Beaker {server_type} server is already running on port {port} (PID: {pid})") + click.echo("Use --force to start anyway") + return + except (ValueError, FileNotFoundError, psutil.NoSuchProcess): + # PID file exists but process is not running, remove stale pidfile + pidfile.unlink(missing_ok=True) + + # Build command + cmd = [sys.executable, "-m", 
f"beaker_kernel.service.{server_type}", "--port", str(port)] + + # Add any extra arguments from ctx.args + if ctx.args: + cmd.extend(ctx.args) + + click.echo(f"Starting Beaker {server_type} server on port {port}...") + + if daemon: + # Start in daemon mode + proc = subprocess.Popen(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, start_new_session=True) + + # Save PID to file + with open(pidfile, 'w') as f: + f.write(str(proc.pid)) + + click.echo(f"Server started in daemon mode (PID: {proc.pid})") + else: + # Start in foreground + try: + proc = subprocess.Popen(cmd) + # Save PID to file + with open(pidfile, 'w') as f: + f.write(str(proc.pid)) + + # Wait for process + proc.wait() + except KeyboardInterrupt: + click.echo("\nShutting down server...") + proc.terminate() + proc.wait() + finally: + # Clean up PID file + pidfile.unlink(missing_ok=True) + + +@server.command(context_settings={"ignore_unknown_options": True, "allow_extra_args": True}) +@click.argument("server_type", type=click.STRING, default="server") +@click.option("--port", type=int, default=8888, help="Port the server is running on") +@click.option("--all", "stop_all", is_flag=True, help="Stop all Beaker servers") +@click.pass_context +def stop(ctx, server_type, port, stop_all): + """ + Stop a Beaker Server instance + """ + if stop_all: + # Stop all beaker servers + pidfiles = list(TEMP_DIR.glob("beaker_*.pid")) + stopped_count = 0 + + for pidfile in pidfiles: + try: + with open(pidfile) as f: + pid = int(f.read().strip()) + + if psutil.pid_exists(pid): + proc = psutil.Process(pid) + if any('beaker_kernel.service' in ' '.join(cmd) for cmd in [proc.cmdline()]): + click.echo(f"Stopping Beaker server (PID: {pid})...") + proc.terminate() + try: + proc.wait(timeout=10) + except psutil.TimeoutExpired: + proc.kill() + stopped_count += 1 + + pidfile.unlink(missing_ok=True) + except (ValueError, FileNotFoundError, psutil.NoSuchProcess) as e: + pidfile.unlink(missing_ok=True) + + if stopped_count > 0: + 
click.echo(f"Stopped {stopped_count} Beaker server(s)") + else: + click.echo("No running Beaker servers found") + return + + # Stop specific server + pidfile = TEMP_DIR / f"beaker_{server_type}_{port}.pid" + + if not pidfile.exists(): + click.echo(f"No Beaker {server_type} server running on port {port}") + return + + try: + with open(pidfile) as f: + pid = int(f.read().strip()) + + if psutil.pid_exists(pid): + proc = psutil.Process(pid) + if any('beaker_kernel.service' in ' '.join(cmd) for cmd in [proc.cmdline()]): + click.echo(f"Stopping Beaker {server_type} server (PID: {pid})...") + proc.terminate() + try: + proc.wait(timeout=10) + click.echo("Server stopped") + except psutil.TimeoutExpired: + click.echo("Server did not stop gracefully, killing...") + proc.kill() + click.echo("Server killed") + else: + click.echo(f"PID {pid} is not a Beaker server process") + else: + click.echo(f"Process {pid} is not running") + + pidfile.unlink(missing_ok=True) + except (ValueError, FileNotFoundError, psutil.NoSuchProcess) as e: + click.echo(f"Error stopping server: {e}") + pidfile.unlink(missing_ok=True) + + +@server.command() +@click.option("--file", "-f", "config_file", type=click.Path(exists=False)) +@click.argument("server_type", type=click.STRING, default="") +def generate_config(server_type=None, config_file=None): + """ + Generate a server configuration file with all available options. 
+ """ + app_class: type[BaseBeakerApp] + if server_type: + app_mod_str: str = f"beaker_kernel.service.{server_type}" + app_module = importlib.import_module(app_mod_str) + app_classes = inspect.getmembers(app_module, lambda obj: isinstance(obj, type) and issubclass(obj, BaseBeakerApp) and obj != BaseBeakerApp) + if app_classes: + _, app_class = app_classes[0] + else: + raise LookupError("Unable to determine intended BeakerAppClass") + if not config_file: + config_file = f"beaker_{app_class._app_slug()}_config.py" + else: + app_class = BaseBeakerApp + if not config_file: + config_file = "beaker_config.py" + + app: BaseBeakerApp = app_class(config_file=config_file) + app.initialize(argv=[]) + app.write_default_config() diff --git a/beaker_kernel/kernel.py b/beaker_kernel/kernel.py index d448b106..10b53476 100644 --- a/beaker_kernel/kernel.py +++ b/beaker_kernel/kernel.py @@ -48,6 +48,8 @@ class BeakerKernel(KernelProxyManager): "file_extension": ".txt", } + session_config: dict[str, str] + beaker_session: str jupyter_server: Optional[str] kernel_id: Optional[str] connection_file: Optional[str] @@ -61,6 +63,8 @@ class BeakerKernel(KernelProxyManager): running_actions: dict[str, Awaitable] def __init__(self, session_config, kernel_id=None, connection_file=None): + self.session_config = session_config + self.beaker_session = session_config.get("beaker_session", None) self.jupyter_server = session_config.get("server", config.jupyter_server) self.kernel_id = kernel_id self.connection_file = connection_file @@ -71,7 +75,7 @@ def __init__(self, session_config, kernel_id=None, connection_file=None): self.subkernel_execution_tracking = {} self.running_actions = {} context_args = session_config.get("context", {}) - super().__init__(session_config, session_id=f"{kernel_id}_session") + super().__init__(session_config, session_id=(self.beaker_session or self.kernel_id)) self.register_magic_commands() self.add_base_intercepts() self.context = None @@ -149,6 +153,20 @@ def 
register_magic_commands(self): "shell", "execute_request", self.handle_magic_word ) + def api_auth(self) -> str: + import hashlib + import time + + preamble = "beaker-kernel" + nonce = str(int(time.time())) + kernel_id = self.kernel_id + key = self.session_config.get("key") + + hash_source = f"{kernel_id}{nonce}{key}".encode() + hash_value = hashlib.md5(hash_source).hexdigest() + + return f"{preamble}:{kernel_id}:{nonce}:{hash_value}" + async def handle_magic_word(self, server, target_stream, data): message = JupyterMessage.parse(data) cell_content: str = message.content.get("code", "").strip() @@ -317,7 +335,7 @@ async def update_connection_file(self, **kwargs): with open(self.connection_file, "w") as connection_file: json.dump(run_info, connection_file, indent=2) - async def set_context(self, context_name, context_info, language="python3", parent_header={}): + async def set_context(self, context_name, context_info, language="python3", subkernel=None, parent_header={}): context_cls = AVAILABLE_CONTEXTS.get(context_name, None) if not context_cls: @@ -336,14 +354,16 @@ async def set_context(self, context_name, context_info, language="python3", pare context_config = { "language": language, + "subkernel": subkernel, "context_info": context_info } self.context = context_cls(beaker_kernel=self, config=context_config) await self.context.setup(context_info=context_info, parent_header=parent_header) subkernel = self.context.subkernel kernel_setup_func = getattr(subkernel, "setup", None) - with execution_context(type="setup", name=context_name, parent_header=parent_header): - await ensure_async(kernel_setup_func()) + if kernel_setup_func is not None: + with execution_context(type="setup", name=context_name, parent_header=parent_header): + await ensure_async(kernel_setup_func()) await self.update_connection_file(context={"name": context_name, "config": context_info}) await self.send_preview(parent_header=parent_header) await 
self.send_kernel_state_info(parent_header=parent_header) @@ -659,6 +679,7 @@ async def context_setup_request(self, message): context_name = content.get("context") context_info = content.get("context_info", {}) language = content.get("language", "python3") + subkernel = content.get("subkernel", None) enable_debug = content.get("debug", None) verbose = content.get("verbose", None) @@ -672,7 +693,13 @@ async def context_setup_request(self, message): parent_header = copy.deepcopy(message.header) if content: - await self.set_context(context_name, context_info, language=language, parent_header=parent_header) + await self.set_context( + context_name, + context_info, + language=language, + subkernel=subkernel, + parent_header=parent_header + ) # Send context_response context_response_content = await self.context.get_info() diff --git a/beaker_kernel/lib/autodiscovery.py b/beaker_kernel/lib/autodiscovery.py index dc5ef918..dce7a4bd 100644 --- a/beaker_kernel/lib/autodiscovery.py +++ b/beaker_kernel/lib/autodiscovery.py @@ -1,12 +1,10 @@ -import importlib -import importlib.util import json import logging import os import sys import typing from collections.abc import Mapping -# from typing import Dict +from importlib.metadata import entry_points, EntryPoints, EntryPoint logger = logging.getLogger(__name__) @@ -87,48 +85,27 @@ def find_mappings(resource_type: ResourceType) -> typing.Generator[typing.Dict[s continue -class AutodiscoveryItems(Mapping[str, type|dict[str, str]]): - raw: dict[str, type|dict[str, str]] - mapping: dict[str, type|dict] +class AutodiscoveryItems(Mapping[str, type]): + raw: EntryPoints + mapping: dict[str, EntryPoint] - def __init__(self, *args, **kwargs): - self.mapping = {} - self.raw = dict(*args, **kwargs) + def __init__(self, entrypoints_instance: EntryPoints): + self.raw = entrypoints_instance + self.mapping = { + item.name: item for item in self.raw + } def __getitem__(self, key): - item = self.mapping.get(key, self.raw.get(key)) - if 
isinstance(item, (str, bytes, os.PathLike)) and os.path(path := os.fspath(item)) and path.endswith('.json'): - with open(path) as jsonfile: - item = json.load(jsonfile) - item["mapping_file"] = path - match item: - case type(): - return item - case {"slug": slug, "package": package, "class_name": class_name, **kw}: - mapping_file = kw.get("mapping_file", None) - try: - module = importlib.import_module(package) - except (ImportError, ModuleNotFoundError) as err: - # logger.warning(f"Warning: Beaker module '{package}' in file {mapping_file} is unable to be imported. See below.") - # logger.warning(f" {err.__class__}: {err.msg}") - raise - assert slug == key - discovered_class = getattr(module, class_name) - if mapping_file: - setattr(discovered_class, '_autodiscovery', { - "mapping_file": mapping_file, - **item - }) - self.mapping[key] = discovered_class - return discovered_class - case _: - raise ValueError(f"Unable to handle autodiscovery item '{item}' (type '{item.__class__}')") - - def __setitem__(self, key, value): - self.raw[key] = value - + try: + item: EntryPoint = self.mapping.get(key, None) + if item: + item = item.load() + except ImportError: + raise + + return item def __iter__(self): - yield from self.raw.__iter__() + yield from self.mapping.keys() def __len__(self): return len(self.raw) @@ -138,8 +115,6 @@ def autodiscover(mapping_type: ResourceType) -> typing.Dict[str, type]: """ Auto discovers installed classes of specified types. 
""" - items: AutodiscoveryItems = AutodiscoveryItems() - for mapping_file, data in find_mappings(mapping_type): - slug = data["slug"] - items[slug] = {"mapping_file": mapping_file, **data} - return items + group = f"beaker.{mapping_type}" + eps = entry_points(group=group) + return AutodiscoveryItems(eps) diff --git a/beaker_kernel/lib/context.py b/beaker_kernel/lib/context.py index 61673c84..df9781b2 100644 --- a/beaker_kernel/lib/context.py +++ b/beaker_kernel/lib/context.py @@ -240,25 +240,56 @@ async def auto_context(self): return content def get_subkernel(self): - config = beaker_config - language = self.config.get("language", "python3") - self.beaker_kernel.debug("new_kernel", f"Setting new kernel of `{language}`") - kernel_opts = { - subkernel.KERNEL_NAME: subkernel - for subkernel in autodiscover("subkernels").values() - } - subkernel_opts = { - subkernel.SLUG: subkernel - for subkernel in autodiscover("subkernels").values() + language = self.config.get("language", None) + subkernel_slug = self.config.get("subkernel", None) + + self.beaker_kernel.debug("new_kernel", f"Setting new kernel of `{subkernel_slug}`") + if not subkernel_slug and language: + kernel_opts = { + subkernel.KERNEL_NAME: subkernel + for subkernel in autodiscover("subkernels").values() + } + subkernel_opts = { + subkernel.SLUG: subkernel + for subkernel in autodiscover("subkernels").values() + } + if language not in kernel_opts and language in subkernel_opts: + language = subkernel_opts[language].KERNEL_NAME + subkernel_slug = language + + subkernels = autodiscover("subkernels") + subkernel_by_lang = { + sub.JUPYTER_LANGUAGE: sub for sub in subkernels.values() } - if language not in kernel_opts and language in subkernel_opts: - language = subkernel_opts[language].KERNEL_NAME - url = urllib.parse.urljoin(self.beaker_kernel.jupyter_server, "/api/kernels") + urlbase = self.beaker_kernel.jupyter_server + + kernelspec_req = requests.get( + urllib.parse.urljoin(urlbase, 
f"/api/kernelspecs/{subkernel_slug}"), + headers={ + "X-AUTH-BEAKER": self.beaker_kernel.api_auth() + }, + ) + if kernelspec_req.status_code == 400: + raise ValueError(f"Can't find kernelspec for {subkernel_slug}") + elif kernelspec_req.status_code >= 500: + raise RuntimeError(f"Error fetching kernelspec for {subkernel_slug}: {kernelspec_req.json()}") + + kernelspec = kernelspec_req.json() + kernel_lang = kernelspec.get('spec', {}).get("language", None) + subkernel_cls = kernel_lang and subkernel_by_lang.get(kernel_lang) + + # url = urllib.parse.urljoin(self.beaker_kernel.jupyter_server, "/api/kernels") + url = urllib.parse.urljoin(urlbase, "/api/kernels") + path = self.beaker_kernel.session_config.get("beaker_session", None) + if path is None: + path = self.beaker_kernel.session_config.get("jupyter_session", "") res = requests.post( url, - json={"name": language, "path": ""}, - headers={"Authorization": f"token {config.jupyter_token}"}, + json={"name": subkernel_slug, "path": path}, + headers={ + "X-AUTH-BEAKER": self.beaker_kernel.api_auth() + }, ) kernel_info = res.json() self.beaker_kernel.update_running_kernels() @@ -271,7 +302,9 @@ def get_subkernel(self): raise ValueError("Unknown kernel " + subkernel_id) if kernels[matching] == self.beaker_kernel.server.config: raise ValueError("Refusing loopback connection") - subkernel = kernel_opts[language](subkernel_id, kernels[matching], self) + + # subkernel = kernel_opts[language](subkernel_id, kernels[matching], self) + subkernel = subkernel_cls(subkernel_id, kernels[matching], self) self.beaker_kernel.server.set_proxy_target(subkernel.connected_kernel) return subkernel @@ -346,6 +379,7 @@ async def get_info(self) -> dict: payload = { "language": self.subkernel.DISPLAY_NAME, "subkernel": self.subkernel.KERNEL_NAME, + "subkernel_kernel": self.config.get("subkernel", None), "actions": action_details, "custom_messages": custom_messages, "procedures": list(self.templates.keys()), diff --git 
a/beaker_kernel/lib/extension.py b/beaker_kernel/lib/extension.py new file mode 100644 index 00000000..a8187658 --- /dev/null +++ b/beaker_kernel/lib/extension.py @@ -0,0 +1,53 @@ +import inspect +from typing import ClassVar + +import click + +class BeakerExtension: + slug: str + settings: "BeakerExtensionSettings" + keybindings: "BeakerExtensionKeybindings" + components: "BeakerExtensionComponents" + + def initialize(self, *args, **kwargs): + pass + + def activate(self, *args, **kwargs): + pass + + def deactivate(self): + pass + + +class BeakerExtensionSettings: + pass + + +class BeakerExtensionKeybindings: + pass + + +class BeakerExtensionComponents: + pass + + +class BeakerCLICommands(click.Group): + slug: ClassVar[str] + group_description: ClassVar[str] = """Commands from extension""" + + @classmethod + def as_group(cls): + + @click.group(cls=cls) + def commands(): + pass + commands.help = cls.group_description + + functions = inspect.getmembers( + cls, + lambda member: isinstance(member, click.Command) + ) + for name, command in functions: + commands.add_command(command, name=name) + + return commands diff --git a/beaker_kernel/service/api/integrations.py b/beaker_kernel/service/api/integrations.py index c1b58590..69f2a5c4 100644 --- a/beaker_kernel/service/api/integrations.py +++ b/beaker_kernel/service/api/integrations.py @@ -6,7 +6,6 @@ from queue import Empty from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager from jupyter_server.services.sessions.sessionmanager import SessionManager @@ -167,7 +166,7 @@ async def call_in_context( # Integration Handler -class IntegrationHandler(BeakerAPIMixin, ExtensionHandlerMixin, JupyterHandler): +class IntegrationHandler(BeakerAPIMixin, JupyterHandler): """ Handles fetching and adding integrations. 
""" @@ -225,7 +224,7 @@ async def post(self, session_id=None, integration_id=None): raise tornado.web.HTTPError(status_code=500, log_message=str(e)) -class IntegrationResourceHandler(BeakerAPIMixin, ExtensionHandlerMixin, JupyterHandler): +class IntegrationResourceHandler(BeakerAPIMixin, JupyterHandler): """ Handles fetching and adding resources belonging to an integration. """ diff --git a/beaker_kernel/service/auth.py b/beaker_kernel/service/auth.py deleted file mode 100644 index f340da4d..00000000 --- a/beaker_kernel/service/auth.py +++ /dev/null @@ -1,119 +0,0 @@ -from functools import lru_cache - -import base64 -import boto3 -import json -import requests -from traitlets import Unicode, Bool - -from jupyter_server.auth.authorizer import Authorizer, AllowAllAuthorizer -from jupyter_server.auth.identity import IdentityProvider, User - -try: - import jwt as pyjwt -except ImportError: - pyjwt = None - - -class CognitoHeadersIdentityProvider(IdentityProvider): - - cognito_jwt_header = Unicode( - default_value="X-Amzn-Oidc-Data", - config=True, - help="Header containing the cognito JWT encoded grants", - ) - - cognito_identity_header = Unicode( - default_value="X-Amzn-Oidc-Identity", - config=True, - help="Header containing the cognito user identity", - ) - - cognito_accesstoken_header = Unicode( - default_value="X-Amzn-Oidc-Accesstoken", - config=True, - help="Header containing the cognito active access token", - ) - - user_pool_id = Unicode( - default_value="", - config=True, - help="AWS Cognito User Pool ID", - ) - - verify_jwt_signature = Bool( - default_value=True, - config=True, - help="Whether the jwt signature from cognito should be verified", - ) - - - @lru_cache - def _get_elb_key(self, region: str, kid: str) -> str: - key_url = f"https://public-keys.auth.elb.{region}.amazonaws.com/{kid}" - pubkey = requests.get(key_url).text - return pubkey - - - @lru_cache - def _verify_jwt(self, jwt_data: str): - if pyjwt is not None: - header, body = 
[json.loads(base64.b64decode(f).decode('utf-8')) for f in jwt_data.split('.')[0:2]] - self.log.warning(header) - self.log.warning(body) - signer: str = header.get("signer") - region = signer.split(':')[3] - kid: str = header.get("kid") - pubkey = self._get_elb_key(region, kid) - payload = pyjwt.decode(jwt_data, key=pubkey, algorithms=["ES256", "RS256"]) - self.log.warning(payload) - return payload - - - @lru_cache - def _get_user(self, user_id, access_token): - # Access token is provided as an argument to ensure that auth info is refetched (misses cache) if the access token changes. - try: - cognito_client = boto3.client('cognito-idp') - response = cognito_client.admin_get_user( - UserPoolId=self.user_pool_id, - Username=user_id - ) - - user_attributes = {attr['Name']: attr['Value'] for attr in response.get('UserAttributes', [])} - username = user_attributes.get('preferred_username') or user_attributes.get('email') or user_id - - return User( - username=username, - name=user_attributes.get('name', username), - display_name=user_attributes.get('given_name', username), - ) - except Exception as e: - self.log.warning(f"Failed to get cognito user info for {user_id}: {e}") - return None - - - async def get_user(self, handler) -> User|None: - jwt_data: str = handler.request.headers.get(self.cognito_jwt_header, None) - user_id: str = handler.request.headers.get(self.cognito_identity_header, None) - access_token: str = handler.request.headers.get(self.cognito_accesstoken_header, None) - - match pyjwt, self.verify_jwt_signature, jwt_data: - case (None, _, _): - self.log.warning("Unable to verify JWT signature as package 'pyjwt' is not installed.") - case (_, _, None): - self.log.warning("Unable to verify JWT signature as it is not found.") - case (_, False, _): - self.log.info("Skipping checking JWT signature due to configuration.") - case (_, True, str()): - try: - self._verify_jwt(jwt_data) - except pyjwt.exceptions.InvalidTokenError as e: - self.log.warning(f"Error 
attempting to verify JWT token: {e}") - return None - - if not user_id or not access_token: - return None - - user = self._get_user(user_id, access_token) - return user diff --git a/beaker_kernel/service/auth/__init__.py b/beaker_kernel/service/auth/__init__.py new file mode 100644 index 00000000..533f369f --- /dev/null +++ b/beaker_kernel/service/auth/__init__.py @@ -0,0 +1,164 @@ +import contextvars +import inspect +import os +import hashlib +import logging +from dataclasses import dataclass, field +from functools import lru_cache, update_wrapper, wraps +from traitlets import Unicode, Bool +from typing import Optional + +from jupyter_server.auth.authorizer import Authorizer +from jupyter_server.auth.identity import IdentityProvider, User +from tornado import web + +from jupyter_server.services.config.manager import ConfigManager + +current_user = contextvars.ContextVar("current_user", default=None) +current_request = contextvars.ContextVar("current_request", default=None) + + +class BeakerIdentityProvider(IdentityProvider): + + beaker_kernel_header = Unicode( + "X-AUTH-BEAKER", + help="Header name for Beaker kernel authentication", + config=True + ) + + async def _is_authorized_beaker_kernel(self, handler: web.RequestHandler): + """Validate Beaker kernel authentication token. + + Checks for a valid Beaker kernel authentication token in the request + headers and validates it against the kernel's session key using MD5 hash. 
+ + Parameters + ---------- + handler : web.RequestHandler + The Tornado request handler containing the authentication headers + + Returns + ------- + bool + True if the token is valid, False otherwise + """ + auth_token = handler.request.headers.get(self.beaker_kernel_header, None) + if not auth_token: + return False + + try: + preamble, kernel_id, nonce, hash_value = auth_token.split(':') + if preamble != "beaker-kernel" or not kernel_id or not hash_value: + return False + kernel = handler.kernel_manager.get_kernel(kernel_id) + key = kernel.session.key.decode() + + payload = f"{kernel_id}{nonce}{key}".encode() + reconstructed_hash_value = hashlib.md5(payload).hexdigest() + valid = reconstructed_hash_value == hash_value + return valid + + except Exception as err: + logging.error(err) + return False + + + @classmethod + def beaker_kernel_auth_wrapper(cls, fn): + """Decorator for Beaker kernel authentication wrapper. + + Wraps the get_user method to check for Beaker kernel authentication + before falling back to the original authentication method. + + Parameters + ---------- + fn : callable + The original get_user method to wrap + + Returns + ------- + callable + Wrapped get_user method with Beaker kernel auth + """ + @wraps(fn) + async def get_user(self: BeakerIdentityProvider, handler: web.RequestHandler): + is_beaker = await self._is_authorized_beaker_kernel(handler) + if is_beaker: + handler._token_authenticated = True + return RoleBasedUser( + username="beaker_kernel_", + name="Beaker Kernel", + roles=["admin"], + ) + else: + result = fn(self, handler) + if inspect.isawaitable(result): + result = await result + return result + return get_user + + def __init_subclass__(cls, **kwargs): + """Setup authentication wrapper for subclasses. + + Automatically applies the Beaker kernel authentication wrapper + to the get_user method of subclasses. 
+ + Parameters + ---------- + **kwargs + Additional keyword arguments for subclass initialization + """ + super().__init_subclass__(**kwargs) + get_user = cls.beaker_kernel_auth_wrapper(cls.get_user) + update_wrapper(get_user, cls.get_user) + cls.get_user = get_user + + +class BeakerAuthorizer(Authorizer): + pass + + +@dataclass +class BeakerUser(User): + home_dir: Optional[str] = field(default=None) + + def __post_init__(self): + """Initialize home directory if not provided. + + Automatically generates a sanitized home directory path + based on the username if not explicitly set. + """ + if self.home_dir is None: + self.home_dir = self._sanitize_homedir(self.username) + return super().__post_init__() + + @staticmethod + def _sanitize_homedir(path_string: str): + """Sanitize username for use as directory path. + + Removes invalid characters and creates a unique directory name + by combining sanitized username with SHA1 hash. + + Parameters + ---------- + path_string : str + Original username string + + Returns + ------- + str + Sanitized directory path + """ + # Characters invalid for a path + invalid_chars = r'<>:"/\|?*@\'' + os.sep + # Remove any whitespace or invalid characters from the start or end of path. 
+ stripped_path_string = path_string.strip().strip(invalid_chars) + # Replace invalid characters with '_' + sanitized_path = "".join(char if char not in invalid_chars else '_' for char in stripped_path_string) + full_path = '_'.join((sanitized_path, hashlib.sha1(path_string.encode()).hexdigest())) + return full_path + + +@dataclass +class RoleBasedUser(BeakerUser): + roles: list[str] = field(default_factory=lambda: []) diff --git a/beaker_kernel/service/auth/dummy.py b/beaker_kernel/service/auth/dummy.py new file mode 100644 index 00000000..1702baf4 --- /dev/null +++ b/beaker_kernel/service/auth/dummy.py @@ -0,0 +1,33 @@ +import logging +from functools import lru_cache +from traitlets import Unicode, Bool + +from jupyter_server.auth.identity import User + +from . import BeakerAuthorizer, BeakerIdentityProvider, RoleBasedUser, current_request, current_user + + + +class DummyIdentityProvider(BeakerIdentityProvider): + + async def get_user(self, handler) -> User|None: + current_request.set(handler.request) + user = RoleBasedUser( + username="test@test-user.com", + name="testuser", + display_name="Test User", + roles=["admin"], + ) + logging.debug(f"User: {user}") + current_user.set(user) + return user + + + +class DummyAuthorizer(BeakerAuthorizer): + def is_authorized(self, handler, user, action, resource): + return 'admin' in user.roles + + +authorizer = DummyAuthorizer +identity_provider = DummyIdentityProvider diff --git a/beaker_kernel/service/auth/notebook.py b/beaker_kernel/service/auth/notebook.py new file mode 100644 index 00000000..ce9dd7f9 --- /dev/null +++ b/beaker_kernel/service/auth/notebook.py @@ -0,0 +1,22 @@ +import logging +from functools import lru_cache +from traitlets import Unicode, Bool + +from jupyter_server.auth.identity import User, IdentityProvider +from jupyter_server.auth.authorizer import Authorizer, AllowAllAuthorizer + +from . 
import BeakerAuthorizer, BeakerIdentityProvider, RoleBasedUser, current_request, current_user + + + +class NotebookIdentityProvider(BeakerIdentityProvider, IdentityProvider): + def get_user(self, handler): + return super().get_user(handler) + + +class NotebookAuthorizer(BeakerAuthorizer, AllowAllAuthorizer): + pass + + +authorizer = NotebookAuthorizer +identity_provider = NotebookIdentityProvider diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 42c6d287..e2e483b0 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -1,21 +1,34 @@ import getpass +import inspect import logging import os import pwd +import re import shutil import signal import urllib.parse +from typing import Optional, Any, cast, ClassVar + +import traitlets +from traitlets import Unicode, Integer, Float +from traitlets.config.application import Application, ClassesType +from traitlets.config.configurable import Configurable +from traitlets.config.loader import ConfigFileNotFound from jupyter_client.ioloop.manager import AsyncIOLoopKernelManager +from jupyter_client import kernelspec +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager from jupyter_server.services.sessions.sessionmanager import SessionManager from jupyter_server.serverapp import ServerApp -from jupyterlab_server import LabServerApp from beaker_kernel.lib.app import BeakerApp -from beaker_kernel.lib.config import config +from beaker_kernel.lib.autodiscovery import autodiscover +from beaker_kernel.lib.config import config, CONFIG_FILE_SEARCH_LOCATIONS from beaker_kernel.lib.utils import import_dotted_class -from beaker_kernel.service.handlers import register_handlers, SummaryHandler, request_log_handler, sanitize_env +from beaker_kernel.service.auth import current_user, BeakerUser, BeakerAuthorizer, BeakerIdentityProvider +from beaker_kernel.service.handlers 
import register_handlers, request_log_handler + logger = logging.getLogger("beaker_server") HERE = os.path.dirname(__file__) @@ -23,21 +36,277 @@ version = "1.0.0" +class BeakerContentsManager(AsyncLargeFileManager): + def _get_os_path(self, path): + """Override path resolution to use user-specific home directory. + + Parameters + ---------- + path : str + Relative path to resolve + + Returns + ------- + str + Absolute path within user's home directory + """ + user: BeakerUser = current_user.get() + if user: + path = os.path.join(user.home_dir, path) + return super()._get_os_path(path) + + +class BeakerSessionManager(SessionManager): + + def get_kernel_env(self, path, name = None): + """Get environment variables for Beaker kernel sessions. + + Sets up environment variables including session name, Beaker session, + and user information for kernel execution. + + Parameters + ---------- + path : str + Session path + name : str, optional + Session name + + Returns + ------- + dict + Environment variables for kernel + """ + # This only sets env variables for the Beaker Kernel, not subkernels. + try: + beaker_user = path.split(os.path.sep)[0] + except: + pass + env = { + **os.environ, + "JPY_SESSION_NAME": path, + "BEAKER_SESSION": str(name), + } + if beaker_user: + env.update({ + "BEAKER_USER": beaker_user, + "LANGSMITH_BEAKER_USER": beaker_user, + }) + + return env + + async def start_kernel_for_session(self, session_id, path, name, type, kernel_name): + """Start a kernel for a session with user-specific path and permissions. + + For Beaker kernels, sets up user-specific home directories and proper + file permissions for the subkernel user. 
+ + Parameters + ---------- + session_id : str + Unique identifier for the session + path : str + Path for the session + name : str + Session name + type : str + Session type + kernel_name : str + Name of the kernel to start + + Returns + ------- + dict + Session information from parent class + """ + if kernel_name == "beaker_kernel": + user: BeakerUser = current_user.get() + if user: + path = os.path.join(user.home_dir, path) + virtual_home_root = self.kernel_manager.root_dir + virtual_home_dir = os.path.join(virtual_home_root, user.home_dir) + + subkernel_user = self.parent.subkernel_user + if not os.path.isdir(virtual_home_dir): + os.makedirs(virtual_home_dir, exist_ok=True) + shutil.chown(virtual_home_dir, user=subkernel_user, group=subkernel_user) + path = os.path.join(os.path.relpath(virtual_home_dir, self.kernel_manager.root_dir), name) + return await super().start_kernel_for_session(session_id, path, name, type, kernel_name) + + +class BeakerKernelSpecManager(kernelspec.KernelSpecManager): + NAME_SEP = r"%%" + parent: "traitlets.Instance[BaseBeakerApp]" + + @property + def kernel_spec_managers(self) -> dict[str, kernelspec.KernelSpecManager]: + """Get kernel specification managers from parent server app. + + Returns + ------- + dict[str, kernelspec.KernelSpecManager] + Mapping of extension names to kernel spec managers + """ + return self.parent.kernel_spec_managers + + def get_default_kernel_name(self) -> str: + """Get the default kernel name. + + Returns + ------- + str + The default kernel name (beaker_kernel) + """ + return f"beaker_kernel" + + def _update_spec(self, name: str, spec: dict[str, dict]) -> dict[str, dict]: + """Update kernel spec with name if not present. 
+ + Parameters + ---------- + name : str + Kernel name to add to spec + spec : dict[str, dict] + Kernel specification dictionary + + Returns + ------- + dict[str, dict] + Updated kernel specification + """ + if "name" not in spec: + spec["name"] = name + return spec + + def get_all_specs(self) -> dict[str, dict]: + """Get all available kernel specifications from all managers. + + Aggregates kernel specifications from local manager and all extension + managers, applying proper namespacing for extension specs. + + Returns + ------- + dict[str, dict] + Dictionary mapping kernel names to their specifications + """ + res = {} + for spec_slug, spec_manager in self.kernel_spec_managers.items(): + + if not self.parent.kernel_spec_include_local and spec_slug is None: + # Even we are not including local specs, we need to include beaker_kernel + res["beaker_kernel"] = self._update_spec(spec_manager.get_all_specs()["beaker_kernel"]) + + specs = spec_manager.get_all_specs().items() + for kernel_name, spec in specs: + if spec_slug is None: + key = kernel_name + else: + key = f"{spec_slug}{self.NAME_SEP}{kernel_name}" + res[key] = self._update_spec(kernel_name, spec) + return res + + def get_kernel_spec(self, kernel_name) -> kernelspec.KernelSpec: + """Get a specific kernel specification by name. + + Handles both local kernel specs and extension-namespaced specs. 
+ Extension specs use the format: extension_name%%kernel_name + + Parameters + ---------- + kernel_name : str + Name of the kernel spec to retrieve, optionally namespaced + + Returns + ------- + kernelspec.KernelSpec + The requested kernel specification + + Raises + ------ + kernelspec.NoSuchKernel + If the specified kernel is not found + """ + if self.NAME_SEP in kernel_name: + spec_slug, name = kernel_name.split(self.NAME_SEP, maxsplit=1) + else: + spec_slug = None + name = kernel_name + + spec_manager = self.kernel_spec_managers.get(spec_slug, None) + if spec_manager is None: + raise kernelspec.NoSuchKernel(kernel_name) + + spec = spec_manager.get_kernel_spec(name) + + if spec is None: + raise kernelspec.NoSuchKernel(kernel_name) + + # spec = super().get_kernel_spec(kernel_name) + # if kernel_name == "beaker_kernel": + # return spec + # elif self.parent.provisioner_class: + # provisioner_obj = { + # "provisioner_name": "beaker-docker-provisioner", + # "config": { + # "image": "beaker-kernel-python", + # "max_cpus": 4, + # }, + # } + # spec.metadata["kernel_provisioner"] = provisioner_obj + return spec + + + class BeakerKernelManager(AsyncIOLoopKernelManager): + beaker_session = Unicode(allow_none=True, help="Beaker session identifier", config=True) # Longer wait_time for shutdown before killing processed due to potentially needing to shutdown both the subkernel # and the beaker kernel. - shutdown_wait_time = 10.0 + shutdown_wait_time = Float( + 10.0, + help="Time to wait for shutdown before killing processes", + config=True + ) + @property def beaker_config(self): + """Get Beaker configuration from parent. + + Returns + ------- + dict + Beaker configuration dictionary + """ return getattr(self.parent, 'beaker_config') @property - def app(self) -> "BeakerServerApp": + def app(self) -> "BaseBeakerApp": + """Get the BeakerServerApp instance. 
+ + Returns + ------- + BeakerServerApp + The server application instance + """ return self.parent.parent def write_connection_file(self, **kwargs: object) -> None: + """Write kernel connection file with Beaker-specific context. + + Extends the standard connection file with Beaker session information, + server URL, and default context from the Beaker application. + + Parameters + ---------- + **kwargs : object + Additional connection file parameters + """ + beaker_session: Optional[str] = self.beaker_session + jupyter_session: Optional[str] = kwargs.get("jupyter_session", None) + if beaker_session: + kwargs["beaker_session"] = beaker_session + if jupyter_session: + kwargs["jupyter_session"] = jupyter_session beaker_app: BeakerApp = self.beaker_config.get("app", None) default_context = beaker_app and beaker_app._default_context if default_context: @@ -48,33 +317,56 @@ def write_connection_file(self, **kwargs: object) -> None: } if app_context_dict: kwargs["context"].update(**app_context_dict) + super().write_connection_file( server=self.app.public_url, **kwargs ) + # Set file to be owned by and modifiable by the beaker user so the beaker user can modify the file. os.chmod(self.connection_file, 0o0775) shutil.chown(self.connection_file, user=self.app.agent_user) async def _async_pre_start_kernel(self, **kw): - # Fetch values from super() + """Pre-start kernel setup including user switching and environment setup. + + Configures the kernel environment with appropriate user permissions, + working directory, and environment variables before kernel launch. + + Parameters + ---------- + **kw + Keyword arguments for kernel startup + + Returns + ------- + tuple + Command and keyword arguments for kernel launch + """ + # Stash beaker_session value so it can be written in the connection file. 
+ beaker_session = kw.get('env', {}).get('BEAKER_SESSION', None) or kw.get("session_path", None) + if beaker_session and not self.beaker_session: + self.beaker_session = beaker_session + cmd, kw = await super()._async_pre_start_kernel(**kw) env = kw.pop("env", {}) # Update user, env variables, and home directory based on type of kernel being started. if self.kernel_name == "beaker_kernel": - user = self.app.agent_user + kernel_user = self.app.agent_user + home_dir = os.path.expanduser(f"~{kernel_user}") + kw["cwd"] = self.app.working_dir else: - env = sanitize_env(env) - user = self.app.subkernel_user + kernel_user = self.app.subkernel_user + home_dir = kw.get("cwd") - user_info = pwd.getpwnam(user) - home_dir = os.path.expanduser(f"~{user}") - group_list = os.getgrouplist(user, user_info.pw_gid) + user_info = pwd.getpwnam(kernel_user) + home_dir = os.path.expanduser(f"~{kernel_user}") + group_list = os.getgrouplist(kernel_user, user_info.pw_gid) if user_info.pw_uid != os.getuid(): - env["USER"] = user - kw["user"] = user + env["USER"] = kernel_user + kw["user"] = kernel_user env["HOME"] = home_dir if os.getuid() == 0 or os.geteuid() == 0: kw["group"] = user_info.pw_gid @@ -87,6 +379,10 @@ async def _async_pre_start_kernel(self, **kw): return cmd, kw pre_start_kernel = _async_pre_start_kernel + async def _async_launch_kernel(self, kernel_cmd, **kw): + kw.pop("session_path", None) + return await super()._async_launch_kernel(kernel_cmd, **kw) + async def _async_interrupt_kernel(self): if self.shutting_down and self.kernel_name == "beaker_kernel": # During shutdown, interrupt Beaker kernel instances without interrupting the subkernel which is being @@ -98,49 +394,239 @@ async def _async_interrupt_kernel(self): class BeakerKernelMappingManager(AsyncMappingKernelManager): - # kernel_manager_class = BeakerKernelManager kernel_manager_class = "beaker_kernel.service.base.BeakerKernelManager" - connection_dir = os.path.join(config.beaker_run_path, "kernelfiles") + 
connection_dir = Unicode( + os.path.join(config.beaker_run_path, "kernelfiles"), + help="Directory for kernel connection files", + config=True + ) + cull_idle_timeout = Integer( + 0, + help="Timeout in seconds for culling idle kernels", + config=True + ) def __init__(self, **kwargs): + """Initialize BeakerKernelMappingManager. + + Sets up the connection directory and initializes the kernel manager + with default kernel name if available. + + Parameters + ---------- + **kwargs + Additional arguments passed to parent class + """ # Ensure connection dir exists and is readable if not os.path.isdir(self.connection_dir): os.makedirs(self.connection_dir, mode=0o0755) else: os.chmod(self.connection_dir, 0o0755) super().__init__(**kwargs) + if hasattr(self.kernel_spec_manager, "get_default_kernel_name"): + self.default_kernel_name = self.kernel_spec_manager.get_default_kernel_name() @property def beaker_config(self): return getattr(self.parent, 'beaker_config', None) - -class BeakerServerApp(ServerApp): + def cwd_for_path(self, path, **kwargs): + user: BeakerUser = current_user.get() + if user: + user_home = self.get_home_for_user(user) + return super().cwd_for_path(user_home, **kwargs) + else: + return super().cwd_for_path(path, **kwargs) + + def get_home_for_user(self, user: BeakerUser) -> os.PathLike: + return user.home_dir + + async def _async_start_kernel(self, *, kernel_id = None, path = None, **kwargs): + kwargs.setdefault('session_path', path) + return await super()._async_start_kernel(kernel_id=kernel_id, path=path, **kwargs) + start_kernel = _async_start_kernel + + def pre_start_kernel(self, kernel_name: str, kwargs: dict): + km, kernel_name, kernel_id = super().pre_start_kernel(kernel_name, kwargs) + km = cast(BeakerKernelManager, km) + beaker_session = kwargs.get("session_path", None) + if beaker_session and not km.beaker_session: + km.beaker_session = beaker_session + return km, kernel_name, kernel_id + + async def cull_kernel_if_idle(self, kernel_id): + 
"""Cull a kernel if it is idle.""" + kernel = self._kernels.get(kernel_id, None) + if getattr(kernel, "kernel_name", None) != "beaker_kernel": + return + result = await super().cull_kernel_if_idle(kernel_id) + return result + +class BaseBeakerApp(ServerApp): """ Customizable ServerApp for use with Beaker """ + defaults: ClassVar[dict] = {} + + name = traitlets.Unicode("beaker", config=True) + app_slug = traitlets.Unicode(config=True) kernel_manager_class = BeakerKernelMappingManager + session_manager_class = BeakerSessionManager reraise_server_extension_failures = True + contents_manager_class = BeakerContentsManager + kernel_spec_manager_class = BeakerKernelSpecManager + + kernel_spec_include_local = traitlets.Bool(True, help="Include local kernel specs", config=True) + kernel_spec_managers = traitlets.Dict(help="Kernel specification managers indexed by extension name", config=True) + + beaker_extensions = traitlets.Dict(help="Auto-discovered Beaker extensions providing additional contexts and subkernels") + beaker_extension_app = traitlets.Dict(help="", config=True) + + service_user = Unicode(help="Username under which the Beaker service is running", config=True) + agent_user = Unicode(help="Username for the Beaker kernel agent process", config=True) + subkernel_user = Unicode(help="Username under which subkernels (Python, R, etc.) 
are executed", config=True) + working_dir = Unicode(help="Working directory for kernel execution and file operations", config=True) + ui_path = Unicode(help="Path to the directory containing the UI assets to serve", config=True) + log_requests = traitlets.Bool(False, help="Enable request logging", config=True) - service_user: str - agent_user: str - subkernel_user: str - working_dir: str + allow_origin = traitlets.Unicode("*", config=True) + disable_check_xsrf = traitlets.Bool(True) + open_browser = traitlets.Bool(False, config=True) + extension_url = traitlets.Unicode("/", config=True) + connection_dir = traitlets.Unicode("", config=True) + + config_file_name = traitlets.Unicode(config=True) + beaker_config_path = traitlets.Union(trait_types=[traitlets.List(trait=traitlets.Unicode()), traitlets.Unicode()], config=True) + + @classmethod + def _app_slug(cls): + cls_name = cls.__name__ + parts_to_remove = {"", "Beaker", "Base", "App"} + parts = re.split(r'([A-Z][a-z]*)', cls_name) + parts = [part.lower() for part in parts if part and part not in parts_to_remove] + return "_".join(parts) + + @traitlets.default("ui_path") + def _default_ui_path(self): + return os.path.join(os.path.dirname(__file__), "ui") + + @traitlets.default("identity_provider_class") + def _default_identity_provider_class(self): + from beaker_kernel.service.auth.notebook import NotebookIdentityProvider + return NotebookIdentityProvider + + @traitlets.default("authorizer_class") + def _default_authorizer_class(self): + from beaker_kernel.service.auth.notebook import NotebookAuthorizer + return NotebookAuthorizer + + @traitlets.default("config_file_name") + def _default_config_file_name(self): + if self.app_slug: + return f"beaker_{self.name}_config" + else: + return f"beaker_config" + + @traitlets.default("beaker_config_path") + def _default_beaker_config_path(self): + return [str(path) for (path, *_) in CONFIG_FILE_SEARCH_LOCATIONS] + + @traitlets.default("app_slug") + def 
_default_app_slug(self): + return self._app_slug() def __init__(self, **kwargs): + # Apply defaults from defaults classvar + defaults = getattr(self.__class__, "defaults", None) + if defaults and isinstance(defaults, dict): + from traitlets.config import Config + config = Config(**defaults) + self.config.update(config) + + kwargs.update(defaults) super().__init__(**kwargs) - self.service_user = getpass.getuser() + + def initialize(self, argv = None, find_extensions = False, new_httpserver = True, starter_extension = None): + super().initialize(argv, find_extensions, new_httpserver, starter_extension) + self.initialize_handlers() + + def initialize_handlers(self): + """Bypass initializing the default handler since we don't need to use the webserver, just the websockets.""" + self.handlers = [] + register_handlers(self) + self.web_app.add_handlers(".*", self.handlers) + + def load_config_file(self, suppress_errors = True): + default_config_file_name = self._default_config_file_name() + try: + # Load default configuration file first + try: + Application.load_config_file(self, default_config_file_name, path=self.beaker_config_path) + except ConfigFileNotFound: + self.log.debug("Config file not found, skipping: %s", self.config_file_name) + + # If another configuration file is defined, load it second so it overrides any defaults + if self.config_file_name != default_config_file_name: + try: + Application.load_config_file(self, self.config_file_name, path=self.beaker_config_path) + except ConfigFileNotFound: + self.log.debug("Config file not found, skipping: %s", self.config_file_name) + except Exception: + # Reraise errors for testing purposes, or if set in self.raise_config_file_errors + if (not suppress_errors) or self.raise_config_file_errors: + raise + self.log.warning("Error loading config file: %s", self.config_file_name, exc_info=True) + + @traitlets.default('beaker_extensions') + def _default_beaker_extensions(self): + return {k: v for k, v in 
autodiscover("extensions").items()} + + @traitlets.default('service_user') + def _default_service_user(self): + return getpass.getuser() + + @traitlets.default('agent_user') + def _default_agent_user(self): + if self.service_user == "root": + agent_user = os.environ.get("BEAKER_AGENT_USER", None) + if agent_user is None: + raise RuntimeError("When running as root, BEAKER_AGENT_USER environment variable must be set.") + return agent_user + else: + return os.environ.get("BEAKER_AGENT_USER", self.service_user) + + @traitlets.default('subkernel_user') + def _default_subkernel_user(self): + if self.service_user == "root": + subkernel_user = os.environ.get("BEAKER_SUBKERNEL_USER", None) + if subkernel_user is None: + raise RuntimeError("When running as root, BEAKER_SUBKERNEL_USER environment variable must be set.") + return subkernel_user + else: + return os.environ.get("BEAKER_SUBKERNEL_USER", self.service_user) + + @traitlets.default('working_dir') + def _default_working_dir(self): if self.service_user == "root": - self.agent_user = os.environ.get("BEAKER_AGENT_USER", None) - self.subkernel_user = os.environ.get("BEAKER_SUBKERNEL_USER", None) - if self.agent_user is None or self.subkernel_user is None: - raise RuntimeError("When running as root, BEAKER_AGENT_USER and BEAKER_SUBKERNEL_USER environment errors must be set.") - self.working_dir = os.path.expanduser(f"~{self.subkernel_user}") + return os.path.expanduser(f"~{self.subkernel_user}") else: - self.agent_user = os.environ.get("BEAKER_AGENT_USER", self.service_user) - self.subkernel_user = os.environ.get("BEAKER_SUBKERNEL_USER", self.service_user) - self.working_dir = os.getcwd() + return os.getcwd() + + @traitlets.default('kernel_spec_managers') + def _default_kernel_spec_managers(self): + result = {} + # Add local kernel specs in enabled first + if self.kernel_spec_include_local: + local_kernel_spec_manager = kernelspec.KernelSpecManager(parent=self) + result[None] = local_kernel_spec_manager + + # Add kernel 
specs from extensions + for extension_slug, extension_cls in self.beaker_extensions.items(): + spec_manager = getattr(extension_cls, "kernel_spec_manager_class", None) + if spec_manager: + result[extension_slug] = spec_manager(parent=self) + return result @property def _default_root_dir(self): @@ -152,7 +638,7 @@ def stop(self, from_signal = False): @property def beaker_config(self): - return getattr(self.starter_app, 'extension_config', None) + return self.config @property def public_url(self): @@ -170,60 +656,38 @@ def _get_urlparts(self, path: str | None = None, include_token: bool = False) -> # Always return urls without tokens return super()._get_urlparts(path, False) - -class BaseBeakerServerApp(LabServerApp): - name = "beaker_kernel" - serverapp_class = BeakerServerApp - load_other_extensions = True - app_name = "Beaker Jupyter App" - app_version = version - allow_origin = "*" - open_browser = False - extension_url = "/" - connection_dir = "" - log_requests = False - - app_traits = {} - subcommands = {} - - ui_path = os.path.join(HERE, "ui") - - @classmethod - def get_extension_package(cls): - return cls.__module__ - - @classmethod - def initialize_server(cls, argv=None, load_other_extensions=True, **kwargs): - # Set Jupyter token from config - os.environ.setdefault("JUPYTER_TOKEN", config.jupyter_token) - kwargs.update(cls.app_traits) - app = super().initialize_server(argv=argv, load_other_extensions=load_other_extensions, **kwargs) - # Log requests to console if configured - if cls.log_requests: - app.web_app.settings["log_function"] = request_log_handler - return app - - def initialize_handlers(self): - """Bypass initializing the default handler since we don't need to use the webserver, just the websockets.""" - self.handlers.append((r"/summary", SummaryHandler)) - register_handlers(self) - super().initialize_handlers() - - def initialize_settings(self): - # Override to allow cross domain websockets - self.settings["allow_origin"] = "*" - 
self.settings["disable_check_xsrf"] = True - - beaker_app_slug = os.environ.get("BEAKER_APP", None) - if beaker_app_slug: - cls: type[BeakerApp] = import_dotted_class(beaker_app_slug) - beaker_app: BeakerApp = cls() - self.extension_config["app_cls"] = cls - self.extension_config["app"] = beaker_app - else: - self.extension_config["app_cls"] = None - self.extension_config["app"] = None - - -if __name__ == "__main__": - BeakerServerApp.launch_instance() + def generate_config_file(self, classes: ClassesType | None = None) -> str: + """Generate default config file from Configurables""" + lines = [ + "# ===========================================", + "# Beaker Notebook Service Configuration File", + "# ===========================================", + "# This file demonstrates all configurable traitlets in the Beaker Notebook service.", + "# Copy this file to jupyter_server_config.py or beaker_config.py in your Jupyter config directory.", + "# Uncomment and modify values as needed for your deployment.", + "", + "c = get_config() # noqa", + "", + ] + + def class_sort_key(cls: type): + if cls == self.__class__: + return -1, cls.__module__, cls.__name__ + if 'jupyter' in cls.__module__ or 'nbformat' in cls.__module__ or 'traitlets' in cls.__module__: + return 1, cls.__module__, cls.__name__ + return 0, cls.__module__, cls.__name__ + # return cls.__name__ + + classes = self.classes if classes is None else classes + + extended_classes = [] + extended_classes.extend([subclass for _, subclass in inspect.getmembers(self, inspect.isclass) if subclass not in classes]) + extended_classes.extend([subclass.__class__ for _, subclass in inspect.getmembers(self, lambda obj: isinstance(obj, Configurable)) if subclass.__class__ not in classes]) + extended_classes.extend([extension for extension in getattr(self, 'beaker_extensions', {}).values() if extension not in classes]) + classes.extend(extended_classes) + + config_classes = list(self._classes_with_config_traits(classes)) + 
config_classes.sort(key=class_sort_key) + for cls in config_classes: + lines.append(cls.class_config_section(config_classes)) + return "\n".join(lines) diff --git a/beaker_kernel/service/handlers.py b/beaker_kernel/service/handlers.py index 814a8d35..cf5495c4 100644 --- a/beaker_kernel/service/handlers.py +++ b/beaker_kernel/service/handlers.py @@ -1,36 +1,38 @@ import asyncio -import importlib import json import logging import os import traceback import uuid import urllib.parse -from typing import get_origin, get_args -from dataclasses import is_dataclass, asdict -from collections.abc import Mapping, Collection +from importlib.metadata import entry_points, EntryPoints, EntryPoint +from typing import get_origin, get_args, Optional, TYPE_CHECKING +from dataclasses import is_dataclass from pathlib import Path -from typing import get_origin, get_args, GenericAlias, Union, Generic, Generator, Optional from types import UnionType -from jupyter_server.auth.decorator import authorized +from jupyter_server.auth.decorator import authorized, allow_unauthenticated from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ExtensionHandlerMixin +# from jupyter_server.extension.handler import ExtensionHandlerMixin from jupyterlab_server import LabServerApp from tornado import web, httputil -from tornado.web import StaticFileHandler, RedirectHandler, RequestHandler, HTTPError +from tornado.web import StaticFileHandler, RequestHandler, HTTPError from beaker_kernel.lib.autodiscovery import autodiscover from beaker_kernel.lib.app import BeakerApp from beaker_kernel.lib.context import BeakerContext from beaker_kernel.lib.subkernel import BeakerSubkernel -from beaker_kernel.lib.agent_tasks import summarize from beaker_kernel.lib.config import config, locate_config, Config, Table, Choice, recursiveOptionalUpdate, reset_config from beaker_kernel.service import admin_utils +from beaker_kernel.service.auth import BeakerUser from .api.handlers 
import register_api_handlers +if TYPE_CHECKING: + from .base import BaseBeakerApp + logger = logging.getLogger(__name__) + def sanitize_env(env: dict[str, str]) -> dict[str, str]: # Whitelist must match the env variable name exactly and is checked first. # Blacklist can match any part of the variable name. @@ -60,17 +62,19 @@ def request_log_handler(handler: JupyterHandler): method = handler.request.method.upper() if method in SKIPPED_METHODS: return + user: BeakerUser = handler.current_user logger.info( - "%d %s %.2fms", + "%d %s %.2fms %s", handler.get_status(), handler._request_summary(), request_time, + f": {user.username}" if user else "", ) class PageHandler(StaticFileHandler): """ - Special handler that + Special handler that returns UI pages dynamically defined by the UI. """ async def get(self, path: str, include_body: bool = True) -> None: @@ -109,7 +113,7 @@ async def get(self, path: str, include_body: bool = True) -> None: return await super().get(path, include_body=include_body) -class ConfigController(ExtensionHandlerMixin, JupyterHandler): +class ConfigController(JupyterHandler): """ """ @staticmethod @@ -241,11 +245,12 @@ async def get_config(self): } ) -class ConfigHandler(ExtensionHandlerMixin, JupyterHandler): +class ConfigHandler(JupyterHandler): """ Provide config via an endpoint """ + @allow_unauthenticated def get(self): # If BASE_URL is not provided in the environment, assume that the base url is the same location that # is handling this request, as reported by the request headers. 
@@ -260,16 +265,15 @@ def get(self): ws_scheme = "ws" ws_url = base_url.replace(base_scheme, ws_scheme) - extension_config = self.extensionapp.extension_config - beaker_app: BeakerApp|None = extension_config.get("app", None) + beaker_app: BeakerApp|None = self.config.get("app", None) config_data = { - # "appendToken": True, "appUrl": os.environ.get("APP_URL", base_url), "baseUrl": base_url, "wsUrl": os.environ.get("JUPYTER_WS_URL", ws_url), "token": config.jupyter_token, "config_type": config.config_type, + "defaultKernel": self.kernel_spec_manager.get_default_kernel_name(), "extra": {} } if hasattr(config, "send_notebook_state"): @@ -288,10 +292,15 @@ def get(self): return self.write(config_data) -class ContextHandler(ExtensionHandlerMixin, JupyterHandler): +class ContextHandler(JupyterHandler): """ Provide information about llm contexts via an endpoint """ + provisioners: EntryPoints + + def __init__(self, application, request, **kwargs): + super().__init__(application, request, **kwargs) + self.provisioners = entry_points(group="jupyter_client.kernel_provisioners") def get(self): """Get the main page for the application's interface.""" @@ -299,27 +308,62 @@ def get(self): contexts: dict[str, BeakerContext] = autodiscover("contexts") possible_subkernels: dict[str, BeakerSubkernel] = autodiscover("subkernels") subkernel_by_kernel_index = {subkernel.KERNEL_NAME: subkernel for subkernel in possible_subkernels.values()} - installed_kernels = [ - subkernel_by_kernel_index[kernel_name] for kernel_name in ksm.find_kernel_specs().keys() - if kernel_name in subkernel_by_kernel_index - ] + subkernel_by_language_index = {subkernel.JUPYTER_LANGUAGE: subkernel for subkernel in possible_subkernels.values()} + kernels = ksm.get_all_specs() + + installed_kernels = {} + for kernel_long_name, kernel_details in kernels.items(): + kernelspec = kernel_details.get("spec", {}) + kernel_name = kernelspec.get("name", kernel_long_name) + kernel_language = kernelspec.get("language", 
kernel_name) + + if kernel_language in subkernel_by_language_index: + installed_kernels[kernel_long_name] = { + "kernelspec": kernel_details, + "subkernel": subkernel_by_language_index[kernel_language], + } + contexts = sorted(contexts.items(), key=lambda item: (item[1].WEIGHT, item[0])) # Extract data from auto-discovered contexts and subkernels to provide options - context_data = { - context_slug: { + context_data = {} + for context_slug, context in contexts: + acceptable_subkernels = context.available_subkernels() + available_subkernels = [ + subkernel + for subkernel in acceptable_subkernels + if subkernel in set( + subkernel["subkernel"].SLUG for subkernel in installed_kernels.values() + ) + ] + + context_data[context_slug] = { "languages": [ { "slug": subkernel_slug, - "subkernel": getattr(possible_subkernels.get(subkernel_slug), "KERNEL_NAME") + "subkernel": subkernel_slug, + "display": None, } - for subkernel_slug in context.available_subkernels() - if subkernel_slug in set(subkernel.SLUG for subkernel in installed_kernels) + for subkernel_slug in available_subkernels ], - "defaultPayload": context.default_payload() + "subkernels": {}, + "defaultPayload": context.default_payload(), } - for context_slug, context in contexts - } + subkernels = context_data[context_slug]["subkernels"] + for kernel_name, kernel_info in installed_kernels.items(): + if kernel_info["subkernel"].SLUG in acceptable_subkernels: + display_name = kernel_info["subkernel"].DISPLAY_NAME + provisioner_info = kernel_info.get("provisioner", None) + if provisioner_info: + display_name += f" ({provisioner_info['name']})" + subkernels[kernel_name] = { + "language": kernel_info["kernelspec"]["spec"]["language"], + "slug": kernel_info["subkernel"].SLUG, + "display_name": display_name, + "weight": kernel_info["subkernel"].WEIGHT, + } + + return self.write(context_data) @@ -408,14 +452,7 @@ async def post(self, format): self.finish(output) -class SummaryHandler(ExtensionHandlerMixin, 
JupyterHandler): - async def post(self): - payload = json.loads(self.request.body) - summary = await summarize(**payload) - return self.write(summary) - - -class StatsHandler(ExtensionHandlerMixin, JupyterHandler): +class StatsHandler(JupyterHandler): """ """ @@ -505,10 +542,12 @@ async def get(self): return self.write(json.dumps(output)) -def register_handlers(app: LabServerApp): +from jupyter_server.serverapp import ServerApp +def register_handlers(app: ServerApp): pages = [] - beaker_app: BeakerApp = app.extension_config.get("app", None) + # TODO: fix beaker app registration + beaker_app: BeakerApp = app.config.get("app", None) if beaker_app and beaker_app.asset_dir: if os.path.isdir(beaker_app.asset_dir): app.handlers.append((f"/assets/{beaker_app.slug}/(.*)", StaticFileHandler, {"path": beaker_app.asset_dir})) @@ -564,7 +603,6 @@ def register_handlers(app: LabServerApp): app.handlers.append(("/config", ConfigHandler)) app.handlers.append(("/stats", StatsHandler)) app.handlers.append((r"/(favicon.ico|beaker.svg)$", StaticFileHandler, {"path": Path(app.ui_path)})) - app.handlers.append((r"/summary", SummaryHandler)) app.handlers.append((r"/export/(?P\w+)", ExportAsHandler)), app.handlers.append((r"/((?:static|themes)/.*)", StaticFileHandler, {"path": Path(app.ui_path)})), app.handlers.append((page_regex, PageHandler, {"path": app.ui_path, "default_filename": "index.html"})) diff --git a/beaker_kernel/service/multiuser.py b/beaker_kernel/service/multiuser.py new file mode 100644 index 00000000..2e2f1779 --- /dev/null +++ b/beaker_kernel/service/multiuser.py @@ -0,0 +1,12 @@ +from .server import BeakerServerApp + +class BeakerMultiUserServerApp(BeakerServerApp): + + _default_app_traits = { + "authorizer_class": "beaker_kernel.service.auth.dummy.DummyAuthorizer", + "identity_provider_class": "beaker_kernel.service.auth.dummy.DummyIdentityProvider", + } + + +if __name__ == "__main__": + BeakerMultiUserServerApp.launch_instance() diff --git 
a/beaker_kernel/service/notebook.py b/beaker_kernel/service/notebook.py index baae40d6..faf0c167 100644 --- a/beaker_kernel/service/notebook.py +++ b/beaker_kernel/service/notebook.py @@ -1,13 +1,13 @@ -from beaker_kernel.service.base import BaseBeakerServerApp +from beaker_kernel.service.base import BaseBeakerApp +from beaker_kernel.service.auth.notebook import NotebookAuthorizer, NotebookIdentityProvider -def _jupyter_server_extension_points(): - return [{"module": "beaker_kernel.service.notebook", "app": BeakerNotebookApp}] - - -class BeakerNotebookApp(BaseBeakerServerApp): - pass +class BeakerNotebookApp(BaseBeakerApp): + defaults = { + "authorizer_class": NotebookAuthorizer, + "identity_provider_class": NotebookIdentityProvider, + } if __name__ == "__main__": BeakerNotebookApp.launch_instance() diff --git a/beaker_kernel/service/server.py b/beaker_kernel/service/server.py index c17154e1..527e9478 100644 --- a/beaker_kernel/service/server.py +++ b/beaker_kernel/service/server.py @@ -1,17 +1,14 @@ -import os +from beaker_kernel.service.base import BaseBeakerApp -from beaker_kernel.service.base import BaseBeakerServerApp, logger - -def _jupyter_server_extension_points(): - return [{"module": "beaker_kernel.service.server", "app": BeakerServerApp}] - - -class BeakerServerApp(BaseBeakerServerApp): - log_requests = True - app_traits = { - "allow_root": True, +class BeakerServerApp(BaseBeakerApp): + defaults = { + "log_requests": True, "ip": "0.0.0.0", + "allow_root": True, + "MultiKernelManager": { + "cull_idle_timeout": 3600, + } } From c31f6be430349e297a372a2c9d5a5444f7fd8be3 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Fri, 19 Sep 2025 14:44:23 -0600 Subject: [PATCH 05/16] Update to config generation and default config to remove deprecated and duplicated configuration options --- beaker_config_sample.py | 2115 +++++++-------------------------- beaker_kernel/service/base.py | 54 +- 2 files changed, 483 insertions(+), 1686 deletions(-) diff --git 
a/beaker_config_sample.py b/beaker_config_sample.py index 04b4633a..828e3176 100644 --- a/beaker_config_sample.py +++ b/beaker_config_sample.py @@ -17,58 +17,89 @@ # c.BaseBeakerApp.agent_user = '' ## Set the Access-Control-Allow-Credentials: true header -# See also: ServerApp.allow_credentials +# Default: False # c.BaseBeakerApp.allow_credentials = False ## Whether or not to allow external kernels, whose connection files are placed in # external_connection_dir. -# See also: ServerApp.allow_external_kernels +# Default: False # c.BaseBeakerApp.allow_external_kernels = False # Default: '*' # c.BaseBeakerApp.allow_origin = '*' ## Use a regular expression for the Access-Control-Allow-Origin header -# See also: ServerApp.allow_origin_pat +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +# Default: '' # c.BaseBeakerApp.allow_origin_pat = '' -## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change -# See also: ServerApp.allow_password_change -# c.BaseBeakerApp.allow_password_change = True - ## Allow requests where the Host header doesn't point to a local server -# See also: ServerApp.allow_remote_access +# +# By default, requests get a 403 forbidden response if the 'Host' header +# shows that the browser thinks it's on a non-local domain. +# Setting this option to True disables this check. +# +# This protects against 'DNS rebinding' attacks, where a remote web server +# serves you a page and then changes its DNS to send later requests to a +# local IP, bypassing same-origin checks. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, +# along with hostnames configured in local_hostnames. +# Default: False # c.BaseBeakerApp.allow_remote_access = False ## Whether to allow the user to run the server as root. 
-# See also: ServerApp.allow_root +# Default: False # c.BaseBeakerApp.allow_root = False ## Allow unauthenticated access to endpoints without authentication rule. -# See also: ServerApp.allow_unauthenticated_access +# +# When set to `True` (default in jupyter-server 2.0, subject to change +# in the future), any request to an endpoint without an authentication rule +# (either `@tornado.web.authenticated`, or `@allow_unauthenticated`) +# will be permitted, regardless of whether user has logged in or not. +# +# When set to `False`, logging in will be required for access to each endpoint, +# excluding the endpoints marked with `@allow_unauthenticated` decorator. +# +# This option can be configured using `JUPYTER_SERVER_ALLOW_UNAUTHENTICATED_ACCESS` +# environment variable: any non-empty value other than "true" and "yes" will +# prevent unauthenticated access to endpoints without `@allow_unauthenticated`. +# Default: True # c.BaseBeakerApp.allow_unauthenticated_access = True ## Answer yes to any prompts. -# See also: JupyterApp.answer_yes +# Default: False # c.BaseBeakerApp.answer_yes = False # Default: '' # c.BaseBeakerApp.app_slug = '' ## " -# See also: ServerApp.authenticate_prometheus +# Require authentication to access prometheus metrics. +# Default: True # c.BaseBeakerApp.authenticate_prometheus = True ## The authorizer class to use. -# See also: ServerApp.authorizer_class +# Default: 'jupyter_server.auth.authorizer.AllowAllAuthorizer' # c.BaseBeakerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' ## Reload the webapp when changes are made to any Python src files. -# See also: ServerApp.autoreload +# Default: False # c.BaseBeakerApp.autoreload = False ## The base URL for the Jupyter server. -# See also: ServerApp.base_url +# +# Leading and trailing slashes can be omitted, +# and will automatically be added. 
+# Default: '/' # c.BaseBeakerApp.base_url = '/' # Default: traitlets.Undefined @@ -78,50 +109,65 @@ # c.BaseBeakerApp.beaker_extension_app = {} ## Specify what command to use to invoke a web -# See also: ServerApp.browser +# browser when starting the server. If not specified, the +# default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the +# BROWSER environment variable to override it. +# Default: '' # c.BaseBeakerApp.browser = '' ## The full path to an SSL/TLS certificate file. -# See also: ServerApp.certfile +# Default: '' # c.BaseBeakerApp.certfile = '' ## The full path to a certificate authority certificate for SSL/TLS client # authentication. -# See also: ServerApp.client_ca +# Default: '' # c.BaseBeakerApp.client_ca = '' ## Full path of a config file. -# See also: JupyterApp.config_file +# Default: '' # c.BaseBeakerApp.config_file = '' # Default: '' # c.BaseBeakerApp.config_file_name = '' ## The config manager class to use -# See also: ServerApp.config_manager_class +# Default: 'jupyter_server.services.config.manager.ConfigManager' # c.BaseBeakerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' # Default: '' # c.BaseBeakerApp.connection_dir = '' -## DEPRECATED. Use IdentityProvider.cookie_options -# See also: ServerApp.cookie_options -# c.BaseBeakerApp.cookie_options = {} - ## The random bytes used to secure cookies. -# See also: ServerApp.cookie_secret +# By default this is generated on first start of the server and persisted across server +# sessions by writing the cookie secret into the `cookie_secret_file` file. +# When using an executable config file you can override this to be random at each server restart. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +# Default: b'' # c.BaseBeakerApp.cookie_secret = b'' ## The file where the cookie secret is stored. 
-# See also: ServerApp.cookie_secret_file +# Default: '' # c.BaseBeakerApp.cookie_secret_file = '' ## Override URL shown to users. -# See also: ServerApp.custom_display_url +# +# Replace actual URL, including protocol, address, port and base URL, +# with the given value when displaying URL to the users. Do not change +# the actual connection URL. If authentication token is enabled, the +# token is added to the custom URL automatically. +# +# This option is intended to be used when the URL to display to the user +# cannot be determined reliably by the Jupyter server (proxified +# or containerized setups for example). +# Default: '' # c.BaseBeakerApp.custom_display_url = '' ## The default URL to redirect to from `/` -# See also: ServerApp.default_url +# Default: '/' # c.BaseBeakerApp.default_url = '/' # Default: '/' @@ -132,65 +178,58 @@ # runtime_dir/external_kernels. Make sure that this directory is not filled with # left-over connection files, that could result in unnecessary kernel manager # creations. -# See also: ServerApp.external_connection_dir +# Default: None # c.BaseBeakerApp.external_connection_dir = None ## handlers that should be loaded at higher priority than the default services -# See also: ServerApp.extra_services +# Default: [] # c.BaseBeakerApp.extra_services = [] ## Extra paths to search for serving static files. -# See also: ServerApp.extra_static_paths +# +# This allows adding javascript/css to be available from the Jupyter server machine, +# or overriding individual files in the IPython +# Default: [] # c.BaseBeakerApp.extra_static_paths = [] ## Extra paths to search for serving jinja templates. -# See also: ServerApp.extra_template_paths +# +# Can be used to override templates from jupyter_server.templates. +# Default: [] # c.BaseBeakerApp.extra_template_paths = [] ## Open the named file when the application is launched. 
-# See also: ServerApp.file_to_run +# Default: '' # c.BaseBeakerApp.file_to_run = '' ## The URL prefix where files are opened directly. -# See also: ServerApp.file_url_prefix +# Default: 'notebooks' # c.BaseBeakerApp.file_url_prefix = 'notebooks' ## Generate default config file. -# See also: JupyterApp.generate_config +# Default: False # c.BaseBeakerApp.generate_config = False -## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs -# See also: ServerApp.get_secure_cookie_kwargs -# c.BaseBeakerApp.get_secure_cookie_kwargs = {} - ## The identity provider class to use. -# See also: ServerApp.identity_provider_class +# Default: 'jupyter_server.auth.identity.PasswordIdentityProvider' # c.BaseBeakerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider' -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit -# See also: ServerApp.iopub_data_rate_limit -# c.BaseBeakerApp.iopub_data_rate_limit = 0.0 - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit -# See also: ServerApp.iopub_msg_rate_limit -# c.BaseBeakerApp.iopub_msg_rate_limit = 0.0 - ## The IP address the Jupyter server will listen on. -# See also: ServerApp.ip +# Default: 'localhost' # c.BaseBeakerApp.ip = 'localhost' ## Supply extra arguments that will be passed to Jinja environment. -# See also: ServerApp.jinja_environment_options +# Default: {} # c.BaseBeakerApp.jinja_environment_options = {} ## Extra variables to supply to jinja templates when rendering. -# See also: ServerApp.jinja_template_vars +# Default: {} # c.BaseBeakerApp.jinja_template_vars = {} ## Dict of Python modules to load as Jupyter server extensions.Entry values can # be used to enable and disable the loading ofthe extensions. The extensions # will be loaded in alphabetical order. 
-# See also: ServerApp.jpserver_extensions +# Default: {} # c.BaseBeakerApp.jpserver_extensions = {} ## Include local kernel specs @@ -202,113 +241,137 @@ # c.BaseBeakerApp.kernel_spec_managers = {} ## The kernel websocket connection class to use. -# See also: ServerApp.kernel_websocket_connection_class +# Default: 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' # c.BaseBeakerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' -## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol -# See also: ServerApp.kernel_ws_protocol -# c.BaseBeakerApp.kernel_ws_protocol = '' - ## The full path to a private key file for usage with SSL/TLS. -# See also: ServerApp.keyfile +# Default: '' # c.BaseBeakerApp.keyfile = '' -## DEPRECATED. Use ZMQChannelsWebsocketConnection.limit_rate -# See also: ServerApp.limit_rate -# c.BaseBeakerApp.limit_rate = False - ## Hostnames to allow as local when allow_remote_access is False. -# See also: ServerApp.local_hostnames +# +# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted +# as local as well. +# Default: ['localhost'] # c.BaseBeakerApp.local_hostnames = ['localhost'] ## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt +# Default: '%Y-%m-%d %H:%M:%S' # c.BaseBeakerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' ## The Logging format template -# See also: Application.log_format +# Default: '[%(name)s]%(highlevel)s %(message)s' # c.BaseBeakerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' ## Set the log level by value or name. -# See also: Application.log_level +# Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'] +# Default: 30 # c.BaseBeakerApp.log_level = 30 ## Enable request logging # Default: False # c.BaseBeakerApp.log_requests = False -## -# See also: Application.logging_config +## Configure additional log handlers. 
+# +# The default stderr logs handler is configured by the log_level, log_datefmt +# and log_format settings. +# +# This configuration can be used to configure additional handlers (e.g. to +# output the log to a file) or for finer control over the default handlers. +# +# If provided this should be a logging configuration dictionary, for more +# information see: +# https://docs.python.org/3/library/logging.config.html#logging-config- +# dictschema +# +# This dictionary is merged with the base logging configuration which defines +# the following: +# +# * A logging formatter intended for interactive use called +# ``console``. +# * A logging handler that writes to stderr called +# ``console`` which uses the formatter ``console``. +# * A logger with the name of this application set to ``DEBUG`` +# level. +# +# This example adds a new handler that writes to a file: +# +# .. code-block:: python +# +# c.Application.logging_config = { +# "handlers": { +# "file": { +# "class": "logging.FileHandler", +# "level": "DEBUG", +# "filename": "", +# } +# }, +# "loggers": { +# "": { +# "level": "DEBUG", +# # NOTE: if you don't list the default "console" +# # handler here then it will be disabled +# "handlers": ["console", "file"], +# }, +# }, +# } +# Default: {} # c.BaseBeakerApp.logging_config = {} ## The login handler class to use. -# See also: ServerApp.login_handler_class +# Default: 'jupyter_server.auth.login.LegacyLoginHandler' # c.BaseBeakerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' ## The logout handler class to use. -# See also: ServerApp.logout_handler_class +# Default: 'jupyter_server.auth.logout.LogoutHandler' # c.BaseBeakerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' -## -# See also: ServerApp.max_body_size +## Sets the maximum allowed size of the client request body, specified in the +# Content-Length request header field. 
If the size in a request exceeds the +# configured value, a malformed HTTP message is returned to the client. +# +# Note: max_body_size is applied even in streaming mode. +# Default: 536870912 # c.BaseBeakerApp.max_body_size = 536870912 -## -# See also: ServerApp.max_buffer_size +## Gets or sets the maximum amount of memory, in bytes, that is allocated for use +# by the buffer manager. +# Default: 536870912 # c.BaseBeakerApp.max_buffer_size = 536870912 -## -# See also: ServerApp.min_open_files_limit +## Gets or sets a lower bound on the open file handles process resource limit. +# This may need to be increased if you run into an OSError: [Errno 24] Too many +# open files. This is not applicable when running on Windows. +# Default: 0 # c.BaseBeakerApp.min_open_files_limit = 0 # Default: 'beaker' # c.BaseBeakerApp.name = 'beaker' -## DEPRECATED, use root_dir. -# See also: ServerApp.notebook_dir -# c.BaseBeakerApp.notebook_dir = '' - # Default: False # c.BaseBeakerApp.open_browser = False -## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password -# See also: ServerApp.password -# c.BaseBeakerApp.password = '' - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required -# See also: ServerApp.password_required -# c.BaseBeakerApp.password_required = False - ## The port the server will listen on (env: JUPYTER_PORT). -# See also: ServerApp.port +# Default: 0 # c.BaseBeakerApp.port = 0 ## The number of additional ports to try if the specified port is not available # (env: JUPYTER_PORT_RETRIES). -# See also: ServerApp.port_retries +# Default: 50 # c.BaseBeakerApp.port_retries = 50 -## Preferred starting directory to use for notebooks and kernels. -# ServerApp.preferred_dir is deprecated in jupyter-server 2.0. Use -# FileContentsManager.preferred_dir instead -# See also: ServerApp.preferred_dir -# c.BaseBeakerApp.preferred_dir = '' - -## -# See also: ServerApp.pylab +## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. 
+# Default: 'disabled' # c.BaseBeakerApp.pylab = 'disabled' ## If True, display controls to shut down the Jupyter server, such as menu items # or buttons. -# See also: ServerApp.quit_button +# Default: True # c.BaseBeakerApp.quit_button = True -## DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window -# See also: ServerApp.rate_limit_window -# c.BaseBeakerApp.rate_limit_window = 0.0 - ## The directory to use for notebooks and kernels. -# See also: ServerApp.root_dir +# Default: '' # c.BaseBeakerApp.root_dir = '' ## Username under which the Beaker service is running @@ -316,11 +379,11 @@ # c.BaseBeakerApp.service_user = '' ## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config +# Default: False # c.BaseBeakerApp.show_config = False ## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json +# Default: False # c.BaseBeakerApp.show_config_json = False ## Shut down the server after N seconds with no kernelsrunning and no activity. @@ -328,23 +391,27 @@ # (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when # it's not in use. This is not precisely timed: it may shut down up to a minute # later. 0 (the default) disables this automatic shutdown. -# See also: ServerApp.shutdown_no_activity_timeout +# Default: 0 # c.BaseBeakerApp.shutdown_no_activity_timeout = 0 ## The UNIX socket the Jupyter server will listen on. -# See also: ServerApp.sock +# Default: '' # c.BaseBeakerApp.sock = '' ## The permissions mode for UNIX socket creation (default: 0600). -# See also: ServerApp.sock_mode +# Default: '0600' # c.BaseBeakerApp.sock_mode = '0600' ## Supply SSL options for the tornado HTTPServer. -# See also: ServerApp.ssl_options +# See the tornado docs for details. +# Default: {} # c.BaseBeakerApp.ssl_options = {} -## -# See also: ServerApp.static_immutable_cache +## Paths to set up static files as immutable. 
+# +# This allow setting up the cache control of static files as immutable. It +# should be used for static file named with a hash for instance. +# Default: [] # c.BaseBeakerApp.static_immutable_cache = [] ## Username under which subkernels (Python, R, etc.) are executed @@ -352,25 +419,27 @@ # c.BaseBeakerApp.subkernel_user = '' ## Supply overrides for terminado. Currently only supports "shell_command". -# See also: ServerApp.terminado_settings +# Default: {} # c.BaseBeakerApp.terminado_settings = {} ## Set to False to disable terminals. -# See also: ServerApp.terminals_enabled +# +# This does *not* make the server more secure by itself. +# Anything the user can in a terminal, they can also do in a notebook. +# +# Terminals may also be automatically disabled if the terminado package +# is not available. +# Default: False # c.BaseBeakerApp.terminals_enabled = False -## DEPRECATED. Use IdentityProvider.token -# See also: ServerApp.token -# c.BaseBeakerApp.token = '' - ## Supply overrides for the tornado.web.Application that the Jupyter server uses. -# See also: ServerApp.tornado_settings +# Default: {} # c.BaseBeakerApp.tornado_settings = {} ## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- # For headerssent by the upstream reverse proxy. Necessary if the proxy handles # SSL -# See also: ServerApp.trust_xheaders +# Default: False # c.BaseBeakerApp.trust_xheaders = False ## Working directory for kernel execution and file operations @@ -378,27 +447,68 @@ # c.BaseBeakerApp.ui_path = '' ## Disable launching browser by redirect file -# See also: ServerApp.use_redirect_file +# For versions of notebook > 5.7.2, a security feature measure was added that +# prevented the authentication token used to launch the browser from being visible. +# This feature makes it difficult for other users on a multi-user system from +# running code in your Jupyter session as you. 
+# However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), +# launching a browser using a redirect file can lead the browser failing to load. +# This is because of the difference in file structures/paths between the runtime and +# the browser. +# +# Disabling this setting to False will disable this behavior, allowing the browser +# to launch by using a URL and visible token (as before). +# Default: True # c.BaseBeakerApp.use_redirect_file = True ## Specify where to open the server on startup. This is the -# See also: ServerApp.webbrowser_open_new +# `new` argument passed to the standard library method `webbrowser.open`. +# The behaviour is not guaranteed, but depends on browser support. Valid +# values are: +# +# - 2 opens a new tab, +# - 1 opens a new window, +# - 0 opens in an existing window. +# +# See the `webbrowser.open` documentation for details. +# Default: 2 # c.BaseBeakerApp.webbrowser_open_new = 2 -## -# See also: ServerApp.websocket_compression_options +## Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +# Default: None # c.BaseBeakerApp.websocket_compression_options = None -## -# See also: ServerApp.websocket_ping_interval +## Configure the websocket ping interval in seconds. +# +# Websockets are long-lived connections that are used by some Jupyter Server +# extensions. +# +# Periodic pings help to detect disconnected clients and keep the connection +# active. If this is set to None, then no pings will be performed. +# +# When a ping is sent, the client has ``websocket_ping_timeout`` seconds to +# respond. If no response is received within this period, the connection will be +# closed from the server side. 
+# Default: 0 # c.BaseBeakerApp.websocket_ping_interval = 0 -## -# See also: ServerApp.websocket_ping_timeout +## Configure the websocket ping timeout in seconds. +# +# See ``websocket_ping_interval`` for details. +# Default: 0 # c.BaseBeakerApp.websocket_ping_timeout = 0 ## The base URL for websockets, -# See also: ServerApp.websocket_url +# if it differs from the HTTP server (hint: it almost certainly doesn't). +# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +# Default: '' # c.BaseBeakerApp.websocket_url = '' ## Working directory for kernel execution and file operations @@ -413,170 +523,197 @@ # c.BeakerIdentityProvider.beaker_kernel_header = 'X-AUTH-BEAKER' ## Name of the cookie to set for persisting login. Default: username-${Host}. -# See also: IdentityProvider.cookie_name +# Default: '' # c.BeakerIdentityProvider.cookie_name = '' ## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's # set_secure_cookie docs for details. -# See also: IdentityProvider.cookie_options +# Default: {} # c.BeakerIdentityProvider.cookie_options = {} ## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's # get_secure_cookie docs for details. -# See also: IdentityProvider.get_secure_cookie_kwargs +# Default: {} # c.BeakerIdentityProvider.get_secure_cookie_kwargs = {} ## The login handler class to use, if any. -# See also: IdentityProvider.login_handler_class +# Default: 'jupyter_server.auth.login.LoginFormHandler' # c.BeakerIdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' ## The logout handler class to use. -# See also: IdentityProvider.logout_handler_class +# Default: 'jupyter_server.auth.logout.LogoutHandler' # c.BeakerIdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' ## Specify whether login cookie should have the `secure` property (HTTPS- # only).Only needed when protocol-detection gives the wrong answer due to # proxies. 
-# See also: IdentityProvider.secure_cookie +# Default: None # c.BeakerIdentityProvider.secure_cookie = None ## Token used for authenticating first-time connections to the server. -# See also: IdentityProvider.token +# +# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly +# with the JUPYTER_TOKEN environment variable. +# +# When no password is enabled, +# the default is to generate a new, random token. +# +# Setting to an empty string disables authentication altogether, which +# is NOT RECOMMENDED. +# +# Prior to 2.0: configured as ServerApp.token +# Default: '' # c.BeakerIdentityProvider.token = '' -#------------------------------------------------------------------------------ -# NotebookIdentityProvider(BeakerIdentityProvider, IdentityProvider) configuration -#------------------------------------------------------------------------------ -## Header name for Beaker kernel authentication -# See also: BeakerIdentityProvider.beaker_kernel_header -# c.NotebookIdentityProvider.beaker_kernel_header = 'X-AUTH-BEAKER' - -## Name of the cookie to set for persisting login. Default: username-${Host}. -# See also: IdentityProvider.cookie_name -# c.NotebookIdentityProvider.cookie_name = '' - -## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's -# set_secure_cookie docs for details. -# See also: IdentityProvider.cookie_options -# c.NotebookIdentityProvider.cookie_options = {} - -## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's -# get_secure_cookie docs for details. -# See also: IdentityProvider.get_secure_cookie_kwargs -# c.NotebookIdentityProvider.get_secure_cookie_kwargs = {} - -## The login handler class to use, if any. -# See also: IdentityProvider.login_handler_class -# c.NotebookIdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' - -## The logout handler class to use. 
-# See also: IdentityProvider.logout_handler_class -# c.NotebookIdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' - -## Specify whether login cookie should have the `secure` property (HTTPS- -# only).Only needed when protocol-detection gives the wrong answer due to -# proxies. -# See also: IdentityProvider.secure_cookie -# c.NotebookIdentityProvider.secure_cookie = None - -## Token used for authenticating first-time connections to the server. -# See also: IdentityProvider.token -# c.NotebookIdentityProvider.token = '' #------------------------------------------------------------------------------ # BeakerContentsManager(AsyncLargeFileManager) configuration #------------------------------------------------------------------------------ ## Allow access to hidden files -# See also: ContentsManager.allow_hidden +# Default: False # c.BeakerContentsManager.allow_hidden = False ## If True, deleting a non-empty directory will always be allowed. -# See also: FileContentsManager.always_delete_dir +# WARNING this may result in files being permanently removed; e.g. on Windows, +# if the data size is too big for the trash/recycle bin the directory will be permanently +# deleted. If False (default), the non-empty directory will be sent to the trash only +# if safe. And if ``delete_to_trash`` is True, the directory won't be deleted. 
+# Default: False # c.BeakerContentsManager.always_delete_dir = False -# See also: AsyncContentsManager.checkpoints +# Default: None # c.BeakerContentsManager.checkpoints = None -# See also: AsyncContentsManager.checkpoints_class +# Default: 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' # c.BeakerContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' -# See also: AsyncContentsManager.checkpoints_kwargs +# Default: {} # c.BeakerContentsManager.checkpoints_kwargs = {} ## If True (default), deleting files will send them to the -# See also: FileContentsManager.delete_to_trash +# platform's trash/recycle bin, where they can be recovered. If False, +# deleting files really deletes them. +# Default: True # c.BeakerContentsManager.delete_to_trash = True -# See also: ContentsManager.event_logger +# Default: None # c.BeakerContentsManager.event_logger = None ## handler class to use when serving raw file requests. -# See also: ContentsManager.files_handler_class +# +# Default is a fallback that talks to the ContentsManager API, +# which may be inefficient, especially for large files. +# +# Local files-based ContentsManagers can use a StaticFileHandler subclass, +# which will be much more efficient. +# +# Access to these files should be Authenticated. +# Default: 'jupyter_server.files.handlers.FilesHandler' # c.BeakerContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' ## Extra parameters to pass to files_handler_class. -# See also: ContentsManager.files_handler_params +# +# For example, StaticFileHandlers generally expect a `path` argument +# specifying the root directory from which to serve files. 
+# Default: {} # c.BeakerContentsManager.files_handler_params = {} ## Hash algorithm to use for file content, support by hashlib -# See also: FileManagerMixin.hash_algorithm +# Choices: any of ['blake2s', 'md5', 'sha256', 'sha3_512', 'sm3', 'shake_256', 'sha512_256', 'sha3_224', 'ripemd160', 'sha1', 'blake2b', 'sha512', 'sha3_256', 'shake_128', 'sha384', 'sha224', 'sha3_384', 'sha512_224', 'md5-sha1'] +# Default: 'sha256' # c.BeakerContentsManager.hash_algorithm = 'sha256' -## -# See also: ContentsManager.hide_globs +## Glob patterns to hide in file and directory listings. +# Default: ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] # c.BeakerContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] ## The max folder size that can be copied -# See also: FileContentsManager.max_copy_folder_size_mb +# Default: 500 # c.BeakerContentsManager.max_copy_folder_size_mb = 500 ## Python callable or importstring thereof -# See also: ContentsManager.post_save_hook +# +# to be called on the path of a file just saved. +# +# This can be used to process the file on disk, +# such as converting the notebook to a script or HTML via nbconvert. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(os_path=os_path, model=model, contents_manager=instance) +# +# - path: the filesystem path to the file just written +# - model: the model representing the file +# - contents_manager: this ContentsManager instance +# Default: None # c.BeakerContentsManager.post_save_hook = None ## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook +# +# To be called on a contents model prior to save. +# +# This can be used to process the structure, +# such as removing notebook outputs or other side effects that +# should not be saved. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(path=path, model=model, contents_manager=self) +# +# - model: the model to be saved. Includes file contents. 
+# Modifying this dict will affect the file that is stored. +# - path: the API path of the save destination +# - contents_manager: this ContentsManager instance +# Default: None # c.BeakerContentsManager.pre_save_hook = None ## Preferred starting directory to use for notebooks. This is an API path (`/` # separated, relative to root dir) -# See also: ContentsManager.preferred_dir +# Default: '' # c.BeakerContentsManager.preferred_dir = '' -# See also: FileContentsManager.root_dir +# Default: '' # c.BeakerContentsManager.root_dir = '' ## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory +# Default: 'Untitled Folder' # c.BeakerContentsManager.untitled_directory = 'Untitled Folder' ## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file +# Default: 'untitled' # c.BeakerContentsManager.untitled_file = 'untitled' ## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook +# Default: 'Untitled' # c.BeakerContentsManager.untitled_notebook = 'Untitled' -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. -# See also: FileManagerMixin.use_atomic_writing +## By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones. +# This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs). 
+# If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) +# Default: True # c.BeakerContentsManager.use_atomic_writing = True #------------------------------------------------------------------------------ # BeakerKernelMappingManager(AsyncMappingKernelManager) configuration #------------------------------------------------------------------------------ ## Whether to send tracebacks to clients on exceptions. -# See also: MappingKernelManager.allow_tracebacks +# Default: True # c.BeakerKernelMappingManager.allow_tracebacks = True ## White list of allowed kernel message types. -# See also: MappingKernelManager.allowed_message_types +# When the list is empty, all message types are allowed. +# Default: [] # c.BeakerKernelMappingManager.allowed_message_types = [] ## Whether messages from kernels whose frontends have disconnected should be # buffered in-memory. -# See also: MappingKernelManager.buffer_offline_messages +# +# When True (default), messages are buffered and replayed on reconnect, +# avoiding lost messages due to interrupted connectivity. +# +# Disable if long-running kernels will produce too much output while +# no frontends are connected. +# Default: True # c.BeakerKernelMappingManager.buffer_offline_messages = True ## Directory for kernel connection files @@ -584,11 +721,13 @@ # c.BeakerKernelMappingManager.connection_dir = '/home/matt/.local/share/beaker/runtime/kernelfiles' ## Whether to consider culling kernels which are busy. -# See also: MappingKernelManager.cull_busy +# Only effective if cull_idle_timeout > 0. +# Default: False # c.BeakerKernelMappingManager.cull_busy = False ## Whether to consider culling kernels which have one or more connections. -# See also: MappingKernelManager.cull_connected +# Only effective if cull_idle_timeout > 0. 
+# Default: False # c.BeakerKernelMappingManager.cull_connected = False ## Timeout in seconds for culling idle kernels @@ -597,55 +736,66 @@ ## The interval (in seconds) on which to check for idle kernels exceeding the # cull timeout value. -# See also: MappingKernelManager.cull_interval +# Default: 300 # c.BeakerKernelMappingManager.cull_interval = 300 ## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name +# Default: 'python3' # c.BeakerKernelMappingManager.default_kernel_name = 'python3' ## Timeout for giving up on a kernel (in seconds). -# See also: MappingKernelManager.kernel_info_timeout +# +# On starting and restarting kernels, we check whether the +# kernel is running and responsive by sending kernel_info_requests. +# This sets the timeout in seconds for how long the kernel can take +# before being presumed dead. +# This affects the MappingKernelManager (which handles kernel restarts) +# and the ZMQChannelsHandler (which handles the startup). +# Default: 60 # c.BeakerKernelMappingManager.kernel_info_timeout = 60 -# See also: MappingKernelManager.root_dir +# Default: '' # c.BeakerKernelMappingManager.root_dir = '' ## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context +# Default: True # c.BeakerKernelMappingManager.shared_context = True ## Message to print when allow_tracebacks is False, and an exception occurs -# See also: MappingKernelManager.traceback_replacement_message +# Default: 'An exception occurred at runtime, which is not shown due to security reasons.' # c.BeakerKernelMappingManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' ## List of kernel message types excluded from user activity tracking. -# See also: MappingKernelManager.untracked_message_types +# +# This should be a superset of the message types sent on any channel other +# than the shell channel. 
+# Default: ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] # c.BeakerKernelMappingManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] ## Whether to make kernels available before the process has started. The -# See also: AsyncMultiKernelManager.use_pending_kernels +# kernel has a `.ready` future which can be awaited before connecting +# Default: False # c.BeakerKernelMappingManager.use_pending_kernels = False #------------------------------------------------------------------------------ # BeakerKernelSpecManager(KernelSpecManager) configuration #------------------------------------------------------------------------------ ## List of allowed kernel names. -# See also: KernelSpecManager.allowed_kernelspecs +# +# By default, all installed kernels are allowed. +# Default: set() # c.BeakerKernelSpecManager.allowed_kernelspecs = set() ## If there is no Python kernelspec registered and the IPython -# See also: KernelSpecManager.ensure_native_kernel +# kernel is available, ensure it is added to the spec list. +# Default: True # c.BeakerKernelSpecManager.ensure_native_kernel = True ## The kernel spec class. This is configurable to allow -# See also: KernelSpecManager.kernel_spec_class +# subclassing of the KernelSpecManager for customized behavior. 
+# Default: 'jupyter_client.kernelspec.KernelSpec' # c.BeakerKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' -## Deprecated, use `KernelSpecManager.allowed_kernelspecs` -# See also: KernelSpecManager.whitelist -# c.BeakerKernelSpecManager.whitelist = set() - #------------------------------------------------------------------------------ # BeakerSessionManager(SessionManager) configuration #------------------------------------------------------------------------------ @@ -653,7 +803,7 @@ # /path/to/session_database.db). By default, the session database is stored in- # memory (i.e. `:memory:` setting from sqlite3) and does not persist when the # current Jupyter Server shuts down. -# See also: SessionManager.database_filepath +# Default: ':memory:' # c.BeakerSessionManager.database_filepath = ':memory:' #------------------------------------------------------------------------------ @@ -662,7 +812,7 @@ ## Mixin for configurable classes that work with connection files ## JSON file in which to store connection info [default: kernel-.json] -# +# # This file will contain the IP, ports, and authentication key needed to connect # clients to this kernel. By default, this file will be created in the security dir # of the current profile, but can be specified by absolute path. @@ -700,36 +850,12 @@ # Default: 'tcp' # c.ConnectionFileMixin.transport = 'tcp' -#------------------------------------------------------------------------------ -# KernelSpecManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## A manager for kernel specs. - -## List of allowed kernel names. -# -# By default, all installed kernels are allowed. -# Default: set() -# c.KernelSpecManager.allowed_kernelspecs = set() - -## If there is no Python kernelspec registered and the IPython -# kernel is available, ensure it is added to the spec list. 
-# Default: True -# c.KernelSpecManager.ensure_native_kernel = True - -## The kernel spec class. This is configurable to allow -# subclassing of the KernelSpecManager for customized behavior. -# Default: 'jupyter_client.kernelspec.KernelSpec' -# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' - -## Deprecated, use `KernelSpecManager.allowed_kernelspecs` -# Default: set() -# c.KernelSpecManager.whitelist = set() #------------------------------------------------------------------------------ # KernelManager(ConnectionFileMixin) configuration #------------------------------------------------------------------------------ ## Manages a single kernel in a subprocess on this host. -# +# # This version starts kernels with Popen. ## Should we autorestart the kernel if it dies. @@ -741,30 +867,6 @@ # Default: False # c.KernelManager.cache_ports = False -## JSON file in which to store connection info [default: kernel-.json] -# See also: ConnectionFileMixin.connection_file -# c.KernelManager.connection_file = '' - -## set the control (ROUTER) port [default: random] -# See also: ConnectionFileMixin.control_port -# c.KernelManager.control_port = 0 - -## set the heartbeat port [default: random] -# See also: ConnectionFileMixin.hb_port -# c.KernelManager.hb_port = 0 - -## set the iopub (PUB) port [default: random] -# See also: ConnectionFileMixin.iopub_port -# c.KernelManager.iopub_port = 0 - -## Set the kernel's IP address [default localhost]. -# See also: ConnectionFileMixin.ip -# c.KernelManager.ip = '' - -## set the shell (ROUTER) port [default: random] -# See also: ConnectionFileMixin.shell_port -# c.KernelManager.shell_port = 0 - ## Time to wait for a kernel to terminate before killing it, in seconds. 
When a # shutdown request is initiated, the kernel will be immediately sent an # interrupt (SIGINT), followedby a shutdown_request message, after 1/2 of @@ -776,77 +878,49 @@ # Default: 5.0 # c.KernelManager.shutdown_wait_time = 5.0 -## set the stdin (ROUTER) port [default: random] -# See also: ConnectionFileMixin.stdin_port -# c.KernelManager.stdin_port = 0 - -# See also: ConnectionFileMixin.transport -# c.KernelManager.transport = 'tcp' - #------------------------------------------------------------------------------ # AsyncMultiKernelManager(MultiKernelManager) configuration #------------------------------------------------------------------------------ -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.AsyncMultiKernelManager.default_kernel_name = 'python3' - ## The kernel manager class. This is configurable to allow # subclassing of the AsyncKernelManager for customized behavior. # Default: 'jupyter_client.ioloop.AsyncIOLoopKernelManager' # c.AsyncMultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.AsyncMultiKernelManager.shared_context = True - -## Whether to make kernels available before the process has started. The -# kernel has a `.ready` future which can be awaited before connecting -# Default: False -# c.AsyncMultiKernelManager.use_pending_kernels = False - #------------------------------------------------------------------------------ # MultiKernelManager(LoggingConfigurable) configuration #------------------------------------------------------------------------------ ## A class for managing multiple kernels. -## The name of the default kernel to start -# Default: 'python3' -# c.MultiKernelManager.default_kernel_name = 'python3' - ## The kernel manager class. This is configurable to allow # subclassing of the KernelManager for customized behavior. 
# Default: 'jupyter_client.ioloop.IOLoopKernelManager' # c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' -## Share a single zmq.Context to talk to all my kernels -# Default: True -# c.MultiKernelManager.shared_context = True - #------------------------------------------------------------------------------ # Session(Configurable) configuration #------------------------------------------------------------------------------ ## Object for handling serialization and sending of messages. -# +# # The Session object handles building messages and sending them with ZMQ sockets # or ZMQStream objects. Objects can communicate with each other over the # network via Session objects, and only need to work with the dict-based IPython # message spec. The Session will handle serialization/deserialization, security, # and metadata. -# +# # Sessions support configurable serialization via packer/unpacker traits, and # signing with HMAC digests via the key/keyfile traits. -# +# # Parameters ---------- -# +# # debug : bool # whether to trigger extra debugging statements # packer/unpacker : str : 'json', 'pickle' or import_string # importstrings for methods to serialize message parts. If just # 'json' or 'pickle', predefined JSON and pickle packers will be used. # Otherwise, the entire importstring must be used. -# +# # The functions must accept at least valid JSON input, and output *bytes*. -# +# # For example, to use msgpack: # packer = 'msgpack.packb', unpacker='msgpack.unpackb' # pack/unpack : callables @@ -868,7 +942,7 @@ # c.Session.buffer_threshold = 1024 ## Whether to check PID to protect against calls after fork. -# +# # This check can be disabled if fork-safety is handled elsewhere. # Default: True # c.Session.check_pid = True @@ -882,7 +956,7 @@ # c.Session.debug = False ## The maximum number of digests to remember. -# +# # The digest history will be culled when it exceeds this value. 
# Default: 65536 # c.Session.digest_history_size = 65536 @@ -934,120 +1008,25 @@ #------------------------------------------------------------------------------ ## Base class for Jupyter applications -## Answer yes to any prompts. -# Default: False -# c.JupyterApp.answer_yes = False - -## Full path of a config file. -# Default: '' -# c.JupyterApp.config_file = '' - ## Specify a config file to load. # Default: '' # c.JupyterApp.config_file_name = '' -## Generate default config file. -# Default: False -# c.JupyterApp.generate_config = False - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.JupyterApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.JupyterApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.JupyterApp.log_level = 30 - -## -# See also: Application.logging_config -# c.JupyterApp.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.JupyterApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.JupyterApp.show_config_json = False - #------------------------------------------------------------------------------ # EventLogger(LoggingConfigurable) configuration #------------------------------------------------------------------------------ ## An Event logger for emitting structured events. -# +# # Event schemas must be registered with the EventLogger using the # `register_schema` or `register_schema_file` methods. Every schema will be # validated against Jupyter Event's metaschema. ## A list of logging.Handler instances to send events to. -# +# # When set to None (the default), all events are discarded. 
# Default: None # c.EventLogger.handlers = None -#------------------------------------------------------------------------------ -# IdentityProvider(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## Interface for providing identity management and authentication. -# -# Two principle methods: -# -# - :meth:`~jupyter_server.auth.IdentityProvider.get_user` returns a :class:`~.User` object -# for successful authentication, or None for no-identity-found. -# - :meth:`~jupyter_server.auth.IdentityProvider.identity_model` turns a :class:`~jupyter_server.auth.User` into a JSONable dict. -# The default is to use :py:meth:`dataclasses.asdict`, -# and usually shouldn't need override. -# -# Additional methods can customize authentication. -# -# .. versionadded:: 2.0 - -## Name of the cookie to set for persisting login. Default: username-${Host}. -# Default: '' -# c.IdentityProvider.cookie_name = '' - -## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's -# set_secure_cookie docs for details. -# Default: {} -# c.IdentityProvider.cookie_options = {} - -## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's -# get_secure_cookie docs for details. -# Default: {} -# c.IdentityProvider.get_secure_cookie_kwargs = {} - -## The login handler class to use, if any. -# Default: 'jupyter_server.auth.login.LoginFormHandler' -# c.IdentityProvider.login_handler_class = 'jupyter_server.auth.login.LoginFormHandler' - -## The logout handler class to use. -# Default: 'jupyter_server.auth.logout.LogoutHandler' -# c.IdentityProvider.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' - -## Specify whether login cookie should have the `secure` property (HTTPS- -# only).Only needed when protocol-detection gives the wrong answer due to -# proxies. -# Default: None -# c.IdentityProvider.secure_cookie = None - -## Token used for authenticating first-time connections to the server. 
-# -# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set directly -# with the JUPYTER_TOKEN environment variable. -# -# When no password is enabled, -# the default is to generate a new, random token. -# -# Setting to an empty string disables authentication altogether, which -# is NOT RECOMMENDED. -# -# Prior to 2.0: configured as ServerApp.token -# Default: '' -# c.IdentityProvider.token = '' #------------------------------------------------------------------------------ # GatewayWebSocketConnection(BaseKernelWebsocketConnection) configuration @@ -1057,7 +1036,7 @@ # Default: '' # c.GatewayWebSocketConnection.kernel_ws_protocol = '' -# See also: BaseKernelWebsocketConnection.session +# Default: None # c.GatewayWebSocketConnection.session = None #------------------------------------------------------------------------------ @@ -1083,13 +1062,13 @@ ## The authorization header's key name (typically 'Authorization') used in the # HTTP headers. The header will be formatted as:: -# +# # {'{auth_header_key}': '{auth_scheme} {auth_token}'} -# +# # If the authorization header key takes a single value, `auth_scheme` should be # set to None and 'auth_token' should be configured to use the appropriate # value. -# +# # (JUPYTER_GATEWAY_AUTH_HEADER_KEY env var) # Default: '' # c.GatewayClient.auth_header_key = '' @@ -1101,9 +1080,9 @@ ## The authorization token used in the HTTP headers. 
The header will be formatted # as:: -# +# # {'{auth_header_key}': '{auth_scheme} {auth_token}'} -# +# # (JUPYTER_GATEWAY_AUTH_TOKEN env var) # Default: None # c.GatewayClient.auth_token = None @@ -1127,10 +1106,6 @@ # Default: 40.0 # c.GatewayClient.connect_timeout = 40.0 -## Deprecated, use `GatewayClient.allowed_envs` -# Default: '' -# c.GatewayClient.env_whitelist = '' - # Default: None # c.GatewayClient.event_logger = None @@ -1213,26 +1188,6 @@ # Default: None # c.GatewayClient.ws_url = None -#------------------------------------------------------------------------------ -# GatewayKernelSpecManager(KernelSpecManager) configuration -#------------------------------------------------------------------------------ -## A gateway kernel spec manager. - -## List of allowed kernel names. -# See also: KernelSpecManager.allowed_kernelspecs -# c.GatewayKernelSpecManager.allowed_kernelspecs = set() - -## If there is no Python kernelspec registered and the IPython -# See also: KernelSpecManager.ensure_native_kernel -# c.GatewayKernelSpecManager.ensure_native_kernel = True - -## The kernel spec class. This is configurable to allow -# See also: KernelSpecManager.kernel_spec_class -# c.GatewayKernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' - -## Deprecated, use `KernelSpecManager.allowed_kernelspecs` -# See also: KernelSpecManager.whitelist -# c.GatewayKernelSpecManager.whitelist = set() #------------------------------------------------------------------------------ # GatewayMappingKernelManager(AsyncMappingKernelManager) configuration @@ -1240,253 +1195,38 @@ ## Kernel manager that supports remote kernels hosted by Jupyter Kernel or # Enterprise Gateway. -## Whether to send tracebacks to clients on exceptions. -# See also: MappingKernelManager.allow_tracebacks -# c.GatewayMappingKernelManager.allow_tracebacks = True - -## White list of allowed kernel message types. 
-# See also: MappingKernelManager.allowed_message_types -# c.GatewayMappingKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be -# buffered in-memory. -# See also: MappingKernelManager.buffer_offline_messages -# c.GatewayMappingKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. -# See also: MappingKernelManager.cull_busy -# c.GatewayMappingKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# See also: MappingKernelManager.cull_connected -# c.GatewayMappingKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be -# culled. -# See also: MappingKernelManager.cull_idle_timeout +## Timeout (in seconds) after which a kernel is considered idle and ready to be culled. +# Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled +# for users with poor network connections. +# Default: 0 # c.GatewayMappingKernelManager.cull_idle_timeout = 0 -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. -# See also: MappingKernelManager.cull_interval -# c.GatewayMappingKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.GatewayMappingKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# See also: MappingKernelManager.kernel_info_timeout -# c.GatewayMappingKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. 
This is configurable to allow -# See also: AsyncMultiKernelManager.kernel_manager_class -# c.GatewayMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' - -# See also: MappingKernelManager.root_dir -# c.GatewayMappingKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.GatewayMappingKernelManager.shared_context = True - -## Message to print when allow_tracebacks is False, and an exception occurs -# See also: MappingKernelManager.traceback_replacement_message -# c.GatewayMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' - -## List of kernel message types excluded from user activity tracking. -# See also: MappingKernelManager.untracked_message_types -# c.GatewayMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] - -## Whether to make kernels available before the process has started. The -# See also: AsyncMultiKernelManager.use_pending_kernels -# c.GatewayMappingKernelManager.use_pending_kernels = False - -#------------------------------------------------------------------------------ -# GatewaySessionManager(SessionManager) configuration -#------------------------------------------------------------------------------ -## A gateway session manager. - -## The filesystem path to SQLite Database file (e.g. -# /path/to/session_database.db). By default, the session database is stored in- -# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the -# current Jupyter Server shuts down. 
-# See also: SessionManager.database_filepath -# c.GatewaySessionManager.database_filepath = ':memory:' #------------------------------------------------------------------------------ # ServerApp(JupyterApp) configuration #------------------------------------------------------------------------------ ## The Jupyter Server application class. -## Set the Access-Control-Allow-Credentials: true header -# Default: False -# c.ServerApp.allow_credentials = False - -## Whether or not to allow external kernels, whose connection files are placed in -# external_connection_dir. -# Default: False -# c.ServerApp.allow_external_kernels = False - ## Set the Access-Control-Allow-Origin header -# +# # Use '*' to allow any origin to access your server. -# +# # Takes precedence over allow_origin_pat. # Default: '' # c.ServerApp.allow_origin = '' -## Use a regular expression for the Access-Control-Allow-Origin header -# -# Requests from an origin matching the expression will get replies with: -# -# Access-Control-Allow-Origin: origin -# -# where `origin` is the origin of the request. -# -# Ignored if allow_origin is set. -# Default: '' -# c.ServerApp.allow_origin_pat = '' - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.allow_password_change -# Default: True -# c.ServerApp.allow_password_change = True - -## Allow requests where the Host header doesn't point to a local server -# -# By default, requests get a 403 forbidden response if the 'Host' header -# shows that the browser thinks it's on a non-local domain. -# Setting this option to True disables this check. -# -# This protects against 'DNS rebinding' attacks, where a remote web server -# serves you a page and then changes its DNS to send later requests to a -# local IP, bypassing same-origin checks. -# -# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, -# along with hostnames configured in local_hostnames. 
-# Default: False -# c.ServerApp.allow_remote_access = False - -## Whether to allow the user to run the server as root. -# Default: False -# c.ServerApp.allow_root = False - -## Allow unauthenticated access to endpoints without authentication rule. -# -# When set to `True` (default in jupyter-server 2.0, subject to change -# in the future), any request to an endpoint without an authentication rule -# (either `@tornado.web.authenticated`, or `@allow_unauthenticated`) -# will be permitted, regardless of whether user has logged in or not. -# -# When set to `False`, logging in will be required for access to each endpoint, -# excluding the endpoints marked with `@allow_unauthenticated` decorator. -# -# This option can be configured using `JUPYTER_SERVER_ALLOW_UNAUTHENTICATED_ACCESS` -# environment variable: any non-empty value other than "true" and "yes" will -# prevent unauthenticated access to endpoints without `@allow_unauthenticated`. -# Default: True -# c.ServerApp.allow_unauthenticated_access = True - -## Answer yes to any prompts. -# See also: JupyterApp.answer_yes -# c.ServerApp.answer_yes = False - -## " -# Require authentication to access prometheus metrics. -# Default: True -# c.ServerApp.authenticate_prometheus = True - -## The authorizer class to use. -# Default: 'jupyter_server.auth.authorizer.AllowAllAuthorizer' -# c.ServerApp.authorizer_class = 'jupyter_server.auth.authorizer.AllowAllAuthorizer' - -## Reload the webapp when changes are made to any Python src files. -# Default: False -# c.ServerApp.autoreload = False - -## The base URL for the Jupyter server. -# -# Leading and trailing slashes can be omitted, -# and will automatically be added. -# Default: '/' -# c.ServerApp.base_url = '/' - -## Specify what command to use to invoke a web -# browser when starting the server. If not specified, the -# default browser will be determined by the `webbrowser` -# standard library module, which allows setting of the -# BROWSER environment variable to override it. 
-# Default: '' -# c.ServerApp.browser = '' - -## The full path to an SSL/TLS certificate file. -# Default: '' -# c.ServerApp.certfile = '' - -## The full path to a certificate authority certificate for SSL/TLS client -# authentication. -# Default: '' -# c.ServerApp.client_ca = '' - -## Full path of a config file. -# See also: JupyterApp.config_file -# c.ServerApp.config_file = '' - -## Specify a config file to load. -# See also: JupyterApp.config_file_name -# c.ServerApp.config_file_name = '' - -## The config manager class to use -# Default: 'jupyter_server.services.config.manager.ConfigManager' -# c.ServerApp.config_manager_class = 'jupyter_server.services.config.manager.ConfigManager' - -## The content manager class to use. -# Default: 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' -# c.ServerApp.contents_manager_class = 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' - -## DEPRECATED. Use IdentityProvider.cookie_options -# Default: {} -# c.ServerApp.cookie_options = {} - -## The random bytes used to secure cookies. -# By default this is generated on first start of the server and persisted across server -# sessions by writing the cookie secret into the `cookie_secret_file` file. -# When using an executable config file you can override this to be random at each server restart. -# -# Note: Cookie secrets should be kept private, do not share config files with -# cookie_secret stored in plaintext (you can read the value from a file). -# Default: b'' -# c.ServerApp.cookie_secret = b'' - -## The file where the cookie secret is stored. -# Default: '' -# c.ServerApp.cookie_secret_file = '' - -## Override URL shown to users. -# -# Replace actual URL, including protocol, address, port and base URL, -# with the given value when displaying URL to the users. Do not change -# the actual connection URL. If authentication token is enabled, the -# token is added to the custom URL automatically. 
-# -# This option is intended to be used when the URL to display to the user -# cannot be determined reliably by the Jupyter server (proxified -# or containerized setups for example). -# Default: '' -# c.ServerApp.custom_display_url = '' - -## The default URL to redirect to from `/` -# Default: '/' -# c.ServerApp.default_url = '/' +## The content manager class to use. +# Default: 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' +# c.ServerApp.contents_manager_class = 'jupyter_server.services.contents.largefilemanager.AsyncLargeFileManager' ## Disable cross-site-request-forgery protection -# +# # Jupyter server includes protection from cross-site request forgeries, # requiring API requests to either: -# +# # - originate from pages served by this server (validated with XSRF cookie and token), or # - authenticate with a token -# +# # Some anonymous compute resources still desire the ability to run code, # completely without authentication. # These services can disable all authentication and security checks, @@ -1494,819 +1234,69 @@ # Default: False # c.ServerApp.disable_check_xsrf = False -## The directory to look at for external kernel connection files, if -# allow_external_kernels is True. Defaults to Jupyter -# runtime_dir/external_kernels. Make sure that this directory is not filled with -# left-over connection files, that could result in unnecessary kernel manager -# creations. -# Default: None -# c.ServerApp.external_connection_dir = None - -## handlers that should be loaded at higher priority than the default services -# Default: [] -# c.ServerApp.extra_services = [] - -## Extra paths to search for serving static files. -# -# This allows adding javascript/css to be available from the Jupyter server machine, -# or overriding individual files in the IPython -# Default: [] -# c.ServerApp.extra_static_paths = [] - -## Extra paths to search for serving jinja templates. -# -# Can be used to override templates from jupyter_server.templates. 
-# Default: [] -# c.ServerApp.extra_template_paths = [] - -## Open the named file when the application is launched. -# Default: '' -# c.ServerApp.file_to_run = '' - -## The URL prefix where files are opened directly. -# Default: 'notebooks' -# c.ServerApp.file_url_prefix = 'notebooks' - -## Generate default config file. -# See also: JupyterApp.generate_config -# c.ServerApp.generate_config = False - -## DEPRECATED. Use IdentityProvider.get_secure_cookie_kwargs -# Default: {} -# c.ServerApp.get_secure_cookie_kwargs = {} - -## The identity provider class to use. -# Default: 'jupyter_server.auth.identity.PasswordIdentityProvider' -# c.ServerApp.identity_provider_class = 'jupyter_server.auth.identity.PasswordIdentityProvider' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_data_rate_limit -# Default: 0.0 -# c.ServerApp.iopub_data_rate_limit = 0.0 - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.iopub_msg_rate_limit -# Default: 0.0 -# c.ServerApp.iopub_msg_rate_limit = 0.0 - -## The IP address the Jupyter server will listen on. -# Default: 'localhost' -# c.ServerApp.ip = 'localhost' - -## Supply extra arguments that will be passed to Jinja environment. -# Default: {} -# c.ServerApp.jinja_environment_options = {} - -## Extra variables to supply to jinja templates when rendering. -# Default: {} -# c.ServerApp.jinja_template_vars = {} - -## Dict of Python modules to load as Jupyter server extensions.Entry values can -# be used to enable and disable the loading ofthe extensions. The extensions -# will be loaded in alphabetical order. -# Default: {} -# c.ServerApp.jpserver_extensions = {} - ## The kernel manager class to use. # Default: 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' # c.ServerApp.kernel_manager_class = 'jupyter_server.services.kernels.kernelmanager.MappingKernelManager' ## The kernel spec manager class to use. Should be a subclass of # `jupyter_client.kernelspec.KernelSpecManager`. 
-# +# # The Api of KernelSpecManager is provisional and might change without warning # between this version of Jupyter and the next stable one. # Default: 'builtins.object' # c.ServerApp.kernel_spec_manager_class = 'builtins.object' -## The kernel websocket connection class to use. -# Default: 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' -# c.ServerApp.kernel_websocket_connection_class = 'jupyter_server.services.kernels.connection.base.BaseKernelWebsocketConnection' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.kernel_ws_protocol -# Default: '' -# c.ServerApp.kernel_ws_protocol = '' - -## The full path to a private key file for usage with SSL/TLS. -# Default: '' -# c.ServerApp.keyfile = '' - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.limit_rate -# Default: False -# c.ServerApp.limit_rate = False - -## Hostnames to allow as local when allow_remote_access is False. -# -# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted -# as local as well. -# Default: ['localhost'] -# c.ServerApp.local_hostnames = ['localhost'] - -## The date format used by logging formatters for %(asctime)s -# See also: Application.log_datefmt -# c.ServerApp.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# See also: Application.log_format -# c.ServerApp.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# See also: Application.log_level -# c.ServerApp.log_level = 30 - -## -# See also: Application.logging_config -# c.ServerApp.logging_config = {} - -## The login handler class to use. -# Default: 'jupyter_server.auth.login.LegacyLoginHandler' -# c.ServerApp.login_handler_class = 'jupyter_server.auth.login.LegacyLoginHandler' - -## The logout handler class to use. 
-# Default: 'jupyter_server.auth.logout.LogoutHandler' -# c.ServerApp.logout_handler_class = 'jupyter_server.auth.logout.LogoutHandler' - -## Sets the maximum allowed size of the client request body, specified in the -# Content-Length request header field. If the size in a request exceeds the -# configured value, a malformed HTTP message is returned to the client. -# -# Note: max_body_size is applied even in streaming mode. -# Default: 536870912 -# c.ServerApp.max_body_size = 536870912 - -## Gets or sets the maximum amount of memory, in bytes, that is allocated for use -# by the buffer manager. -# Default: 536870912 -# c.ServerApp.max_buffer_size = 536870912 - -## Gets or sets a lower bound on the open file handles process resource limit. -# This may need to be increased if you run into an OSError: [Errno 24] Too many -# open files. This is not applicable when running on Windows. -# Default: 0 -# c.ServerApp.min_open_files_limit = 0 - -## DEPRECATED, use root_dir. -# Default: '' -# c.ServerApp.notebook_dir = '' - -## Whether to open in a browser after starting. -# The specific browser used is platform dependent and -# determined by the python standard library `webbrowser` -# module, unless it is overridden using the --browser -# (ServerApp.browser) configuration option. -# Default: False -# c.ServerApp.open_browser = False - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.hashed_password -# Default: '' -# c.ServerApp.password = '' - -## DEPRECATED in 2.0. Use PasswordIdentityProvider.password_required -# Default: False -# c.ServerApp.password_required = False - -## The port the server will listen on (env: JUPYTER_PORT). -# Default: 0 -# c.ServerApp.port = 0 - -## The number of additional ports to try if the specified port is not available -# (env: JUPYTER_PORT_RETRIES). -# Default: 50 -# c.ServerApp.port_retries = 50 - -## Preferred starting directory to use for notebooks and kernels. -# ServerApp.preferred_dir is deprecated in jupyter-server 2.0. 
Use -# FileContentsManager.preferred_dir instead -# Default: '' -# c.ServerApp.preferred_dir = '' - -## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. -# Default: 'disabled' -# c.ServerApp.pylab = 'disabled' - -## If True, display controls to shut down the Jupyter server, such as menu items -# or buttons. -# Default: True -# c.ServerApp.quit_button = True - -## DEPRECATED. Use ZMQChannelsWebsocketConnection.rate_limit_window -# Default: 0.0 -# c.ServerApp.rate_limit_window = 0.0 - -## Reraise exceptions encountered loading server extensions? -# Default: False -# c.ServerApp.reraise_server_extension_failures = False - -## The directory to use for notebooks and kernels. -# Default: '' -# c.ServerApp.root_dir = '' - -## The session manager class to use. -# Default: 'builtins.object' -# c.ServerApp.session_manager_class = 'builtins.object' - -## Instead of starting the Application, dump configuration to stdout -# See also: Application.show_config -# c.ServerApp.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# See also: Application.show_config_json -# c.ServerApp.show_config_json = False - -## Shut down the server after N seconds with no kernelsrunning and no activity. -# This can be used together with culling idle kernels -# (MappingKernelManager.cull_idle_timeout) to shutdown the Jupyter server when -# it's not in use. This is not precisely timed: it may shut down up to a minute -# later. 0 (the default) disables this automatic shutdown. -# Default: 0 -# c.ServerApp.shutdown_no_activity_timeout = 0 - -## The UNIX socket the Jupyter server will listen on. -# Default: '' -# c.ServerApp.sock = '' - -## The permissions mode for UNIX socket creation (default: 0600). -# Default: '0600' -# c.ServerApp.sock_mode = '0600' - -## Supply SSL options for the tornado HTTPServer. -# See the tornado docs for details. 
-# Default: {} -# c.ServerApp.ssl_options = {} - -## Paths to set up static files as immutable. -# -# This allow setting up the cache control of static files as immutable. It -# should be used for static file named with a hash for instance. -# Default: [] -# c.ServerApp.static_immutable_cache = [] - -## Supply overrides for terminado. Currently only supports "shell_command". -# Default: {} -# c.ServerApp.terminado_settings = {} - -## Set to False to disable terminals. -# -# This does *not* make the server more secure by itself. -# Anything the user can in a terminal, they can also do in a notebook. -# -# Terminals may also be automatically disabled if the terminado package -# is not available. -# Default: False -# c.ServerApp.terminals_enabled = False - -## DEPRECATED. Use IdentityProvider.token -# Default: '' -# c.ServerApp.token = '' - -## Supply overrides for the tornado.web.Application that the Jupyter server uses. -# Default: {} -# c.ServerApp.tornado_settings = {} - -## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- -# For headerssent by the upstream reverse proxy. Necessary if the proxy handles -# SSL -# Default: False -# c.ServerApp.trust_xheaders = False - -## Disable launching browser by redirect file -# For versions of notebook > 5.7.2, a security feature measure was added that -# prevented the authentication token used to launch the browser from being visible. -# This feature makes it difficult for other users on a multi-user system from -# running code in your Jupyter session as you. -# However, some environments (like Windows Subsystem for Linux (WSL) and Chromebooks), -# launching a browser using a redirect file can lead the browser failing to load. -# This is because of the difference in file structures/paths between the runtime and -# the browser. -# -# Disabling this setting to False will disable this behavior, allowing the browser -# to launch by using a URL and visible token (as before). 
-# Default: True -# c.ServerApp.use_redirect_file = True - -## Specify where to open the server on startup. This is the -# `new` argument passed to the standard library method `webbrowser.open`. -# The behaviour is not guaranteed, but depends on browser support. Valid -# values are: -# -# - 2 opens a new tab, -# - 1 opens a new window, -# - 0 opens in an existing window. -# -# See the `webbrowser.open` documentation for details. -# Default: 2 -# c.ServerApp.webbrowser_open_new = 2 - -## Set the tornado compression options for websocket connections. -# -# This value will be returned from -# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable -# compression. A dict (even an empty one) will enable compression. -# -# See the tornado docs for WebSocketHandler.get_compression_options for details. -# Default: None -# c.ServerApp.websocket_compression_options = None - -## Configure the websocket ping interval in seconds. -# -# Websockets are long-lived connections that are used by some Jupyter Server -# extensions. -# -# Periodic pings help to detect disconnected clients and keep the connection -# active. If this is set to None, then no pings will be performed. -# -# When a ping is sent, the client has ``websocket_ping_timeout`` seconds to -# respond. If no response is received within this period, the connection will be -# closed from the server side. -# Default: 0 -# c.ServerApp.websocket_ping_interval = 0 - -## Configure the websocket ping timeout in seconds. -# -# See ``websocket_ping_interval`` for details. -# Default: 0 -# c.ServerApp.websocket_ping_timeout = 0 - -## The base URL for websockets, -# if it differs from the HTTP server (hint: it almost certainly doesn't). 
-# -# Should be in the form of an HTTP origin: ws[s]://hostname[:port] -# Default: '' -# c.ServerApp.websocket_url = '' - -#------------------------------------------------------------------------------ -# ConfigManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## Config Manager used for storing frontend config - -## Name of the config directory. -# Default: 'serverconfig' -# c.ConfigManager.config_dir_name = 'serverconfig' - -#------------------------------------------------------------------------------ -# AsyncFileManagerMixin(FileManagerMixin) configuration -#------------------------------------------------------------------------------ -## Mixin for ContentsAPI classes that interact with the filesystem -# asynchronously. - -## Hash algorithm to use for file content, support by hashlib -# See also: FileManagerMixin.hash_algorithm -# c.AsyncFileManagerMixin.hash_algorithm = 'sha256' - -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. -# See also: FileManagerMixin.use_atomic_writing -# c.AsyncFileManagerMixin.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# FileManagerMixin(LoggingConfigurable, Configurable) configuration -#------------------------------------------------------------------------------ -## Mixin for ContentsAPI classes that interact with the filesystem. -# -# Provides facilities for reading, writing, and copying files. -# -# Shared by FileContentsManager and FileCheckpoints. -# -# Note ---- Classes using this mixin must provide the following attributes: -# -# root_dir : unicode -# A directory against against which API-style paths are to be resolved. 
-# -# log : logging.Logger - -## Hash algorithm to use for file content, support by hashlib -# Choices: any of ['sm3', 'sha256', 'shake_128', 'blake2b', 'sha512', 'shake_256', 'sha3_512', 'md5-sha1', 'sha384', 'sha224', 'sha512_224', 'sha3_384', 'md5', 'sha1', 'blake2s', 'sha3_256', 'ripemd160', 'sha512_256', 'sha3_224'] -# Default: 'sha256' -# c.FileManagerMixin.hash_algorithm = 'sha256' - -## By default notebooks are saved on disk on a temporary file and then if successfully written, it replaces the old ones. -# This procedure, namely 'atomic_writing', causes some bugs on file system without operation order enforcement (like some networked fs). -# If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota ) -# Default: True -# c.FileManagerMixin.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# AsyncFileContentsManager(FileContentsManager, AsyncFileManagerMixin, AsyncContentsManager) configuration -#------------------------------------------------------------------------------ -## An async file contents manager. - -## Allow access to hidden files -# See also: ContentsManager.allow_hidden -# c.AsyncFileContentsManager.allow_hidden = False - -## If True, deleting a non-empty directory will always be allowed. 
-# See also: FileContentsManager.always_delete_dir -# c.AsyncFileContentsManager.always_delete_dir = False - -# See also: AsyncContentsManager.checkpoints -# c.AsyncFileContentsManager.checkpoints = None - -# See also: AsyncContentsManager.checkpoints_class -# c.AsyncFileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' - -# See also: AsyncContentsManager.checkpoints_kwargs -# c.AsyncFileContentsManager.checkpoints_kwargs = {} - -## If True (default), deleting files will send them to the -# See also: FileContentsManager.delete_to_trash -# c.AsyncFileContentsManager.delete_to_trash = True - -# See also: ContentsManager.event_logger -# c.AsyncFileContentsManager.event_logger = None - -## handler class to use when serving raw file requests. -# See also: ContentsManager.files_handler_class -# c.AsyncFileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. -# See also: ContentsManager.files_handler_params -# c.AsyncFileContentsManager.files_handler_params = {} - -## Hash algorithm to use for file content, support by hashlib -# See also: FileManagerMixin.hash_algorithm -# c.AsyncFileContentsManager.hash_algorithm = 'sha256' - -## -# See also: ContentsManager.hide_globs -# c.AsyncFileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] - -## The max folder size that can be copied -# See also: FileContentsManager.max_copy_folder_size_mb -# c.AsyncFileContentsManager.max_copy_folder_size_mb = 500 - -## Python callable or importstring thereof -# See also: ContentsManager.post_save_hook -# c.AsyncFileContentsManager.post_save_hook = None - -## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook -# c.AsyncFileContentsManager.pre_save_hook = None - -## Preferred starting directory to use for notebooks. 
This is an API path (`/` -# separated, relative to root dir) -# See also: ContentsManager.preferred_dir -# c.AsyncFileContentsManager.preferred_dir = '' - -# See also: FileContentsManager.root_dir -# c.AsyncFileContentsManager.root_dir = '' - -## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory -# c.AsyncFileContentsManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file -# c.AsyncFileContentsManager.untitled_file = 'untitled' - -## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook -# c.AsyncFileContentsManager.untitled_notebook = 'Untitled' - -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. -# See also: FileManagerMixin.use_atomic_writing -# c.AsyncFileContentsManager.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# FileContentsManager(FileManagerMixin, ContentsManager) configuration -#------------------------------------------------------------------------------ -## A file contents manager. - -## Allow access to hidden files -# See also: ContentsManager.allow_hidden -# c.FileContentsManager.allow_hidden = False - -## If True, deleting a non-empty directory will always be allowed. -# WARNING this may result in files being permanently removed; e.g. on Windows, -# if the data size is too big for the trash/recycle bin the directory will be permanently -# deleted. If False (default), the non-empty directory will be sent to the trash only -# if safe. And if ``delete_to_trash`` is True, the directory won't be deleted. 
-# Default: False -# c.FileContentsManager.always_delete_dir = False - -# See also: ContentsManager.checkpoints -# c.FileContentsManager.checkpoints = None - -# See also: ContentsManager.checkpoints_class -# c.FileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints' - -# See also: ContentsManager.checkpoints_kwargs -# c.FileContentsManager.checkpoints_kwargs = {} - -## If True (default), deleting files will send them to the -# platform's trash/recycle bin, where they can be recovered. If False, -# deleting files really deletes them. -# Default: True -# c.FileContentsManager.delete_to_trash = True - -# See also: ContentsManager.event_logger -# c.FileContentsManager.event_logger = None - -## handler class to use when serving raw file requests. -# See also: ContentsManager.files_handler_class -# c.FileContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. -# See also: ContentsManager.files_handler_params -# c.FileContentsManager.files_handler_params = {} - -## Hash algorithm to use for file content, support by hashlib -# See also: FileManagerMixin.hash_algorithm -# c.FileContentsManager.hash_algorithm = 'sha256' - -## -# See also: ContentsManager.hide_globs -# c.FileContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] - -## The max folder size that can be copied -# Default: 500 -# c.FileContentsManager.max_copy_folder_size_mb = 500 - -## Python callable or importstring thereof -# See also: ContentsManager.post_save_hook -# c.FileContentsManager.post_save_hook = None - -## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook -# c.FileContentsManager.pre_save_hook = None - -## Preferred starting directory to use for notebooks. 
This is an API path (`/` -# separated, relative to root dir) -# See also: ContentsManager.preferred_dir -# c.FileContentsManager.preferred_dir = '' - -# Default: '' -# c.FileContentsManager.root_dir = '' - -## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory -# c.FileContentsManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file -# c.FileContentsManager.untitled_file = 'untitled' - -## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook -# c.FileContentsManager.untitled_notebook = 'Untitled' - -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. -# See also: FileManagerMixin.use_atomic_writing -# c.FileContentsManager.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# AsyncLargeFileManager(AsyncFileContentsManager) configuration -#------------------------------------------------------------------------------ -## Handle large file upload asynchronously - -## Allow access to hidden files -# See also: ContentsManager.allow_hidden -# c.AsyncLargeFileManager.allow_hidden = False - -## If True, deleting a non-empty directory will always be allowed. 
-# See also: FileContentsManager.always_delete_dir -# c.AsyncLargeFileManager.always_delete_dir = False - -# See also: AsyncContentsManager.checkpoints -# c.AsyncLargeFileManager.checkpoints = None - -# See also: AsyncContentsManager.checkpoints_class -# c.AsyncLargeFileManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' - -# See also: AsyncContentsManager.checkpoints_kwargs -# c.AsyncLargeFileManager.checkpoints_kwargs = {} - -## If True (default), deleting files will send them to the -# See also: FileContentsManager.delete_to_trash -# c.AsyncLargeFileManager.delete_to_trash = True - -# See also: ContentsManager.event_logger -# c.AsyncLargeFileManager.event_logger = None - -## handler class to use when serving raw file requests. -# See also: ContentsManager.files_handler_class -# c.AsyncLargeFileManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. -# See also: ContentsManager.files_handler_params -# c.AsyncLargeFileManager.files_handler_params = {} - -## Hash algorithm to use for file content, support by hashlib -# See also: FileManagerMixin.hash_algorithm -# c.AsyncLargeFileManager.hash_algorithm = 'sha256' - -## -# See also: ContentsManager.hide_globs -# c.AsyncLargeFileManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] - -## The max folder size that can be copied -# See also: FileContentsManager.max_copy_folder_size_mb -# c.AsyncLargeFileManager.max_copy_folder_size_mb = 500 - -## Python callable or importstring thereof -# See also: ContentsManager.post_save_hook -# c.AsyncLargeFileManager.post_save_hook = None - -## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook -# c.AsyncLargeFileManager.pre_save_hook = None - -## Preferred starting directory to use for notebooks. 
This is an API path (`/` -# separated, relative to root dir) -# See also: ContentsManager.preferred_dir -# c.AsyncLargeFileManager.preferred_dir = '' - -# See also: FileContentsManager.root_dir -# c.AsyncLargeFileManager.root_dir = '' - -## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory -# c.AsyncLargeFileManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file -# c.AsyncLargeFileManager.untitled_file = 'untitled' - -## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook -# c.AsyncLargeFileManager.untitled_notebook = 'Untitled' - -## By default notebooks are saved on disk on a temporary file and then if -# successfully written, it replaces the old ones. -# See also: FileManagerMixin.use_atomic_writing -# c.AsyncLargeFileManager.use_atomic_writing = True - -#------------------------------------------------------------------------------ -# AsyncContentsManager(ContentsManager) configuration -#------------------------------------------------------------------------------ -## Base class for serving files and directories asynchronously. - -## Allow access to hidden files -# See also: ContentsManager.allow_hidden -# c.AsyncContentsManager.allow_hidden = False - -# Default: None -# c.AsyncContentsManager.checkpoints = None - -# Default: 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' -# c.AsyncContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.AsyncCheckpoints' - -# Default: {} -# c.AsyncContentsManager.checkpoints_kwargs = {} - -# See also: ContentsManager.event_logger -# c.AsyncContentsManager.event_logger = None - -## handler class to use when serving raw file requests. 
-# See also: ContentsManager.files_handler_class -# c.AsyncContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' - -## Extra parameters to pass to files_handler_class. -# See also: ContentsManager.files_handler_params -# c.AsyncContentsManager.files_handler_params = {} - -## -# See also: ContentsManager.hide_globs -# c.AsyncContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] +## Whether to open in a browser after starting. +# The specific browser used is platform dependent and +# determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser +# (ServerApp.browser) configuration option. +# Default: False +# c.ServerApp.open_browser = False -## Python callable or importstring thereof -# See also: ContentsManager.post_save_hook -# c.AsyncContentsManager.post_save_hook = None +## Reraise exceptions encountered loading server extensions? +# Default: False +# c.ServerApp.reraise_server_extension_failures = False -## Python callable or importstring thereof -# See also: ContentsManager.pre_save_hook -# c.AsyncContentsManager.pre_save_hook = None +## The session manager class to use. +# Default: 'builtins.object' +# c.ServerApp.session_manager_class = 'builtins.object' -## Preferred starting directory to use for notebooks. This is an API path (`/` -# separated, relative to root dir) -# See also: ContentsManager.preferred_dir -# c.AsyncContentsManager.preferred_dir = '' +#------------------------------------------------------------------------------ +# ConfigManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ +## Config Manager used for storing frontend config -# See also: ContentsManager.root_dir -# c.AsyncContentsManager.root_dir = '/' +## Name of the config directory. 
+# Default: 'serverconfig' +# c.ConfigManager.config_dir_name = 'serverconfig' -## The base name used when creating untitled directories. -# See also: ContentsManager.untitled_directory -# c.AsyncContentsManager.untitled_directory = 'Untitled Folder' -## The base name used when creating untitled files. -# See also: ContentsManager.untitled_file -# c.AsyncContentsManager.untitled_file = 'untitled' -## The base name used when creating untitled notebooks. -# See also: ContentsManager.untitled_notebook -# c.AsyncContentsManager.untitled_notebook = 'Untitled' #------------------------------------------------------------------------------ -# ContentsManager(LoggingConfigurable) configuration +# FileContentsManager(FileManagerMixin, ContentsManager) configuration #------------------------------------------------------------------------------ -## Base class for serving files and directories. -# -# This serves any text or binary file, as well as directories, with special -# handling for JSON notebook documents. -# -# Most APIs take a path argument, which is always an API-style unicode path, and -# always refers to a directory. -# -# - unicode, not url-escaped -# - '/'-separated -# - leading and trailing '/' will be stripped -# - if unspecified, path defaults to '', -# indicating the root path. - -## Allow access to hidden files -# Default: False -# c.ContentsManager.allow_hidden = False +## A file contents manager. # Default: None -# c.ContentsManager.checkpoints = None +# c.FileContentsManager.checkpoints = None # Default: 'jupyter_server.services.contents.checkpoints.Checkpoints' -# c.ContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints' - -# Default: {} -# c.ContentsManager.checkpoints_kwargs = {} - -# Default: None -# c.ContentsManager.event_logger = None - -## handler class to use when serving raw file requests. 
-# -# Default is a fallback that talks to the ContentsManager API, -# which may be inefficient, especially for large files. -# -# Local files-based ContentsManagers can use a StaticFileHandler subclass, -# which will be much more efficient. -# -# Access to these files should be Authenticated. -# Default: 'jupyter_server.files.handlers.FilesHandler' -# c.ContentsManager.files_handler_class = 'jupyter_server.files.handlers.FilesHandler' +# c.FileContentsManager.checkpoints_class = 'jupyter_server.services.contents.checkpoints.Checkpoints' -## Extra parameters to pass to files_handler_class. -# -# For example, StaticFileHandlers generally expect a `path` argument -# specifying the root directory from which to serve files. # Default: {} -# c.ContentsManager.files_handler_params = {} - -## Glob patterns to hide in file and directory listings. -# Default: ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] -# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*~'] - -## Python callable or importstring thereof -# -# to be called on the path of a file just saved. -# -# This can be used to process the file on disk, -# such as converting the notebook to a script or HTML via nbconvert. -# -# It will be called as (all arguments passed by keyword):: -# -# hook(os_path=os_path, model=model, contents_manager=instance) -# -# - path: the filesystem path to the file just written -# - model: the model representing the file -# - contents_manager: this ContentsManager instance -# Default: None -# c.ContentsManager.post_save_hook = None +# c.FileContentsManager.checkpoints_kwargs = {} -## Python callable or importstring thereof -# -# To be called on a contents model prior to save. -# -# This can be used to process the structure, -# such as removing notebook outputs or other side effects that -# should not be saved. 
-# -# It will be called as (all arguments passed by keyword):: -# -# hook(path=path, model=model, contents_manager=self) -# -# - model: the model to be saved. Includes file contents. -# Modifying this dict will affect the file that is stored. -# - path: the API path of the save destination -# - contents_manager: this ContentsManager instance -# Default: None -# c.ContentsManager.pre_save_hook = None -## Preferred starting directory to use for notebooks. This is an API path (`/` -# separated, relative to root dir) -# Default: '' -# c.ContentsManager.preferred_dir = '' +#------------------------------------------------------------------------------ +# AsyncContentsManager(ContentsManager) configuration +#------------------------------------------------------------------------------ +## Base class for serving files and directories asynchronously. # Default: '/' -# c.ContentsManager.root_dir = '/' - -## The base name used when creating untitled directories. -# Default: 'Untitled Folder' -# c.ContentsManager.untitled_directory = 'Untitled Folder' - -## The base name used when creating untitled files. -# Default: 'untitled' -# c.ContentsManager.untitled_file = 'untitled' +# c.AsyncContentsManager.root_dir = '/' -## The base name used when creating untitled notebooks. 
-# Default: 'Untitled' -# c.ContentsManager.untitled_notebook = 'Untitled' #------------------------------------------------------------------------------ # BaseKernelWebsocketConnection(LoggingConfigurable) configuration @@ -2320,9 +1310,6 @@ # Default: None # c.BaseKernelWebsocketConnection.kernel_ws_protocol = None -# Default: None -# c.BaseKernelWebsocketConnection.session = None - #------------------------------------------------------------------------------ # ZMQChannelsWebsocketConnection(BaseKernelWebsocketConnection) configuration #------------------------------------------------------------------------------ @@ -2340,13 +1327,6 @@ # Default: 1000 # c.ZMQChannelsWebsocketConnection.iopub_msg_rate_limit = 1000 -## Preferred kernel message protocol over websocket to use (default: None). If an -# empty string is passed, select the legacy protocol. If None, the selected -# protocol will depend on what the front-end supports (usually the most recent -# protocol supported by the back-end and the front-end). -# See also: BaseKernelWebsocketConnection.kernel_ws_protocol -# c.ZMQChannelsWebsocketConnection.kernel_ws_protocol = None - ## Whether to limit the rate of IOPub messages (default: True). If True, use # iopub_msg_rate_limit, iopub_data_rate_limit and/or rate_limit_window to tune # the rate. @@ -2358,171 +1338,8 @@ # Default: 3 # c.ZMQChannelsWebsocketConnection.rate_limit_window = 3 -# See also: BaseKernelWebsocketConnection.session -# c.ZMQChannelsWebsocketConnection.session = None - -#------------------------------------------------------------------------------ -# AsyncMappingKernelManager(MappingKernelManager, AsyncMultiKernelManager) configuration -#------------------------------------------------------------------------------ -## An asynchronous mapping kernel manager. - -## Whether to send tracebacks to clients on exceptions. 
-# See also: MappingKernelManager.allow_tracebacks -# c.AsyncMappingKernelManager.allow_tracebacks = True - -## White list of allowed kernel message types. -# See also: MappingKernelManager.allowed_message_types -# c.AsyncMappingKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be -# buffered in-memory. -# See also: MappingKernelManager.buffer_offline_messages -# c.AsyncMappingKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. -# See also: MappingKernelManager.cull_busy -# c.AsyncMappingKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# See also: MappingKernelManager.cull_connected -# c.AsyncMappingKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be -# culled. -# See also: MappingKernelManager.cull_idle_timeout -# c.AsyncMappingKernelManager.cull_idle_timeout = 0 - -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. -# See also: MappingKernelManager.cull_interval -# c.AsyncMappingKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.AsyncMappingKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# See also: MappingKernelManager.kernel_info_timeout -# c.AsyncMappingKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. 
This is configurable to allow -# See also: AsyncMultiKernelManager.kernel_manager_class -# c.AsyncMappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager' - -# See also: MappingKernelManager.root_dir -# c.AsyncMappingKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.AsyncMappingKernelManager.shared_context = True - -## Message to print when allow_tracebacks is False, and an exception occurs -# See also: MappingKernelManager.traceback_replacement_message -# c.AsyncMappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' - -## List of kernel message types excluded from user activity tracking. -# See also: MappingKernelManager.untracked_message_types -# c.AsyncMappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] - -## Whether to make kernels available before the process has started. The -# See also: AsyncMultiKernelManager.use_pending_kernels -# c.AsyncMappingKernelManager.use_pending_kernels = False - -#------------------------------------------------------------------------------ -# MappingKernelManager(MultiKernelManager) configuration -#------------------------------------------------------------------------------ -## A KernelManager that handles - File mapping - HTTP error handling - Kernel -# message filtering - -## Whether to send tracebacks to clients on exceptions. -# Default: True -# c.MappingKernelManager.allow_tracebacks = True - -## White list of allowed kernel message types. 
-# When the list is empty, all message types are allowed. -# Default: [] -# c.MappingKernelManager.allowed_message_types = [] - -## Whether messages from kernels whose frontends have disconnected should be -# buffered in-memory. -# -# When True (default), messages are buffered and replayed on reconnect, -# avoiding lost messages due to interrupted connectivity. -# -# Disable if long-running kernels will produce too much output while -# no frontends are connected. -# Default: True -# c.MappingKernelManager.buffer_offline_messages = True - -## Whether to consider culling kernels which are busy. -# Only effective if cull_idle_timeout > 0. -# Default: False -# c.MappingKernelManager.cull_busy = False - -## Whether to consider culling kernels which have one or more connections. -# Only effective if cull_idle_timeout > 0. -# Default: False -# c.MappingKernelManager.cull_connected = False - -## Timeout (in seconds) after which a kernel is considered idle and ready to be culled. -# Values of 0 or lower disable culling. Very short timeouts may result in kernels being culled -# for users with poor network connections. -# Default: 0 -# c.MappingKernelManager.cull_idle_timeout = 0 - -## The interval (in seconds) on which to check for idle kernels exceeding the -# cull timeout value. -# Default: 300 -# c.MappingKernelManager.cull_interval = 300 - -## The name of the default kernel to start -# See also: MultiKernelManager.default_kernel_name -# c.MappingKernelManager.default_kernel_name = 'python3' - -## Timeout for giving up on a kernel (in seconds). -# -# On starting and restarting kernels, we check whether the -# kernel is running and responsive by sending kernel_info_requests. -# This sets the timeout in seconds for how long the kernel can take -# before being presumed dead. -# This affects the MappingKernelManager (which handles kernel restarts) -# and the ZMQChannelsHandler (which handles the startup). 
-# Default: 60 -# c.MappingKernelManager.kernel_info_timeout = 60 - -## The kernel manager class. This is configurable to allow -# See also: MultiKernelManager.kernel_manager_class -# c.MappingKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' - -# Default: '' -# c.MappingKernelManager.root_dir = '' - -## Share a single zmq.Context to talk to all my kernels -# See also: MultiKernelManager.shared_context -# c.MappingKernelManager.shared_context = True - -## Message to print when allow_tracebacks is False, and an exception occurs -# Default: 'An exception occurred at runtime, which is not shown due to security reasons.' -# c.MappingKernelManager.traceback_replacement_message = 'An exception occurred at runtime, which is not shown due to security reasons.' - -## List of kernel message types excluded from user activity tracking. -# -# This should be a superset of the message types sent on any channel other -# than the shell channel. -# Default: ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] -# c.MappingKernelManager.untracked_message_types = ['comm_info_request', 'comm_info_reply', 'kernel_info_request', 'kernel_info_reply', 'shutdown_request', 'shutdown_reply', 'interrupt_request', 'interrupt_reply', 'debug_request', 'debug_reply', 'stream', 'display_data', 'update_display_data', 'execute_input', 'execute_result', 'error', 'status', 'clear_output', 'debug_event', 'input_request', 'input_reply'] -#------------------------------------------------------------------------------ -# SessionManager(LoggingConfigurable) configuration -#------------------------------------------------------------------------------ -## A session manager. 
-## The filesystem path to SQLite Database file (e.g. -# /path/to/session_database.db). By default, the session database is stored in- -# memory (i.e. `:memory:` setting from sqlite3) and does not persist when the -# current Jupyter Server shuts down. -# Default: ':memory:' -# c.SessionManager.database_filepath = ':memory:' #------------------------------------------------------------------------------ # NotebookNotary(LoggingConfigurable) configuration @@ -2530,7 +1347,7 @@ ## A class for computing and verifying notebook signatures. ## The hashing algorithm used to sign notebooks. -# Choices: any of ['sha3_512', 'sha1', 'blake2b', 'sha256', 'sha3_384', 'sha512', 'blake2s', 'sha3_256', 'md5', 'sha3_224', 'sha384', 'sha224'] +# Choices: any of ['sha384', 'sha512', 'blake2s', 'sha3_256', 'sha224', 'md5', 'sha3_384', 'sha3_224', 'sha256', 'sha1', 'sha3_512', 'blake2b'] # Default: 'sha256' # c.NotebookNotary.algorithm = 'sha256' @@ -2557,75 +1374,3 @@ # Default: traitlets.Undefined # c.NotebookNotary.store_factory = traitlets.Undefined -#------------------------------------------------------------------------------ -# Application(SingletonConfigurable) configuration -#------------------------------------------------------------------------------ -## This is an application. - -## The date format used by logging formatters for %(asctime)s -# Default: '%Y-%m-%d %H:%M:%S' -# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' - -## The Logging format template -# Default: '[%(name)s]%(highlevel)s %(message)s' -# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' - -## Set the log level by value or name. -# Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL'] -# Default: 30 -# c.Application.log_level = 30 - -## Configure additional log handlers. -# -# The default stderr logs handler is configured by the log_level, log_datefmt -# and log_format settings. -# -# This configuration can be used to configure additional handlers (e.g. 
to -# output the log to a file) or for finer control over the default handlers. -# -# If provided this should be a logging configuration dictionary, for more -# information see: -# https://docs.python.org/3/library/logging.config.html#logging-config- -# dictschema -# -# This dictionary is merged with the base logging configuration which defines -# the following: -# -# * A logging formatter intended for interactive use called -# ``console``. -# * A logging handler that writes to stderr called -# ``console`` which uses the formatter ``console``. -# * A logger with the name of this application set to ``DEBUG`` -# level. -# -# This example adds a new handler that writes to a file: -# -# .. code-block:: python -# -# c.Application.logging_config = { -# "handlers": { -# "file": { -# "class": "logging.FileHandler", -# "level": "DEBUG", -# "filename": "", -# } -# }, -# "loggers": { -# "": { -# "level": "DEBUG", -# # NOTE: if you don't list the default "console" -# # handler here then it will be disabled -# "handlers": ["console", "file"], -# }, -# }, -# } -# Default: {} -# c.Application.logging_config = {} - -## Instead of starting the Application, dump configuration to stdout -# Default: False -# c.Application.show_config = False - -## Instead of starting the Application, dump configuration to stdout (as JSON) -# Default: False -# c.Application.show_config_json = False diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index e2e483b0..5566830d 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -14,6 +14,7 @@ from traitlets.config.application import Application, ClassesType from traitlets.config.configurable import Configurable from traitlets.config.loader import ConfigFileNotFound +from traitlets.utils.text import indent, wrap_paragraphs from jupyter_client.ioloop.manager import AsyncIOLoopKernelManager from jupyter_client import kernelspec @@ -688,6 +689,57 @@ def class_sort_key(cls: type): config_classes = 
list(self._classes_with_config_traits(classes)) config_classes.sort(key=class_sort_key) + added = set() for cls in config_classes: - lines.append(cls.class_config_section(config_classes)) + lines.append(self.generate_config_section(cls, config_classes, added)) return "\n".join(lines) + + def generate_config_section(self, cls, classes, added): + def c(s: str) -> str: + """return a commented, wrapped block.""" + s = "\n\n".join(wrap_paragraphs(s, 78)) + + return "## " + s.replace("\n", "\n# ") + adding = set() + + # section header + breaker = "#" + "-" * 78 + parent_classes = ", ".join(p.__name__ for p in cls.__bases__ if issubclass(p, Configurable)) + + s = f"# {cls.__name__}({parent_classes}) configuration" + lines = [breaker, s, breaker] + # get the description trait + desc = cls.class_traits().get("description") + if desc: + desc = desc.default_value + if not desc: + # no description from trait, use __doc__ + desc = getattr(cls, "__doc__", "") # type:ignore[arg-type] + if desc: + lines.append(c(desc)) # type:ignore[arg-type] + lines.append("") + + for name, trait in sorted(cls.class_traits(config=True).items()): + default_repr = trait.default_value_repr() + if trait in added: + continue + + + if trait.help: + if 'deprecated' in trait.help.lower(): + continue + lines.append(c(trait.help)) + if "Enum" in type(trait).__name__: + # include Enum choices + lines.append("# Choices: %s" % trait.info()) + lines.append("# Default: %s" % default_repr) + + lines.append(f"# c.{cls.__name__}.{name} = {default_repr}") + lines.append("") + adding.add(trait) + + if adding: + added.update(adding) + return "\n".join(lines) + else: + return "" From 8b62582b6bdb1854260ae9d0e9f322929aa18c20 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Fri, 10 Oct 2025 10:01:24 -0600 Subject: [PATCH 06/16] Updates to fix per-user pathing --- beaker_kernel/lib/utils.py | 3 ++ beaker_kernel/service/base.py | 70 +++++++++++++++++++++-------------- 2 files changed, 46 insertions(+), 27 deletions(-) 
diff --git a/beaker_kernel/lib/utils.py b/beaker_kernel/lib/utils.py index 748237cd..30ef725b 100644 --- a/beaker_kernel/lib/utils.py +++ b/beaker_kernel/lib/utils.py @@ -10,6 +10,7 @@ import typing import warnings from frozendict import frozendict +from collections import namedtuple from contextlib import AbstractAsyncContextManager, AbstractContextManager from functools import wraps, update_wrapper from importlib import import_module @@ -23,6 +24,8 @@ JupyterMessage, JupyterMessageTuple) +BeakerEntryPoint = namedtuple("BeakerEntryPoint", ("type", "import_string")) + logger = logging.getLogger(__name__) execution_context_var = contextvars.ContextVar('execution_context', default=None) diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 5566830d..868649ee 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -119,19 +119,24 @@ async def start_kernel_for_session(self, session_id, path, name, type, kernel_na dict Session information from parent class """ - if kernel_name == "beaker_kernel": - user: BeakerUser = current_user.get() - if user: - path = os.path.join(user.home_dir, path) - virtual_home_root = self.kernel_manager.root_dir - virtual_home_dir = os.path.join(virtual_home_root, user.home_dir) - - subkernel_user = self.parent.subkernel_user - if not os.path.isdir(virtual_home_dir): - os.makedirs(virtual_home_dir, exist_ok=True) - shutil.chown(virtual_home_dir, user=subkernel_user, group=subkernel_user) - path = os.path.join(os.path.relpath(virtual_home_dir, self.kernel_manager.root_dir), name) - return await super().start_kernel_for_session(session_id, path, name, type, kernel_name) + user: BeakerUser = current_user.get() + if user: + virtual_home_root = self.kernel_manager.root_dir + virtual_home_dir = os.path.join(virtual_home_root, user.home_dir) + + subkernel_user = self.parent.subkernel_user + if not os.path.isdir(virtual_home_dir): + os.makedirs(virtual_home_dir, exist_ok=True) + 
shutil.chown(virtual_home_dir, user=subkernel_user, group=subkernel_user) + path = os.path.relpath(virtual_home_dir, self.kernel_manager.root_dir) + + kernel_env = self.get_kernel_env(path, name) + kernel_id = await self.kernel_manager.start_kernel( + path=path, + kernel_name=kernel_name, + env=kernel_env, + ) + return cast(str, kernel_id) class BeakerKernelSpecManager(kernelspec.KernelSpecManager): @@ -372,7 +377,6 @@ async def _async_pre_start_kernel(self, **kw): if os.getuid() == 0 or os.geteuid() == 0: kw["group"] = user_info.pw_gid kw["extra_groups"] = group_list[1:] - kw["cwd"] = self.app.working_dir # Update keyword args that are passed to Popen() kw["env"] = env @@ -395,7 +399,7 @@ async def _async_interrupt_kernel(self): class BeakerKernelMappingManager(AsyncMappingKernelManager): - kernel_manager_class = "beaker_kernel.service.base.BeakerKernelManager" + kernel_manager_class = traitlets.DottedObjectName("beaker_kernel.service.base.BeakerKernelManager") connection_dir = Unicode( os.path.join(config.beaker_run_path, "kernelfiles"), help="Directory for kernel connection files", @@ -472,11 +476,22 @@ class BaseBeakerApp(ServerApp): name = traitlets.Unicode("beaker", config=True) app_slug = traitlets.Unicode(config=True) - kernel_manager_class = BeakerKernelMappingManager - session_manager_class = BeakerSessionManager - reraise_server_extension_failures = True - contents_manager_class = BeakerContentsManager - kernel_spec_manager_class = BeakerKernelSpecManager + kernel_manager_class = traitlets.Type( + f"{__module__}.BeakerKernelMappingManager", + config=True + ) + session_manager_class = traitlets.Type( + f"{__module__}.BeakerSessionManager", + config=True + ) + contents_manager_class = traitlets.Type( + f"{__module__}.BeakerContentsManager", + config=True + ) + kernel_spec_manager_class = traitlets.Type( + f"{__module__}.BeakerKernelSpecManager", + config=True + ) kernel_spec_include_local = traitlets.Bool(True, help="Include local kernel specs", 
config=True) kernel_spec_managers = traitlets.Dict(help="Kernel specification managers indexed by extension name", config=True) @@ -525,7 +540,7 @@ def _default_authorizer_class(self): @traitlets.default("config_file_name") def _default_config_file_name(self): if self.app_slug: - return f"beaker_{self.name}_config" + return f"beaker_{self.app_slug}_config" else: return f"beaker_config" @@ -559,13 +574,14 @@ def initialize_handlers(self): self.web_app.add_handlers(".*", self.handlers) def load_config_file(self, suppress_errors = True): - default_config_file_name = self._default_config_file_name() + default_config_files = (self._default_config_file_name(), "beaker_config") try: - # Load default configuration file first - try: - Application.load_config_file(self, default_config_file_name, path=self.beaker_config_path) - except ConfigFileNotFound: - self.log.debug("Config file not found, skipping: %s", self.config_file_name) + # Load default configuration files first + for default_config_file_name in default_config_files: + try: + Application.load_config_file(self, default_config_file_name, path=self.beaker_config_path) + except ConfigFileNotFound: + self.log.debug("Config file not found, skipping: %s", self.config_file_name) # If another configuration file is defined, load it second so it overrides any defaults if self.config_file_name != default_config_file_name: From dccf9c1f3f5c982c2388f0d9a890938302fbde88 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Fri, 17 Oct 2025 18:04:59 -0600 Subject: [PATCH 07/16] Fix for app handling --- beaker_kernel/service/base.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 868649ee..2a08470f 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -565,6 +565,20 @@ def __init__(self, **kwargs): def initialize(self, argv = None, find_extensions = False, new_httpserver = True, starter_extension = None): 
super().initialize(argv, find_extensions, new_httpserver, starter_extension) + beaker_app_slug = os.environ.get("BEAKER_APP", self.config.get("beaker_app", None)) + if beaker_app_slug: + app_cls: type[BeakerApp] = import_dotted_class(beaker_app_slug) + beaker_app: BeakerApp = app_cls() + + self.config.update({ + "app_cls": app_cls, + "app": beaker_app, + }) + else: + self.config.update({ + "app_cls": None, + "app": None, + }) self.initialize_handlers() def initialize_handlers(self): From be67fa8e2b4fffe3e9165867f6ca66bcbe18f1cf Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Mon, 20 Oct 2025 19:17:00 +0000 Subject: [PATCH 08/16] Set beaker kernel dir to beaker home dir --- beaker_kernel/service/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 2a08470f..e67b3b14 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -362,7 +362,8 @@ async def _async_pre_start_kernel(self, **kw): if self.kernel_name == "beaker_kernel": kernel_user = self.app.agent_user home_dir = os.path.expanduser(f"~{kernel_user}") - kw["cwd"] = self.app.working_dir + kw["cwd"] = home_dir + env["HOME"] = home_dir else: kernel_user = self.app.subkernel_user home_dir = kw.get("cwd") From 3add089c08d61de711fd4e4606557e5e6d589668 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Thu, 23 Oct 2025 10:23:51 -0600 Subject: [PATCH 09/16] wip --- beaker-vue/src/pages/BaseInterface.vue | 37 ++++++----- .../src/pages/NextNotebookInterface.vue | 1 + beaker_kernel/service/api/handlers.py | 66 ++++++++++++++----- beaker_kernel/service/base.py | 39 ++++------- 4 files changed, 86 insertions(+), 57 deletions(-) diff --git a/beaker-vue/src/pages/BaseInterface.vue b/beaker-vue/src/pages/BaseInterface.vue index b5c8b0b5..93310740 100644 --- a/beaker-vue/src/pages/BaseInterface.vue +++ b/beaker-vue/src/pages/BaseInterface.vue @@ -243,15 +243,19 @@ const connectionFailure = (error: Error) => { 
onMounted(async () => { const session: Session = beakerSession.value.session; await session.sessionReady; // Ensure content service is up + const sessionId = session.sessionId; var notebookData: {[key: string]: any}; try { - notebookData = JSON.parse(localStorage.getItem("notebookData")) || {}; + notebookData = await fetch(`/beaker/notebook/?session=${session.sessionId}`).then(res => res.json()) || {}; + console.log("1", notebookData); } catch (e) { + console.log("2", notebookData); console.error(e); notebookData = {}; } + console.log("2", notebookData); // Connect listener for authentication message session.session.ready.then(() => { @@ -259,9 +263,9 @@ onMounted(async () => { sessionContext.iopubMessage.connect(iopubMessage); }); - const sessionId = session.sessionId; - const sessionData = notebookData[sessionId]; + const sessionData = notebookData; + sessionData["data"] = sessionData["content"] if (sessionData) { nextTick(async () => { if (sessionData.filename !== undefined) { @@ -312,22 +316,23 @@ onUnmounted(() => { }); const snapshot = async () => { - var notebookData: {[key: string]: any}; - try { - notebookData = JSON.parse(localStorage.getItem("notebookData")) || {}; - } - catch (e) { - console.error(e); - notebookData = {}; - } + // var notebookData: {[key: string]: any}; + // try { + // notebookData = JSON.parse(localStorage.getItem("notebookData")) || {}; + // } + // catch (e) { + // console.error(e); + // notebookData = {}; + // } const session: Session = beakerSession.value.session; const sessionId = session.sessionId ; - if (!Object.keys(notebookData).includes(sessionId)) { - notebookData[sessionId] = {}; - } - const sessionData = notebookData[sessionId]; + // if (!Object.keys(notebookData).includes(sessionId)) { + // notebookData[sessionId] = {}; + // } + // const sessionData = notebookData[sessionId]; + const sessionData: {[key: string]: any} = {}; // Only save state if there is state to save if (session.notebook) { @@ -368,7 +373,7 @@ const 
snapshot = async () => { sessionData['data'] = notebookContent; sessionData['checksum'] = notebookChecksum; } - localStorage.setItem("notebookData", JSON.stringify(notebookData)); + // localStorage.setItem("notebookData", JSON.stringify(notebookData)); } }; diff --git a/beaker-vue/src/pages/NextNotebookInterface.vue b/beaker-vue/src/pages/NextNotebookInterface.vue index ee049986..b8f074a2 100644 --- a/beaker-vue/src/pages/NextNotebookInterface.vue +++ b/beaker-vue/src/pages/NextNotebookInterface.vue @@ -346,6 +346,7 @@ const { setupQueryCellFlattening, resetProcessedEvents } = useQueryCellFlattenin setupQueryCellFlattening(() => beakerSession.value?.session?.notebook?.cells); const handleLoadNotebook = (notebookJSON: any, filename: string) => { + console.log("Loading notebook:", notebookJSON, filename); resetProcessedEvents(); loadNotebook(notebookJSON, filename); }; diff --git a/beaker_kernel/service/api/handlers.py b/beaker_kernel/service/api/handlers.py index 17cf0dcc..bd50d777 100644 --- a/beaker_kernel/service/api/handlers.py +++ b/beaker_kernel/service/api/handlers.py @@ -1,22 +1,12 @@ -import asyncio import importlib -import json import logging import os -import traceback -import uuid +import sys import urllib.parse from typing import get_origin, get_args from dataclasses import is_dataclass, asdict -from collections.abc import Mapping, Collection -from typing import get_origin, get_args, GenericAlias, Union, Generic, Generator, Optional, Any +from typing import TYPE_CHECKING, get_origin, get_args, GenericAlias, Union, Generic, Generator, Optional, Any -from jupyter_server.auth.decorator import authorized -from jupyter_server.base.handlers import JupyterHandler -from jupyter_server.extension.handler import ExtensionHandlerMixin -from jupyterlab_server import LabServerApp -from tornado import web, httputil -from tornado.web import StaticFileHandler, RedirectHandler, RequestHandler, HTTPError from beaker_kernel.lib.autodiscovery import autodiscover from 
beaker_kernel.lib.app import BeakerApp @@ -26,10 +16,56 @@ from beaker_kernel.lib.config import config, locate_config, Config, Table, Choice, recursiveOptionalUpdate, reset_config from beaker_kernel.service import admin_utils +if TYPE_CHECKING: + from beaker_kernel.service.base import BaseBeakerApp + logger = logging.getLogger(__name__) PREFIX = '/beaker/' +def find_api_handlers(base=None) -> Generator[tuple[str, Any, str], None, None]: + """Discover and yield API handlers from registered Beaker extensions. + + This function uses the Beaker extension autodiscovery mechanism to find + all extensions that may provide API handlers. It then iterates through + each extension's handlers and yields them one by one. + + Yields + ------ + tuple[str, Any, str] + A tuple containing the URL pattern, handler class, and optional name + for each discovered API handler. + """ + if base is None: + package = __package__ + base_dir = os.path.dirname(sys.modules[package].__file__) + else: + match base: + case str(): + if os.path.pathsep in base: + base_dir = base + else: + try: + mod = importlib.import_module(base) + base_dir = os.path.dirname(mod.__file__) + except ImportError: + logger.error(f"Could not import module {base} for API handler discovery") + return + case os.PathLike(): + base_dir = os.fspath(base) + case _: + logger.error(f"Invalid base parameter type: {type(base)}") + return + + for f in os.listdir(base_dir): + if f.endswith('.py') and f != '__init__.py' and f != 'handlers.py': + s = f'beaker_kernel.service.api.{f[:-3]}' + mod = importlib.import_module(s) + if "handlers" in dir(mod): + logger.warning(f"Found handlers in {s}") + for handlers in getattr(mod, "handlers"): + yield handlers + def add_handler_prefix(prefix: str, handler_tuple: tuple[str]): path, rest = handler_tuple[0], handler_tuple[1:] @@ -37,13 +73,11 @@ def add_handler_prefix(prefix: str, handler_tuple: tuple[str]): prefix = prefix + '/' if path.startswith('/'): path = path.lstrip('/') - return 
(urllib.parse.urljoin(prefix, path), *rest) -def register_api_handlers(app: LabServerApp): - from .integrations import handlers as integration_handlers +def register_api_handlers(app: "BaseBeakerApp"): app.handlers.extend([ add_handler_prefix(PREFIX, handler) - for handler in integration_handlers + for handler in find_api_handlers() ]) diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index e67b3b14..1a88e094 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -18,7 +18,6 @@ from jupyter_client.ioloop.manager import AsyncIOLoopKernelManager from jupyter_client import kernelspec -from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager from jupyter_server.services.sessions.sessionmanager import SessionManager from jupyter_server.serverapp import ServerApp @@ -37,26 +36,6 @@ version = "1.0.0" -class BeakerContentsManager(AsyncLargeFileManager): - def _get_os_path(self, path): - """Override path resolution to use user-specific home directory. 
- - Parameters - ---------- - path : str - Relative path to resolve - - Returns - ------- - str - Absolute path within user's home directory - """ - user: BeakerUser = current_user.get() - if user: - path = os.path.join(user.home_dir, path) - return super()._get_os_path(path) - - class BeakerSessionManager(SessionManager): def get_kernel_env(self, path, name = None): @@ -478,19 +457,23 @@ class BaseBeakerApp(ServerApp): app_slug = traitlets.Unicode(config=True) kernel_manager_class = traitlets.Type( - f"{__module__}.BeakerKernelMappingManager", + f"{__package__}.base.BeakerKernelMappingManager", config=True ) session_manager_class = traitlets.Type( - f"{__module__}.BeakerSessionManager", + f"{__package__}.base.BeakerSessionManager", config=True ) contents_manager_class = traitlets.Type( - f"{__module__}.BeakerContentsManager", + f"{__package__}.storage.base.BeakerLocalContentsManager", config=True ) kernel_spec_manager_class = traitlets.Type( - f"{__module__}.BeakerKernelSpecManager", + f"{__package__}.base.BeakerKernelSpecManager", + config=True + ) + notebook_manager_class = traitlets.Type( + f"{__package__}.storage.notebook.NotebookFileManager", config=True ) @@ -564,6 +547,12 @@ def __init__(self, **kwargs): kwargs.update(defaults) super().__init__(**kwargs) + def init_configurables(self): + # Initialize configurables first to ensure config is loaded before other initializations + super().init_configurables() + + self.notebook_manager = self.notebook_manager_class(parent=self, contents_manager=self.contents_manager) + def initialize(self, argv = None, find_extensions = False, new_httpserver = True, starter_extension = None): super().initialize(argv, find_extensions, new_httpserver, starter_extension) beaker_app_slug = os.environ.get("BEAKER_APP", self.config.get("beaker_app", None)) From eb17ee0572d214bdcd3e5a76d4ebd433cc775e07 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Thu, 23 Oct 2025 14:07:42 -0600 Subject: [PATCH 10/16] Refactor on how notebooks 
are stored. --- .gitignore | 1 + beaker-vue/src/pages/BaseInterface.vue | 242 ++++++++++------ beaker_kernel/service/api/notebook.py | 115 ++++++++ beaker_kernel/service/auth/__init__.py | 16 ++ beaker_kernel/service/base.py | 23 +- beaker_kernel/service/storage/__init__.py | 0 beaker_kernel/service/storage/base.py | 62 ++++ beaker_kernel/service/storage/notebook.py | 332 ++++++++++++++++++++++ 8 files changed, 705 insertions(+), 86 deletions(-) create mode 100644 beaker_kernel/service/api/notebook.py create mode 100644 beaker_kernel/service/storage/__init__.py create mode 100644 beaker_kernel/service/storage/base.py create mode 100644 beaker_kernel/service/storage/notebook.py diff --git a/.gitignore b/.gitignore index d439e99b..7cd32d95 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ /*.ipynb **/.cache **/.ipython +**/.notebooks/ # Gradle .gradle/ diff --git a/beaker-vue/src/pages/BaseInterface.vue b/beaker-vue/src/pages/BaseInterface.vue index af8bbabb..d06e54c0 100644 --- a/beaker-vue/src/pages/BaseInterface.vue +++ b/beaker-vue/src/pages/BaseInterface.vue @@ -101,7 +101,7 @@ import InputText from 'primevue/inputtext'; import InputGroup from 'primevue/inputgroup'; import Button from 'primevue/button'; import ProviderSelector from '../components/misc/ProviderSelector.vue'; -import sum from 'hash-sum'; +import hashSum from 'hash-sum'; import {default as ConfigPanel, getConfigAndSchema, dropUnchangedValues, objectifyTables, tablifyObjects, saveConfig} from '../components/panels/ConfigPanel.vue'; import SideMenu, { type MenuPosition } from '../components/sidemenu/SideMenu.vue'; @@ -114,6 +114,17 @@ const toast = useToast(); const lastSaveChecksum = ref(); const mainRef = ref(); +const notebookInfo = ref<{ + id: string; + name: string; + created: string; + last_modified: string; + size: number; + type?: string; + session_id?: string; + content?: any; + checksum?: string; +}>(null); // TODO -- WARNING: showToast is only defined locally, but provided/used 
everywhere. Move to session? export interface ShowToastOptions { @@ -240,53 +251,29 @@ const connectionFailure = (error: Error) => { }); } +// Wrapper to allow removal from beforeunload event +const saveSnapshotWrapper = () => { + saveSnapshot(); +} + onMounted(async () => { const session: Session = beakerSession.value.session; await session.sessionReady; // Ensure content service is up const sessionId = session.sessionId; - var notebookData: {[key: string]: any}; - try { - notebookData = await fetch(`/beaker/notebook/?session=${session.sessionId}`).then(res => res.json()) || {}; - console.log("1", notebookData); - } - catch (e) { - console.log("2", notebookData); - console.error(e); - notebookData = {}; - } - console.log("2", notebookData); - // Connect listener for authentication message session.session.ready.then(() => { const sessionContext = toRaw(session.session.session) sessionContext.iopubMessage.connect(iopubMessage); }); + await loadSnapshot(); - const sessionData = notebookData; - sessionData["data"] = sessionData["content"] - if (sessionData) { - nextTick(async () => { - if (sessionData.filename !== undefined) { - - const contentsService = session.services.contents; - const result = await contentsService.get(sessionData.filename) - lastSaveChecksum.value = sessionData.checksum; - emit('open-file', result.content, result.path, {selectedCell: sessionData.selectedCell}); - } - else if (sessionData.data !== undefined) { - emit('open-file', sessionData.data, undefined, {selectedCell: sessionData.selectedCell}); - } - if (sessionData.selectedCell !== undefined && beakerSession.value.notebookComponent) { - nextTick(() => beakerSession.value.notebookComponent.selectCell(sessionData.selectedCell)); - } - }); - } - saveInterval.value = setInterval(snapshot, 30000); - window.addEventListener("beforeunload", snapshot); + saveInterval.value = setInterval(saveSnapshot, 10000); + window.addEventListener("beforeunload", saveSnapshotWrapper); }); + const iopubMessage = 
(_sessionConn, msg) => { if (msg.header.msg_type === "llm_auth_failure") { authDialogVisible.value = true; @@ -312,69 +299,160 @@ const setAgentModel = async (modelConfig = null, rerunLastCommand = false) => { onUnmounted(() => { clearInterval(saveInterval.value); saveInterval.value = null; - window.removeEventListener("beforeunload", snapshot); + window.removeEventListener("beforeunload", saveSnapshotWrapper); }); -const snapshot = async () => { - // var notebookData: {[key: string]: any}; - // try { - // notebookData = JSON.parse(localStorage.getItem("notebookData")) || {}; - // } - // catch (e) { - // console.error(e); - // notebookData = {}; - // } - - const session: Session = beakerSession.value.session; - const sessionId = session.sessionId ; +const saveSnapshot = async (ignoreSession: boolean = false) => { + const session: Session = beakerSession.value?.session; + const sessionId = session?.sessionId ; - // if (!Object.keys(notebookData).includes(sessionId)) { - // notebookData[sessionId] = {}; - // } - // const sessionData = notebookData[sessionId]; - const sessionData: {[key: string]: any} = {}; + // TODO: Check session id matches + const notebookData: {[key: string]: any} = { + ...(notebookInfo.value || {}), + }; // Only save state if there is state to save if (session.notebook) { - sessionData['data'] = session.notebook.toIPynb(); + if (!ignoreSession) { + notebookData.content = session.notebook.toIPynb(); + } + + const notebookChecksum: string = hashSum(notebookData.content); const notebookComponent = beakerSession.value.notebookComponent; + + if (notebookChecksum === notebookData.checksum) { + // No changes since last save + return; + } + else { + notebookData.checksum = notebookChecksum; + } + if (notebookComponent) { - sessionData['selectedCell'] = notebookComponent.selectedCellId + notebookData.selectedCell = notebookComponent.selectedCellId } - if (props.savefile && typeof props.savefile === "string") { - - const notebookContent = 
session.notebook.toIPynb(); - const notebookChecksum: string = sum(notebookContent); - - if (!lastSaveChecksum.value || lastSaveChecksum.value != notebookChecksum) { - lastSaveChecksum.value = notebookChecksum; - - const contentsService = session.services.contents; - const path = props.savefile; - const result = await contentsService.save(path, { - type: "notebook", - content: notebookContent, - format: 'text', - }); - emit("notebook-autosaved", result.path); - showToast({ - title: "Autosave", - detail: `Auto-saved notebook to file ${props.savefile}.`, - }); - sessionData['filename'] = result.path; - sessionData['checksum'] = notebookChecksum; + if (!notebookData.filename && (props.savefile && typeof props.savefile === "string")) { + notebookData.filename = props.savefile; + } + + if (notebookData.selectedCell) { + // Store selected cell in notebook metadata before saving + notebookData.content.metadata = notebookData.content.metadata || {}; + notebookData.content.metadata.selected_cell = notebookData.selectedCell; + } + + if (notebookInfo.value?.type === "browserStorage" && notebookData.id) { + const localRecordString = JSON.stringify(notebookData); + window.localStorage.setItem(notebookData.id, localRecordString); + notebookInfo.value = { + ...notebookInfo.value, + checksum: notebookChecksum, + }; + } + else { + const notebookId = notebookInfo.value?.id || ""; + const saveRequest = await fetch(`/beaker/notebook/${notebookId}?session=${session.sessionId}`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(notebookData), + }); + notebookInfo.value = saveRequest.ok ? 
await saveRequest.json() : notebookInfo.value; + } + } +}; + +const loadSnapshot = async () => { + const session: Session = beakerSession.value.session; + await session.sessionReady; // Ensure content service is up + const sessionId = session.sessionId; + + try { + const notebookInfoResponse = await fetch(`/beaker/notebook/?session=${session.sessionId}`); + if (notebookInfoResponse.ok) { + notebookInfo.value = await notebookInfoResponse.json(); + } + } + catch (e) { + console.error(e); + notebookInfo.value = { + id: sessionId, + name: sessionId, + created: "", + last_modified: "", + size: 0, + session_id: sessionId, + }; + } + + const notebookData = { + ...(notebookInfo.value || {}), + content: undefined, + selectedCell: undefined, + }; + + if (notebookInfo.value?.type === "browserStorage") { + // Notebook is stored in browser local storage, load it from there + const fullNotebookData = localStorage.getItem("notebookData"); + const fullData = JSON.parse(fullNotebookData || "null"); + const localRecord = JSON.parse(window.localStorage.getItem(notebookData.id) || "null"); + + const hasLocalRecord = notebookData.id in window.localStorage; + const hasLegacyRecord = fullData && sessionId in fullData; + + if (hasLegacyRecord && !hasLocalRecord) { + console.log(`Migrating notebook data for session ${sessionId} from full localStorage to per-notebook storage.`); + notebookData.content = fullData[sessionId]?.data || undefined; + notebookData.selectedCell = fullData[sessionId]?.selectedCell || undefined; + if (fullData[sessionId]?.name) { + notebookData.name = fullData[sessionId]?.name; } + + notebookInfo.value = { + id: notebookData.id, + name: notebookData.name, + created: notebookData.created, + last_modified: notebookData.last_modified, + size: notebookData.size, + type: "browserStorage", + content: notebookData.content, + session_id: sessionId, + }; + saveSnapshot(true).then(() => { + const fullData = JSON.parse(fullNotebookData || "null"); + delete fullData[sessionId]; 
+ window.localStorage.setItem("notebookData", JSON.stringify(fullData)); + }).then(async () => { + await loadSnapshot(); + console.log("Migration of notebook data complete."); + }); + return; + } + else if (hasLocalRecord && hasLegacyRecord) { + // Remove legacy record + delete fullData[sessionId]; + window.localStorage.setItem("notebookData", JSON.stringify(fullData)); } else { - const notebookContent = session.notebook.toIPynb(); - const notebookChecksum: string = sum(notebookContent); - sessionData['filename'] = undefined; - sessionData['data'] = notebookContent; - sessionData['checksum'] = notebookChecksum; + notebookData.content = localRecord?.content || undefined; + notebookData.selectedCell = localRecord?.selectedCell || undefined; + if (localRecord?.name) { + notebookData.name = localRecord?.name; + } + } + } + + if (notebookData && notebookData.content) { + emit('open-file', notebookData.content, notebookData.name, {selectedCell: notebookData.selectedCell}); + if (notebookData.selectedCell !== undefined) { + nextTick(() => { + beakerSession.value.notebookComponent?.selectCell(notebookData.selectedCell); + }); } - // localStorage.setItem("notebookData", JSON.stringify(notebookData)); } + return notebookData; }; const providerConfig = () => { diff --git a/beaker_kernel/service/api/notebook.py b/beaker_kernel/service/api/notebook.py new file mode 100644 index 00000000..b2a7d38d --- /dev/null +++ b/beaker_kernel/service/api/notebook.py @@ -0,0 +1,115 @@ +import datetime +import json +import logging +import typing +import uuid +from dataclasses import is_dataclass, asdict +from queue import Empty + +from jupyter_client.jsonutil import json_default +from jupyter_server.base.handlers import JupyterHandler + +from beaker_kernel.lib.utils import ensure_async + +import tornado + +if typing.TYPE_CHECKING: + from beaker_kernel.service.storage.notebook import BaseNotebookManager, NotebookInfo, NotebookContent + +logger = logging.getLogger(__name__) + + +class 
NotebookHandler(JupyterHandler): + """ + Base handler for Beaker notebook-related API endpoints. + """ + + def set_default_headers(self): + self.set_header("Content-Type", "application/json") + + def write(self, chunk): + if is_dataclass(chunk): + chunk = asdict(chunk) + elif isinstance(chunk, list): + chunk = [asdict(item) if is_dataclass(item) else item for item in chunk] + if isinstance(chunk, (dict, list)): + chunk = json.dumps(chunk, default=json_default) + return super().write(chunk) + + @property + def notebook_manager(self) -> "BaseNotebookManager": + notebook_manager = getattr(self.serverapp, "notebook_manager", None) + if notebook_manager is None: + raise tornado.web.HTTPError(404, "Notebook manager not found") + return notebook_manager + + async def head(self, notebook_id=None): + self.write({}) + + async def get(self, notebook_id=None): + notebook_id = notebook_id or None + session_id = self.get_query_argument("session", None) + notebook = await self.notebook_manager.get_notebook(notebook_id, session_id) + if notebook is None: + raise tornado.web.HTTPError(404, "Notebook not found") + self.finish(notebook) + # if notebook_id: + # try: + # notebook = await self.notebook_manager.get_notebook(notebook_id) + # self.finish(notebook) + # return + # except FileNotFoundError: + # raise tornado.web.HTTPError(404, f"Notebook {notebook_id} not found") + + # notebooks = await self.notebook_manager.list_notebooks() + + # If only a single notebook with ID "*", return it directly to allow browser to use alternative storage + # if len(notebooks) == 1 and notebooks[0].id == "*": + # notebooks[0].session_id = session_id + # return self.finish(notebooks[0]) + + # if session_id is not None: + # for nb in notebooks: + # if nb.session_id == session_id: + # notebook = await self.notebook_manager.get_notebook(nb.id) + # self.finish(notebook) + # raise tornado.web.HTTPError(404, f"No notebook found for session {session_id}") + # else: + # self.finish(notebooks) + + async def 
post(self, notebook_id=None): + notebook_id = notebook_id or None + session = self.get_query_argument("session", None) + name = self.get_query_argument("name", None) + body = tornado.escape.json_decode(self.request.body) + content: "typing.Optional[NotebookContent]" = body.get("content", None) + if content is None: + raise tornado.web.HTTPError(400, "No notebook content provided in request body") + + notebook: "NotebookInfo" = await self.notebook_manager.save_notebook( + content=content, + notebook_id=notebook_id, + session=session, + name=name, + ) + self.write(notebook) + + # async def patch(self, notebook_id=None): + # body = tornado.escape.json_decode(self.request.body) + # self.write({}) + # + # async def put(self, notebook_id=None): + # body = tornado.escape.json_decode(self.request.body) + # self.write({}) + + async def delete(self, notebook_id=None): + if not notebook_id: + raise tornado.web.HTTPError(400, "No notebook ID provided for deletion") + await self.notebook_manager.delete_notebook(notebook_id) + self.set_status(204) + self.finish() + + +handlers = [ + (r"/notebook/?(?P.*)/?$", NotebookHandler), +] diff --git a/beaker_kernel/service/auth/__init__.py b/beaker_kernel/service/auth/__init__.py index 533f369f..960fa779 100644 --- a/beaker_kernel/service/auth/__init__.py +++ b/beaker_kernel/service/auth/__init__.py @@ -121,6 +121,7 @@ class BeakerAuthorizer(Authorizer): @dataclass class BeakerUser(User): home_dir: Optional[str] = field(default=None) + config: Optional[dict] = field(default=None) def __post_init__(self): """Initialize home directory if not provided. 
@@ -130,6 +131,9 @@ def __post_init__(self): """ if self.home_dir is None: self.home_dir = self._sanitize_homedir(self.username) + if self.config is None: + # TODO: Fetch config from somewhere + self.config = {} return super().__post_init__() @staticmethod @@ -159,6 +163,18 @@ def _sanitize_homedir(path_string: str): return full_path +@dataclass +class BeakerPermission: + name: str + description: str = "" + +@dataclass +class BeakerRole: + name: str + config: dict = field(default_factory=lambda: {}) + permissions: list[str] = field(default_factory=lambda: []) + + @dataclass class RoleBasedUser(BeakerUser): roles: list[str] = field(default_factory=lambda: []) diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 1a88e094..27f30521 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -473,7 +473,8 @@ class BaseBeakerApp(ServerApp): config=True ) notebook_manager_class = traitlets.Type( - f"{__package__}.storage.notebook.NotebookFileManager", + f"{__package__}.storage.notebook.BaseNotebookManager", + # default_value=f"{__package__}.storage.notebook.FileNotebookManager", config=True ) @@ -536,13 +537,24 @@ def _default_beaker_config_path(self): def _default_app_slug(self): return self._app_slug() + @traitlets.default("notebook_manager_class") + def _default_notebook_manager_class(self): + from beaker_kernel.service.storage.notebook import FileNotebookManager + return FileNotebookManager + def __init__(self, **kwargs): # Apply defaults from defaults classvar defaults = getattr(self.__class__, "defaults", None) if defaults and isinstance(defaults, dict): from traitlets.config import Config - config = Config(**defaults) - self.config.update(config) + + if config.jupyter_token: + identity_dict = defaults.get("IdentityProvider", {}) + identity_dict.setdefault("token", config.jupyter_token) + defaults["IdentityProvider"] = identity_dict + + trait_config = Config(**defaults) + self.config.update(trait_config) 
kwargs.update(defaults) super().__init__(**kwargs) @@ -551,7 +563,10 @@ def init_configurables(self): # Initialize configurables first to ensure config is loaded before other initializations super().init_configurables() - self.notebook_manager = self.notebook_manager_class(parent=self, contents_manager=self.contents_manager) + self.notebook_manager = self.notebook_manager_class( + parent=self, + # contents_manager=self.contents_manager + ) def initialize(self, argv = None, find_extensions = False, new_httpserver = True, starter_extension = None): super().initialize(argv, find_extensions, new_httpserver, starter_extension) diff --git a/beaker_kernel/service/storage/__init__.py b/beaker_kernel/service/storage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/beaker_kernel/service/storage/base.py b/beaker_kernel/service/storage/base.py new file mode 100644 index 00000000..c3ad8811 --- /dev/null +++ b/beaker_kernel/service/storage/base.py @@ -0,0 +1,62 @@ +import os +import os.path +from dataclasses import dataclass +from typing import Any + +import traitlets + +from jupyter_server.services.contents.manager import ContentsManager +from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager +from beaker_kernel.service.auth import current_user, BeakerUser, BeakerAuthorizer, BeakerIdentityProvider + + +def with_hidden_files(func): + """Decorator to temporarily enable hidden files during a method call.""" + async def wrapper(self, *args, **kwargs): + orig_allow_hidden = self.contents_manager.allow_hidden + self.contents_manager.allow_hidden = True + try: + result = await func(self, *args, **kwargs) + finally: + self.contents_manager.allow_hidden = orig_allow_hidden + return result + return wrapper + + +class BaseBeakerContentsManager(ContentsManager): + pass + +class BeakerLocalContentsManager(AsyncLargeFileManager, BaseBeakerContentsManager): + def _get_os_path(self, path): + """Override path resolution to use user-specific 
home directory. + + Parameters + ---------- + path : str + Relative path to resolve + + Returns + ------- + str + Absolute path within user's home directory + """ + user: BeakerUser = current_user.get() + if user: + path = os.path.join(user.home_dir, path) + return super()._get_os_path(path) + + async def _notebook_model(self, path, content=True, require_hash=False): + """ + Override to include session_id in notebook model. + """ + model = await super()._notebook_model(path, True, require_hash) + metadata = model.get("content", {}).get("metadata", {}) + model["session_id"] = metadata.get("beaker", {}).get("session_id", None) + if not content: + del model["content"] + del model["format"] + return model + + +class BeakerStorageManager(BaseBeakerContentsManager): + pass diff --git a/beaker_kernel/service/storage/notebook.py b/beaker_kernel/service/storage/notebook.py new file mode 100644 index 00000000..3e6922aa --- /dev/null +++ b/beaker_kernel/service/storage/notebook.py @@ -0,0 +1,332 @@ +import os.path +import uuid +from dataclasses import dataclass +from typing import Any, Optional, TypeAlias, Literal + +import traitlets +from traitlets.config import Configurable + +from jupyter_server.services.contents.manager import ContentsManager +from jupyter_server.services.contents.filemanager import AsyncFileContentsManager +from beaker_kernel.service.auth import BeakerUser + + +def with_hidden_files(func): + """Decorator to temporarily enable hidden files during a method call.""" + async def wrapper(self, *args, **kwargs): + orig_allow_hidden = self.contents_manager.allow_hidden + self.contents_manager.allow_hidden = True + try: + result = await func(self, *args, **kwargs) + finally: + self.contents_manager.allow_hidden = orig_allow_hidden + return result + return wrapper + + +NotebookContent: TypeAlias = dict[str, Any] +NotebookType: TypeAlias = Literal["notebook", "browserStorage", "other"] + +@dataclass +class NotebookInfo: + id: str + name: str + created: str + 
last_modified: str + size: int + type: NotebookType = "notebook" + session_id: Optional[str] = None + content: Optional[NotebookContent] = None + + +class BaseNotebookManager(Configurable): + async def get_notebook_info(self, notebook_id: str) -> NotebookInfo: + raise NotImplementedError() + + async def list_notebooks(self) -> list[NotebookInfo]: + raise NotImplementedError() + + async def get_notebook(self, notebook_id: Optional[str] = None, session_id: Optional[str] = None) -> Optional[NotebookInfo]: + raise NotImplementedError() + + async def save_notebook(self, notebook_id: str, content: NotebookContent) -> NotebookInfo: + raise NotImplementedError() + + async def delete_notebook(self, notebook_id: str) -> None: + raise NotImplementedError() + + +class FileNotebookManager(BaseNotebookManager): + + contents_manager = traitlets.Instance( + ContentsManager, + help="Contents manager used by the NotebookManager", + allow_none=False, + config=True, + ) + notebook_path = traitlets.Unicode( + ".notebooks/", + help="Base path for storing notebooks, relative to contents manager root", + config=True, + ) + + @traitlets.default("contents_manager") + def _default_contents_manager(self): + if getattr(self.parent, "contents_manager", None): + return self.parent.contents_manager + else: + return AsyncFileContentsManager(parent=self.parent) + + @with_hidden_files + async def _find_notebook(self, notebook_id: str) -> str: + """Find the file path for a given notebook session ID. + + Parameters + ---------- + notebook_id : str + The session ID of the notebook. + + Returns + ------- + str + The file path of the notebook. + """ + path = os.path.join(self.notebook_path, notebook_id) + if not await self.contents_manager.file_exists(path): + raise FileNotFoundError(f"Notebook with session ID {notebook_id} not found") + return path + + async def get_notebook_info(self, notebook_id: str) -> NotebookInfo: + """Retrieve notebook metadata for a given session ID. 
+ + Parameters + ---------- + notebook_id : str + The session ID of the notebook. + + Returns + ------- + NotebookInfo + Metadata about the notebook. + """ + + path = await self._find_notebook(notebook_id) + notebook = await self.contents_manager.get( + path, + content=False + ) + return NotebookInfo( + id=notebook['name'], + name=notebook['name'], + created=notebook.get('created', None), + last_modified=notebook.get('last_modified', None), + size=notebook.get('size', None), + ) + + @with_hidden_files + async def list_notebooks(self) -> list[NotebookInfo]: + """ + List all notebooks managed by this NotebookManager. + + Returns + ------- + list[NotebookInfo] + A list of metadata for all notebooks. + """ + + try: + path = self.notebook_path.format(notebook_id="") + except KeyError: + path = self.notebook_path + if await self.contents_manager.dir_exists(path): + files = await self.contents_manager.get(path, content=True) + else: + files = { + "content": [] + } + return sorted( + [ + NotebookInfo( + id=file['name'], + name=file['name'], + created=file.get('created', None), + last_modified=file.get('last_modified', None), + size=file.get('size', None), + session_id=file.get('session_id', None), + ) + for file + in files.get("content", []) if file['type'] == 'notebook' + ], + key=lambda notebook: notebook.last_modified, reverse=True + ) + + @with_hidden_files + async def get_notebook( + self, + notebook_id: Optional[str] = None, + session_id: Optional[str] = None, + ) -> Optional[NotebookInfo]: + """ + Retrieve a notebook's content and metadata by its session ID. + + Parameters + ---------- + notebook_id : str + The unique ID of the notebook. + + Returns + ------- + NotebookInfo + The notebook's metadata and content. 
+ """ + match notebook_id, session_id: + case None, None: + raise ValueError("Either notebook_id or session_id must be provided") + case str(), _: + try: + path = await self._find_notebook(notebook_id) + except KeyError: + path = self.notebook_path + file = await self.contents_manager.get(path, content=True) + notebook = NotebookInfo( + id=file['name'], + name=file['name'], + created=file.get('created', None), + last_modified=file.get('last_modified', None), + size=file.get('size', None), + content=file.get('content', None), + session_id=file.get('session_id', None), + ) + return notebook + case _, str(): + # Search for notebook with matching session ID + notebooks = await self.list_notebooks() + notebook_meta = next( + (nb for nb in notebooks if nb.session_id == session_id), + None, + ) + if notebook_meta is None: + raise FileNotFoundError(f"No notebook found for session ID {session_id}") + path = await self._find_notebook(notebook_meta.id) + file = await self.contents_manager.get(path, content=True) + notebook = NotebookInfo( + id=file['name'], + name=file['name'], + created=file.get('created', None), + last_modified=file.get('last_modified', None), + size=file.get('size', None), + content=file.get('content', None), + session_id=file.get('session_id', None), + ) + return notebook + case _: + raise ValueError("Invalid arguments provided") + + @with_hidden_files + async def save_notebook( + self, + content: NotebookContent, + notebook_id: Optional[str] = None, + session: Optional[str] = None, + name: Optional[str] = None, + ) -> NotebookInfo: + """ + Save a notebook's content by its session ID. + + Parameters + ---------- + notebook_id : str + The ID of the notebook. + content : NotebookContent + The content of the notebook to save. 
+ + Returns + ------- + NotebookInfo + The saved notebook's metadata.""" + if session is None: + session = str(uuid.uuid4()) + if notebook_id is None: + notebook_id = f"{session}.ipynb" + if name is None: + name = notebook_id + content.setdefault("metadata", {}) + content["metadata"].setdefault("beaker", {}) + content["metadata"]["beaker"]["session_id"] = session + path = os.path.join(self.notebook_path, notebook_id) + model = { + "type": "notebook", + "content": content, + "format": "json", + } + return await self.contents_manager.save(model=model, path=path) + + + + @with_hidden_files + async def delete_notebook(self, notebook_id: str) -> None: + """ + Delete a notebook by its ID. + + Parameters + ---------- + notebook_id : str + The ID of the notebook to delete. + """ + return await self.contents_manager.delete( + os.path.join(self.notebook_path, notebook_id) + ) + + +class BrowserLocalDataNotebookManager(BaseNotebookManager): + """ + Dummy implementation of notebook manager that stores notebooks in the browser's local storage. 
+ """ + + async def get_notebook_info(self, notebook_id: str) -> NotebookInfo: + record_id = f"browser-{notebook_id}" + return NotebookInfo( + id=record_id, + name=notebook_id, + type="browserStorage", + created="", + last_modified="", + size=0, + ) + + async def list_notebooks(self) -> list[NotebookInfo]: + return [ + NotebookInfo( + id="*", + name="*", + type="browserStorage", + created="", + last_modified="", + size=0, + ) + ] + + async def get_notebook( + self, + notebook_id: Optional[str] = None, + session_id: Optional[str] = None, + ) -> Optional[NotebookInfo]: + if notebook_id: + record_id = f"browser-notebook:{notebook_id}" + elif session_id: + record_id = f"browser-session:{session_id}" + return NotebookInfo( + id=record_id, + name=notebook_id, + type="browserStorage", + created="", + last_modified="", + size=0, + session_id=session_id, + ) + + async def save_notebook(self, notebook_id: str, content: NotebookContent) -> NotebookInfo: + raise NotImplementedError("Browser local data notebooks cannot be saved to the server") + + async def delete_notebook(self, notebook_id: str) -> None: + raise NotImplementedError("Browser local data notebooks cannot be deleted from the server") From 6b2a6481ade9765f77e7d14576a3e7c8197a2dde Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Wed, 29 Oct 2025 11:50:20 -0600 Subject: [PATCH 11/16] Update vue build to be sequential --- beaker-vue/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beaker-vue/package.json b/beaker-vue/package.json index 11440533..6693a2c6 100644 --- a/beaker-vue/package.json +++ b/beaker-vue/package.json @@ -9,7 +9,7 @@ "build": "run-s clean build-all routes", "build-ui": "vite build --config vite.config.app.ts", "build-lib": "vite build --config vite.config.lib.ts", - "build-all": "run-p build-ui build-lib", + "build-all": "run-s build-ui build-lib", "build-and-serve": "run-s build serve", "routes": "NODE_ENV=production vite-node utils/extractRoutes.ts", "preview": 
"vite preview", From 05c3b9e3797e9f3652136d353bf5eb1e358b11cb Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Mon, 3 Nov 2025 17:01:07 -0700 Subject: [PATCH 12/16] Notebook storage improvement and session management cleanup --- beaker-vue/src/pages/BaseInterface.vue | 42 ++++++++++++----- beaker_kernel/service/api/notebook.py | 18 ++++--- beaker_kernel/service/base.py | 44 +++++++++++++++-- beaker_kernel/service/storage/base.py | 21 ++++++++- beaker_kernel/service/storage/notebook.py | 57 ++++++++++++++++------- 5 files changed, 142 insertions(+), 40 deletions(-) diff --git a/beaker-vue/src/pages/BaseInterface.vue b/beaker-vue/src/pages/BaseInterface.vue index d06e54c0..05bee705 100644 --- a/beaker-vue/src/pages/BaseInterface.vue +++ b/beaker-vue/src/pages/BaseInterface.vue @@ -124,6 +124,7 @@ const notebookInfo = ref<{ session_id?: string; content?: any; checksum?: string; + metadata?: {[key: string]: any}; }>(null); // TODO -- WARNING: showToast is only defined locally, but provided/used everywhere. Move to session? @@ -359,7 +360,14 @@ const saveSnapshot = async (ignoreSession: boolean = false) => { }, body: JSON.stringify(notebookData), }); - notebookInfo.value = saveRequest.ok ? 
await saveRequest.json() : notebookInfo.value; + if (saveRequest.ok) { + const response = await saveRequest.json(); + notebookInfo.value = { + ...notebookInfo.value, + metadata: response, + checksum: notebookChecksum + } + } } } }; @@ -372,7 +380,20 @@ const loadSnapshot = async () => { try { const notebookInfoResponse = await fetch(`/beaker/notebook/?session=${session.sessionId}`); if (notebookInfoResponse.ok) { - notebookInfo.value = await notebookInfoResponse.json(); + const response = await notebookInfoResponse.json(); + const content = response.content; + const metadata = { + ...response, + content: undefined, + }; + const checksum = hashSum(content) + notebookInfo.value = { + ...notebookInfo.value, + content, + metadata, + checksum, + }; + } } catch (e) { @@ -387,10 +408,8 @@ const loadSnapshot = async () => { }; } - const notebookData = { + const notebookData: {[key: string]: any} = { ...(notebookInfo.value || {}), - content: undefined, - selectedCell: undefined, }; if (notebookInfo.value?.type === "browserStorage") { @@ -426,7 +445,6 @@ const loadSnapshot = async () => { window.localStorage.setItem("notebookData", JSON.stringify(fullData)); }).then(async () => { await loadSnapshot(); - console.log("Migration of notebook data complete."); }); return; } @@ -446,17 +464,17 @@ const loadSnapshot = async () => { if (notebookData && notebookData.content) { emit('open-file', notebookData.content, notebookData.name, {selectedCell: notebookData.selectedCell}); - if (notebookData.selectedCell !== undefined) { - nextTick(() => { - beakerSession.value.notebookComponent?.selectCell(notebookData.selectedCell); - }); - } + } + + if (notebookData.selectedCell !== undefined) { + nextTick(() => { + beakerSession.value.notebookComponent?.selectCell(notebookData.selectedCell); + }); } return notebookData; }; const providerConfig = () => { - // console.log(); } defineExpose({ diff --git a/beaker_kernel/service/api/notebook.py b/beaker_kernel/service/api/notebook.py index 
b2a7d38d..f50a9cae 100644 --- a/beaker_kernel/service/api/notebook.py +++ b/beaker_kernel/service/api/notebook.py @@ -49,14 +49,17 @@ async def head(self, notebook_id=None): async def get(self, notebook_id=None): notebook_id = notebook_id or None session_id = self.get_query_argument("session", None) - notebook = await self.notebook_manager.get_notebook(notebook_id, session_id) + try: + notebook = await self.notebook_manager.get_notebook(notebook_id, session_id) + except FileNotFoundError: + notebook = None if notebook is None: raise tornado.web.HTTPError(404, "Notebook not found") - self.finish(notebook) + self.write(notebook) # if notebook_id: # try: # notebook = await self.notebook_manager.get_notebook(notebook_id) - # self.finish(notebook) + # self.write(notebook) # return # except FileNotFoundError: # raise tornado.web.HTTPError(404, f"Notebook {notebook_id} not found") @@ -66,18 +69,19 @@ async def get(self, notebook_id=None): # If only a single notebook with ID "*", return it directly to allow browser to use alternative storage # if len(notebooks) == 1 and notebooks[0].id == "*": # notebooks[0].session_id = session_id - # return self.finish(notebooks[0]) + # return self.write(notebooks[0]) # if session_id is not None: # for nb in notebooks: # if nb.session_id == session_id: # notebook = await self.notebook_manager.get_notebook(nb.id) - # self.finish(notebook) + # self.write(notebook) # raise tornado.web.HTTPError(404, f"No notebook found for session {session_id}") # else: - # self.finish(notebooks) + # self.write(notebooks) async def post(self, notebook_id=None): + user = self.get_current_user() notebook_id = notebook_id or None session = self.get_query_argument("session", None) name = self.get_query_argument("name", None) @@ -107,7 +111,7 @@ async def delete(self, notebook_id=None): raise tornado.web.HTTPError(400, "No notebook ID provided for deletion") await self.notebook_manager.delete_notebook(notebook_id) self.set_status(204) - self.finish() + # 
self.finish() handlers = [ diff --git a/beaker_kernel/service/base.py b/beaker_kernel/service/base.py index 27f30521..1155ba5f 100644 --- a/beaker_kernel/service/base.py +++ b/beaker_kernel/service/base.py @@ -38,6 +38,35 @@ class BeakerSessionManager(SessionManager): + async def prune_sessions(self, all=False) -> int: + """ + Removes sessions from the session store. + + Parameters + ---------- + all : bool + If true, all sessions are removed. + If false, only sessions without active kernels are removed. + + Returns + ------- + int + Number of sessions pruned. + """ + count = 0 + all_sessions = await self.list_sessions(include_missing=True) + for session in all_sessions: + kernel_model = session.get("kernel", None) + kernel_id = kernel_model and kernel_model.get("id") + if all or await self.kernel_culled(kernel_id): + await self.delete_session(session_id=session["id"]) + count += 1 + return count + + async def list_sessions(self, include_missing=False) -> list[dict]: + return await super().list_sessions() + + def get_kernel_env(self, path, name = None): """Get environment variables for Beaker kernel sessions. 
@@ -100,7 +129,7 @@ async def start_kernel_for_session(self, session_id, path, name, type, kernel_na """ user: BeakerUser = current_user.get() if user: - virtual_home_root = self.kernel_manager.root_dir + virtual_home_root = self.parent.virtual_home_root virtual_home_dir = os.path.join(virtual_home_root, user.home_dir) subkernel_user = self.parent.subkernel_user @@ -465,8 +494,9 @@ class BaseBeakerApp(ServerApp): config=True ) contents_manager_class = traitlets.Type( - f"{__package__}.storage.base.BeakerLocalContentsManager", - config=True + klass=f"{__package__}.storage.base.BaseBeakerContentsManager", + default_value=f"{__package__}.storage.base.BeakerLocalContentsManager", + config=True, ) kernel_spec_manager_class = traitlets.Type( f"{__package__}.base.BeakerKernelSpecManager", @@ -477,6 +507,10 @@ class BaseBeakerApp(ServerApp): # default_value=f"{__package__}.storage.notebook.FileNotebookManager", config=True ) + virtual_home_root = traitlets.Unicode( + help="Path pointing to where user directories should be stored. 
Defaults to 'root_dir' if not set.", + config=True, + ) kernel_spec_include_local = traitlets.Bool(True, help="Include local kernel specs", config=True) kernel_spec_managers = traitlets.Dict(help="Kernel specification managers indexed by extension name", config=True) @@ -664,6 +698,10 @@ def _default_kernel_spec_managers(self): result[extension_slug] = spec_manager(parent=self) return result + @traitlets.default("virtual_home_root") + def _default_virtual_home_root(self): + return self.root_dir + @property def _default_root_dir(self): return self.working_dir or super()._default_root_dir() diff --git a/beaker_kernel/service/storage/base.py b/beaker_kernel/service/storage/base.py index c3ad8811..212bf860 100644 --- a/beaker_kernel/service/storage/base.py +++ b/beaker_kernel/service/storage/base.py @@ -5,6 +5,7 @@ import traitlets +from jupyter_server.base.handlers import AuthenticatedFileHandler from jupyter_server.services.contents.manager import ContentsManager from jupyter_server.services.contents.largefilemanager import AsyncLargeFileManager from beaker_kernel.service.auth import current_user, BeakerUser, BeakerAuthorizer, BeakerIdentityProvider @@ -26,7 +27,25 @@ async def wrapper(self, *args, **kwargs): class BaseBeakerContentsManager(ContentsManager): pass + +class BeakerLocalContentsHandler(AuthenticatedFileHandler): + @classmethod + def get_content(cls, abspath, start = None, end = None): + return super().get_content(abspath, start, end) + + @classmethod + def get_absolute_path(cls, root, path): + return super().get_absolute_path(root, path) + + def parse_url_path(self, url_path): + os_path = super().parse_url_path(url_path) + return os.path.join(self.current_user.home_dir, os_path) + + class BeakerLocalContentsManager(AsyncLargeFileManager, BaseBeakerContentsManager): + + files_handler_class = BeakerLocalContentsHandler + def _get_os_path(self, path): """Override path resolution to use user-specific home directory. 
@@ -42,7 +61,7 @@ def _get_os_path(self, path): """ user: BeakerUser = current_user.get() if user: - path = os.path.join(user.home_dir, path) + path = os.path.join(self.parent.virtual_home_root, user.home_dir, path) return super()._get_os_path(path) async def _notebook_model(self, path, content=True, require_hash=False): diff --git a/beaker_kernel/service/storage/notebook.py b/beaker_kernel/service/storage/notebook.py index 3e6922aa..d0e9e55b 100644 --- a/beaker_kernel/service/storage/notebook.py +++ b/beaker_kernel/service/storage/notebook.py @@ -6,6 +6,7 @@ import traitlets from traitlets.config import Configurable +from jupyter_core.utils import ensure_async from jupyter_server.services.contents.manager import ContentsManager from jupyter_server.services.contents.filemanager import AsyncFileContentsManager from beaker_kernel.service.auth import BeakerUser @@ -17,7 +18,7 @@ async def wrapper(self, *args, **kwargs): orig_allow_hidden = self.contents_manager.allow_hidden self.contents_manager.allow_hidden = True try: - result = await func(self, *args, **kwargs) + result = await ensure_async(func(self, *args, **kwargs)) finally: self.contents_manager.allow_hidden = orig_allow_hidden return result @@ -58,10 +59,19 @@ async def delete_notebook(self, notebook_id: str) -> None: class FileNotebookManager(BaseNotebookManager): + contents_manager_class = traitlets.Type( + default_value=None, + klass=ContentsManager, + allow_none=True, + config=True, + ) + contents_manager_params = traitlets.Dict( + default_value={}, + config=True, + ) contents_manager = traitlets.Instance( - ContentsManager, + klass=ContentsManager, help="Contents manager used by the NotebookManager", - allow_none=False, config=True, ) notebook_path = traitlets.Unicode( @@ -72,6 +82,8 @@ class FileNotebookManager(BaseNotebookManager): @traitlets.default("contents_manager") def _default_contents_manager(self): + if self.contents_manager_class not in (traitlets.Undefined, None, ""): + return 
self.contents_manager_class(parent=self, **self.contents_manager_params) if getattr(self.parent, "contents_manager", None): return self.parent.contents_manager else: @@ -92,7 +104,7 @@ async def _find_notebook(self, notebook_id: str) -> str: The file path of the notebook. """ path = os.path.join(self.notebook_path, notebook_id) - if not await self.contents_manager.file_exists(path): + if not await ensure_async(self.contents_manager.file_exists(path)): raise FileNotFoundError(f"Notebook with session ID {notebook_id} not found") return path @@ -110,11 +122,11 @@ async def get_notebook_info(self, notebook_id: str) -> NotebookInfo: Metadata about the notebook. """ - path = await self._find_notebook(notebook_id) - notebook = await self.contents_manager.get( + path = await ensure_async(self._find_notebook(notebook_id)) + notebook = await ensure_async(self.contents_manager.get( path, content=False - ) + )) return NotebookInfo( id=notebook['name'], name=notebook['name'], @@ -138,8 +150,8 @@ async def list_notebooks(self) -> list[NotebookInfo]: path = self.notebook_path.format(notebook_id="") except KeyError: path = self.notebook_path - if await self.contents_manager.dir_exists(path): - files = await self.contents_manager.get(path, content=True) + if await ensure_async(self.contents_manager.dir_exists(path)): + files = await ensure_async(self.contents_manager.get(path, content=True)) else: files = { "content": [] @@ -184,10 +196,10 @@ async def get_notebook( raise ValueError("Either notebook_id or session_id must be provided") case str(), _: try: - path = await self._find_notebook(notebook_id) + path = await ensure_async(self._find_notebook(notebook_id)) except KeyError: path = self.notebook_path - file = await self.contents_manager.get(path, content=True) + file = await ensure_async(self.contents_manager.get(path, content=True)) notebook = NotebookInfo( id=file['name'], name=file['name'], @@ -200,15 +212,15 @@ async def get_notebook( return notebook case _, str(): # Search 
for notebook with matching session ID - notebooks = await self.list_notebooks() + notebooks = await ensure_async(self.list_notebooks()) notebook_meta = next( (nb for nb in notebooks if nb.session_id == session_id), None, ) if notebook_meta is None: raise FileNotFoundError(f"No notebook found for session ID {session_id}") - path = await self._find_notebook(notebook_meta.id) - file = await self.contents_manager.get(path, content=True) + path = await ensure_async(self._find_notebook(notebook_meta.id)) + file = await ensure_async(self.contents_manager.get(path, content=True)) notebook = NotebookInfo( id=file['name'], name=file['name'], @@ -258,8 +270,19 @@ async def save_notebook( "type": "notebook", "content": content, "format": "json", + "session_id": session, } - return await self.contents_manager.save(model=model, path=path) + if not await ensure_async(self.contents_manager.dir_exists(self.notebook_path)): + await ensure_async(self.contents_manager.new( + model={ + "type": "directory", + }, + path=self.notebook_path + )) + if await ensure_async(self.contents_manager.file_exists(path)): + return await ensure_async(self.contents_manager.save(model=model, path=path)) + else: + return await ensure_async(self.contents_manager.new(model=model, path=path)) @@ -273,9 +296,9 @@ async def delete_notebook(self, notebook_id: str) -> None: notebook_id : str The ID of the notebook to delete. 
""" - return await self.contents_manager.delete( + return await ensure_async(self.contents_manager.delete( os.path.join(self.notebook_path, notebook_id) - ) + )) class BrowserLocalDataNotebookManager(BaseNotebookManager): From 30593b05899fa53ee583917b65acbd019b8a99cc Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Tue, 4 Nov 2025 10:28:22 -0700 Subject: [PATCH 13/16] Fix for duplicated root in path --- beaker_kernel/service/storage/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beaker_kernel/service/storage/base.py b/beaker_kernel/service/storage/base.py index 212bf860..1f630e98 100644 --- a/beaker_kernel/service/storage/base.py +++ b/beaker_kernel/service/storage/base.py @@ -61,7 +61,7 @@ def _get_os_path(self, path): """ user: BeakerUser = current_user.get() if user: - path = os.path.join(self.parent.virtual_home_root, user.home_dir, path) + return os.path.join(self.parent.virtual_home_root, user.home_dir, path) return super()._get_os_path(path) async def _notebook_model(self, path, content=True, require_hash=False): From a485a33c9e2421fbc9fece6aa2df0fb3ab9eae16 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Thu, 20 Nov 2025 10:48:54 -0700 Subject: [PATCH 14/16] Allow auto-discovery/importing from both entrypoints and json files --- beaker_kernel/lib/autodiscovery.py | 98 ++++++++++++++++++++++++++---- 1 file changed, 87 insertions(+), 11 deletions(-) diff --git a/beaker_kernel/lib/autodiscovery.py b/beaker_kernel/lib/autodiscovery.py index dce7a4bd..19fbc4c5 100644 --- a/beaker_kernel/lib/autodiscovery.py +++ b/beaker_kernel/lib/autodiscovery.py @@ -1,10 +1,13 @@ +import importlib import json import logging import os import sys import typing -from collections.abc import Mapping +import warnings +from collections.abc import Mapping, ItemsView from importlib.metadata import entry_points, EntryPoints, EntryPoint +from traceback import format_exc logger = logging.getLogger(__name__) @@ -89,26 +92,86 @@ class 
AutodiscoveryItems(Mapping[str, type]): raw: EntryPoints mapping: dict[str, EntryPoint] + # Temporary transitional storage for use while migrating from json files to entrypoints + raw_jsons: dict[str, type|dict[str, str]] + + class AutodiscoveryItemsView(ItemsView): + """ + A view class that overrides the default ItemsView to handle exceptions during iteration. + Prevents the entire application from failing if an extension cannot be loaded. + """ + def __init__(self, mapping: "AutodiscoveryItems"): + super().__init__(mapping) + + def __iter__(self): + for key in self._mapping: + try: + yield (key, self._mapping[key]) + except Exception as err: + output = [ + f"Unable to load autodiscovery item '{key}'. Error: {err}", + " Exception traceback when loading item:", + f" ================ Traceback Start ================", + ] + indented_tb = [f" {line}" for line in format_exc().splitlines()] + output.extend(indented_tb) + output.append( + f" ================ Traceback Done =================", + ) + logger.warning("\n".join(output)) + continue + def __init__(self, entrypoints_instance: EntryPoints): self.raw = entrypoints_instance self.mapping = { item.name: item for item in self.raw } + self.raw_jsons = {} def __getitem__(self, key): - try: - item: EntryPoint = self.mapping.get(key, None) - if item: - item = item.load() - except ImportError: - raise - - return item + # Loading from etrypoints is the new preferred method. 
+ # Load class from entrypoint + item: EntryPoint = self.mapping.get(key, None) + if item: + item = item.load() + return item + + # Fallback to loading from old json file + item = self.mapping.get(key, self.raw_jsons.get(key)) + if isinstance(item, (str, bytes, os.PathLike)) and os.path(path := os.fspath(item)) and path.endswith('.json'): + with open(path) as jsonfile: + item = json.load(jsonfile) + item["mapping_file"] = path + match item: + case type(): + return item + case {"slug": slug, "package": package, "class_name": class_name, **kw}: + mapping_file = kw.get("mapping_file", None) + module = importlib.import_module(package) + assert slug == key, f"Autoimported item's slug ('{slug}') does not match key ('{key}')" + discovered_class = getattr(module, class_name) + if mapping_file: + setattr(discovered_class, '_autodiscovery', { + "mapping_file": mapping_file, + **item + }) + self.mapping[key] = discovered_class + return discovered_class + case _: + raise ValueError(f"Unable to handle autodiscovery item '{item}' (type '{item.__class__}')") + + def add_json_mapping(self, key: str, value: type|dict[str, str]): + self.raw_jsons[key] = value + def __iter__(self): yield from self.mapping.keys() + yield from self.raw_jsons.__iter__() + + def items(self): + return self.AutodiscoveryItemsView(self) def __len__(self): - return len(self.raw) + return len(self.raw) + len(self.raw_jsons) def autodiscover(mapping_type: ResourceType) -> typing.Dict[str, type]: @@ -117,4 +180,17 @@ def autodiscover(mapping_type: ResourceType) -> typing.Dict[str, type]: """ group = f"beaker.{mapping_type}" eps = entry_points(group=group) - return AutodiscoveryItems(eps) + items: AutodiscoveryItems = AutodiscoveryItems(eps) + + # Add legacy json mappings + for mapping_file, data in find_mappings(mapping_type): + slug = data["slug"] + items.add_json_mapping(slug, {"mapping_file": mapping_file, **data}) + warnings.warn( + ( + f"Beaker is loading {mapping_type} from legacy JSON mapping file 
{mapping_file}.\n" + f" This package should be rebuilt using entrypoints for better performance and reliability." + ), + DeprecationWarning + ) + return items From 7739cb41f5ff36c327f762ac0a1c26c3b7ebd376 Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Thu, 20 Nov 2025 13:45:52 -0700 Subject: [PATCH 15/16] Changes for consistent xsrf header passing from frontend --- .../dev-interface/BeakerFilePane.vue | 1 + .../misc/StreamlineExportDialog.vue | 1 + .../notebook/BeakerNotebookToolbar.vue | 1 + .../src/components/panels/ConfigPanel.vue | 1 + .../components/panels/FileContentsPanel.vue | 1 + beaker-vue/src/main.ts | 8 ++++ beaker-vue/src/pages/BaseInterface.vue | 1 + beaker-vue/src/pages/BeakerAdmin.vue | 1 + beaker-vue/src/util/fetch.ts | 42 +++++++++++++++++++ beaker-vue/src/util/integration.ts | 2 + beaker_kernel/service/api/handlers.py | 1 - beaker_kernel/service/dev.py | 15 ++++--- 12 files changed, 66 insertions(+), 9 deletions(-) create mode 100644 beaker-vue/src/util/fetch.ts diff --git a/beaker-vue/src/components/dev-interface/BeakerFilePane.vue b/beaker-vue/src/components/dev-interface/BeakerFilePane.vue index c847ba5b..30a03e1e 100644 --- a/beaker-vue/src/components/dev-interface/BeakerFilePane.vue +++ b/beaker-vue/src/components/dev-interface/BeakerFilePane.vue @@ -70,6 +70,7 @@ import Panel from 'primevue/panel'; import Column from 'primevue/column'; import DataTable from 'primevue/datatable'; import cookie from 'cookie'; +import { fetch } from '@/util/fetch'; import { ContentsManager } from '@jupyterlab/services'; diff --git a/beaker-vue/src/components/misc/StreamlineExportDialog.vue b/beaker-vue/src/components/misc/StreamlineExportDialog.vue index 3f9474be..9ed761b2 100644 --- a/beaker-vue/src/components/misc/StreamlineExportDialog.vue +++ b/beaker-vue/src/components/misc/StreamlineExportDialog.vue @@ -63,6 +63,7 @@ import { ProgressSpinner, Button, Divider, ToggleSwitch, InputGroup, InputGroupA import { getDateTimeString, downloadFileDOM } from 
"@/util"; import { PageConfig, URLExt } from '@jupyterlab/coreutils'; import contentDisposition from "content-disposition"; +import { fetch } from '@/util/fetch'; const showOverlay = inject<(contents: string, header?: string) => void>('show_overlay'); diff --git a/beaker-vue/src/components/notebook/BeakerNotebookToolbar.vue b/beaker-vue/src/components/notebook/BeakerNotebookToolbar.vue index 4188f7bd..8f17dfd7 100644 --- a/beaker-vue/src/components/notebook/BeakerNotebookToolbar.vue +++ b/beaker-vue/src/components/notebook/BeakerNotebookToolbar.vue @@ -153,6 +153,7 @@ import OpenNotebookButton from "../misc/OpenNotebookButton.vue"; import { downloadFileDOM, getDateTimeString } from '../../util'; import StreamlineExportDialog from "../misc/StreamlineExportDialog.vue" import { type BeakerSessionComponentType } from "../session/BeakerSession.vue"; +import { fetch } from '@/util/fetch'; const session = inject('session'); const notebook = inject('notebook'); diff --git a/beaker-vue/src/components/panels/ConfigPanel.vue b/beaker-vue/src/components/panels/ConfigPanel.vue index 1d6466f5..f0663b42 100644 --- a/beaker-vue/src/components/panels/ConfigPanel.vue +++ b/beaker-vue/src/components/panels/ConfigPanel.vue @@ -35,6 +35,7 @@ import type { BeakerSessionComponentType } from '../session/BeakerSession.vue'; import ConfigEntryComponent from '../misc/ConfigEntryComponent.vue' import { useConfirm } from "primevue/useconfirm"; import ProgressSpinner from "primevue/progressspinner"; +import { fetch } from '@/util/fetch'; const beakerSession = inject("beakerSession"); diff --git a/beaker-vue/src/components/panels/FileContentsPanel.vue b/beaker-vue/src/components/panels/FileContentsPanel.vue index a7abb1c4..62a4b2de 100644 --- a/beaker-vue/src/components/panels/FileContentsPanel.vue +++ b/beaker-vue/src/components/panels/FileContentsPanel.vue @@ -82,6 +82,7 @@ import Toolbar from "primevue/toolbar"; import Button from "primevue/button"; import CodeEditor from 
"../misc/CodeEditor.vue"; import BeakerMimeBundle from "../render/BeakerMimeBundle.vue"; +import { fetch } from '@/util/fetch'; const codeEditorRef = ref(); const pdfPreviewRef = ref(); diff --git a/beaker-vue/src/main.ts b/beaker-vue/src/main.ts index 0a53a1c4..9e97bf52 100644 --- a/beaker-vue/src/main.ts +++ b/beaker-vue/src/main.ts @@ -12,6 +12,8 @@ import { vKeybindings } from './directives/keybindings'; import { vAutoScroll } from './directives/autoscroll'; import BeakerThemePlugin from './plugins/theme'; import BeakerAppConfigPlugin from './plugins/appconfig'; +import { fetch, client } from './util/fetch'; +import * as cookie from 'cookie'; import App from './App.vue'; import createRouter from './router'; @@ -25,6 +27,8 @@ const baseUrl = PageConfig.getBaseUrl(); const confUrl = URLExt.join(baseUrl, '/config') + `?q=${Date.now().toString()}`; const configResponse = await fetch(confUrl); const config = await configResponse.json(); +const baseHost = URLExt.parse(config.baseUrl).host; + const app = createApp(App, {config}); const router = createRouter(config); @@ -54,4 +58,8 @@ app.directive('focustrap', FocusTrap); app.directive('keybindings', vKeybindings); app.directive('autoscroll', vAutoScroll); +const cookies = cookie.parse(document.cookie); +const xsrfCookie = cookies._xsrf; +client.setDefaultHeaders(baseHost, {"X-XSRFToken": xsrfCookie}) + app.mount('#app'); diff --git a/beaker-vue/src/pages/BaseInterface.vue b/beaker-vue/src/pages/BaseInterface.vue index 05bee705..b3e807a2 100644 --- a/beaker-vue/src/pages/BaseInterface.vue +++ b/beaker-vue/src/pages/BaseInterface.vue @@ -101,6 +101,7 @@ import InputText from 'primevue/inputtext'; import InputGroup from 'primevue/inputgroup'; import Button from 'primevue/button'; import ProviderSelector from '../components/misc/ProviderSelector.vue'; +import { fetch } from '../util/fetch'; import hashSum from 'hash-sum'; import {default as ConfigPanel, getConfigAndSchema, dropUnchangedValues, objectifyTables, 
tablifyObjects, saveConfig} from '../components/panels/ConfigPanel.vue'; diff --git a/beaker-vue/src/pages/BeakerAdmin.vue b/beaker-vue/src/pages/BeakerAdmin.vue index 5298993e..65a6d16f 100644 --- a/beaker-vue/src/pages/BeakerAdmin.vue +++ b/beaker-vue/src/pages/BeakerAdmin.vue @@ -145,6 +145,7 @@ import Select from 'primevue/select'; import Toast from 'primevue/toast'; import { useConfirm } from 'primevue/useconfirm'; import { useToast } from 'primevue/usetoast'; +import { fetch } from '@/util/fetch'; const props = defineProps([ "config" diff --git a/beaker-vue/src/util/fetch.ts b/beaker-vue/src/util/fetch.ts new file mode 100644 index 00000000..45e6f355 --- /dev/null +++ b/beaker-vue/src/util/fetch.ts @@ -0,0 +1,42 @@ +import { URLExt } from '@jupyterlab/coreutils'; + +type DefaultHeaders = Record; + +class FetchClient { + private defaultHeaders: DefaultHeaders; + + constructor(defaultHeaders: DefaultHeaders = {}) { + this.defaultHeaders = defaultHeaders; + } + + setDefaultHeaders(urlRegex: string|RegExp, headers: HeadersInit) { + this.defaultHeaders[urlRegex.toString()] = headers; + } + + async fetch(url: string, options?: RequestInit): Promise { + const absUrl = URLExt.parse(url).href; + const headers = Object.entries(this.defaultHeaders).reduce((prev, [regex, headers]) => { + if (new RegExp(regex).test(absUrl)) { + return {...prev, ...headers}; + } + else { + return prev; + } + }, {}); + + return fetch(url, { + ...options, + headers: { + ...headers, + ...options?.headers, + }, + }); + } +} + +// Create a default fetch client instance +export const client = new FetchClient(); + +// Export the fetch method bound to the default instance +const fetchMethod = client.fetch.bind(client); +export {fetchMethod as fetch}; diff --git a/beaker-vue/src/util/integration.ts b/beaker-vue/src/util/integration.ts index a7df3c83..eb954ed7 100644 --- a/beaker-vue/src/util/integration.ts +++ b/beaker-vue/src/util/integration.ts @@ -1,3 +1,5 @@ +import { fetch } from 
'@/util/fetch'; + export interface IntegrationResource { // names must be coherent with python resource class resource_type: string diff --git a/beaker_kernel/service/api/handlers.py b/beaker_kernel/service/api/handlers.py index bd50d777..944cd3d0 100644 --- a/beaker_kernel/service/api/handlers.py +++ b/beaker_kernel/service/api/handlers.py @@ -62,7 +62,6 @@ def find_api_handlers(base=None) -> Generator[tuple[str, Any, str], None, None]: s = f'beaker_kernel.service.api.{f[:-3]}' mod = importlib.import_module(s) if "handlers" in dir(mod): - logger.warning(f"Found handlers in {s}") for handlers in getattr(mod, "handlers"): yield handlers diff --git a/beaker_kernel/service/dev.py b/beaker_kernel/service/dev.py index 18a54508..b2a02dc6 100644 --- a/beaker_kernel/service/dev.py +++ b/beaker_kernel/service/dev.py @@ -13,6 +13,8 @@ from beaker_kernel.service.notebook import BeakerNotebookApp from beaker_kernel.lib.autodiscovery import autodiscover from beaker_kernel.lib.config import config +from beaker_kernel.service.auth.dummy import DummyAuthorizer, DummyIdentityProvider + # Global notebook storage for notebook that lives for lifetime of service @@ -22,15 +24,12 @@ app_subprocess = None -def _jupyter_server_extension_points(): - return [{"module": "beaker_kernel.service.dev", "app": DevBeakerJupyterApp}] - - class DevBeakerJupyterApp(BeakerNotebookApp): - def initialize_settings(self): - # Override to allow cross domain websockets - self.settings["allow_origin"] = "*" - self.settings["disable_check_xsrf"] = True + + defaults = { + "authorizer_class": DummyAuthorizer, + "identity_provider_class": DummyIdentityProvider, + } class BeakerWatchDog(watchdog_events.FileSystemEventHandler): From d2baaa82a8069f2daa87683dd9ab922b7835964a Mon Sep 17 00:00:00 2001 From: Matthew Printz Date: Thu, 20 Nov 2025 14:07:59 -0700 Subject: [PATCH 16/16] Finishing touches to autodiscovery update --- beaker_kernel/builder/beaker.py | 2 ++ beaker_kernel/cli/main.py | 4 ++++ 
beaker_kernel/cli/running.py | 7 ------- beaker_kernel/lib/autodiscovery.py | 20 +++++++++++--------- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/beaker_kernel/builder/beaker.py b/beaker_kernel/builder/beaker.py index 8f756dd4..0a470689 100644 --- a/beaker_kernel/builder/beaker.py +++ b/beaker_kernel/builder/beaker.py @@ -2,6 +2,7 @@ import importlib import importlib.util import json +import os import os.path import shutil import sys @@ -31,6 +32,7 @@ class BeakerBuildHook(BuildHookInterface): PLUGIN_NAME = "beaker" def __init__(self, root: str, config: dict[str, Any], build_config: Any, metadata: ProjectMetadata, directory: str, target_name: str, app: Application | None = None) -> None: + os.environ["BUILD_ACTIVE"] = "TRUE" super().__init__(root, config, build_config, metadata, directory, target_name, app) self.inserted_paths = set() diff --git a/beaker_kernel/cli/main.py b/beaker_kernel/cli/main.py index acc96bf0..366d656c 100644 --- a/beaker_kernel/cli/main.py +++ b/beaker_kernel/cli/main.py @@ -1,5 +1,6 @@ import click import importlib +import os from beaker_kernel.lib.autodiscovery import find_mappings, autodiscover @@ -14,6 +15,9 @@ def __init__(self, *args, **kwargs) -> None: self.subcommands = {} self.apps = {} + if os.environ.get("BUILD_ACTIVE", "FALSE").upper() == "TRUE": + return + # Register commands from extensions for group_name, entry in autodiscover("commands").items(): group = entry.as_group() diff --git a/beaker_kernel/cli/running.py b/beaker_kernel/cli/running.py index b651003a..118ff8f3 100644 --- a/beaker_kernel/cli/running.py +++ b/beaker_kernel/cli/running.py @@ -10,11 +10,6 @@ from beaker_kernel.service.notebook import BeakerNotebookApp -def set_config_from_app(app: "BeakerNotebookApp"): - os.environ.setdefault("JUPYTER_SERVER", app.connection_url) - os.environ.setdefault("JUPYTER_TOKEN", app.identity_provider.token) - - @click.command(context_settings={"ignore_unknown_options": True, "allow_extra_args": True}) 
@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED) @click.pass_context @@ -31,7 +26,6 @@ def notebook(ctx, extra_args, beakerapp_cls=None): app = BeakerNotebookApp.instance(**{"IdentityProvider.token": config.jupyter_token}) app.initialize(argv=extra_args) config.jupyter_server = app.connection_url - set_config_from_app(app) app.start() except (InterruptedError, KeyboardInterrupt, EOFError) as err: print(err) @@ -67,7 +61,6 @@ def serve(open_notebook, extra_args): loop = ensure_event_loop() try: app = BeakerNotebookApp.instance() - set_config_from_app(app) app.initialize(argv=extra_args) if open_notebook: webbrowser.open(app.public_url) diff --git a/beaker_kernel/lib/autodiscovery.py b/beaker_kernel/lib/autodiscovery.py index 19fbc4c5..3088cd44 100644 --- a/beaker_kernel/lib/autodiscovery.py +++ b/beaker_kernel/lib/autodiscovery.py @@ -91,6 +91,7 @@ def find_mappings(resource_type: ResourceType) -> typing.Generator[typing.Dict[s class AutodiscoveryItems(Mapping[str, type]): raw: EntryPoints mapping: dict[str, EntryPoint] + rehydrated: dict[str, type] # Temporary transitional storage for use while migrating from json files to entrypoints raw_jsons: dict[str, type|dict[str, str]] @@ -109,19 +110,16 @@ def __iter__(self): yield (key, self._mapping[key]) except Exception as err: output = [ - f"Unable to load autodiscovery item '{key}'. 
Error: {err}", - " Exception traceback when loading item:", - f" ================ Traceback Start ================", + f"Warning: Error while attempting to load autodiscovered item '{key}':", ] - indented_tb = [f" {line}" for line in format_exc().splitlines()] + indented_tb = [f" {line}" for line in format_exc().splitlines()[-3:]] output.extend(indented_tb) - output.append( - f" ================ Traceback Done =================", - ) + output.append("") logger.warning("\n".join(output)) continue def __init__(self, entrypoints_instance: EntryPoints): + self.rehydrated = {} self.raw = entrypoints_instance self.mapping = { item.name: item for item in self.raw @@ -129,15 +127,19 @@ def __init__(self, entrypoints_instance: EntryPoints): self.raw_jsons = {} def __getitem__(self, key): + if key in self.rehydrated: + return self.rehydrated[key] + # Loading from etrypoints is the new preferred method. # Load class from entrypoint item: EntryPoint = self.mapping.get(key, None) if item: item = item.load() + self.rehydrated[key] = item return item # Fallback to loading from old json file - item = self.mapping.get(key, self.raw_jsons.get(key)) + item = self.raw_jsons.get(key) if isinstance(item, (str, bytes, os.PathLike)) and os.path(path := os.fspath(item)) and path.endswith('.json'): with open(path) as jsonfile: item = json.load(jsonfile) @@ -155,7 +157,7 @@ def __getitem__(self, key): "mapping_file": mapping_file, **item }) - self.mapping[key] = discovered_class + self.rehydrated[key] = discovered_class return discovered_class case _: raise ValueError(f"Unable to handle autodiscovery item '{item}' (type '{item.__class__}')")