diff --git a/.air.toml b/.air.toml
index 6e3adb0f8e..8e2a6358a8 100644
--- a/.air.toml
+++ b/.air.toml
@@ -5,4 +5,4 @@ indent-style = "space"
line-ending = "auto"
persistent-line-breaks = true
exclude = []
-default-exclude = true
\ No newline at end of file
+default-exclude = true
diff --git a/.ruff.toml b/.ruff.toml
new file mode 100644
index 0000000000..2b498ce86a
--- /dev/null
+++ b/.ruff.toml
@@ -0,0 +1,14 @@
+extend-exclude = [
+ "**/renv",
+ "**/.venv",
+ ".github",
+ ".vscode",
+ "**/node_modules",
+ "**/__pycache__",
+ "**/build",
+ # file with cell magic not parsed by ruff
+ "src/resources/jupyter/lang/python/cleanup.py",
+ "src/resources/jupyter/lang/python/setup.py",
+]
+# don't format .ipynb files, since their cells contain quarto cell comments
+include = ["**/*.py"]
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index 4888f46730..9af1a2ca99 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -4,6 +4,8 @@
"esbenp.prettier-vscode",
"sumneko.lua",
"nvarner.typst-lsp",
- "Posit.air-vscode"
+ "Posit.air-vscode",
+ "charliermarsh.ruff",
+ "tamasfe.even-better-toml"
]
}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 1802fcff71..31f30dea90 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -2,6 +2,18 @@
"[typescript]": {
"editor.defaultFormatter": "denoland.vscode-deno"
},
+ "notebook.formatOnSave.enabled": false,
+ "notebook.codeActionsOnSave": {
+ "notebook.source.organizeImports": "explicit"
+ },
+ "pylint.enabled": false,
+ "[python]": {
+ "editor.formatOnSave": true,
+ "editor.defaultFormatter": "charliermarsh.ruff",
+ "editor.codeActionsOnSave": {
+ "source.organizeImports": "explicit"
+ }
+ },
"[r]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "Posit.air-vscode"
@@ -13,6 +25,10 @@
"[html]": {
"editor.formatOnSave": false
},
+ "[toml]": {
+ "editor.formatOnSave": true,
+ "editor.defaultFormatter": "tamasfe.even-better-toml"
+ },
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2,
diff --git a/dev-docs/feature-format-matrix/create_table.py b/dev-docs/feature-format-matrix/create_table.py
index 1ead753162..450bfef5c2 100644
--- a/dev-docs/feature-format-matrix/create_table.py
+++ b/dev-docs/feature-format-matrix/create_table.py
@@ -2,8 +2,8 @@
import json
import pathlib
-class Trie:
+class Trie:
def __init__(self):
self.children = {}
self.values = []
@@ -37,13 +37,12 @@ def tabulator(self):
children = v.tabulator()
feature = k
if v.entry != "":
- link = "" % v.entry
+ link = (
+ ""
+ % v.entry
+ )
feature = "%s %s" % (link, k)
- d = {
- "sort_key": k,
- "feature": feature,
- **v.tabulator_leaf()
- }
+ d = {"sort_key": k, "feature": feature, **v.tabulator_leaf()}
if children:
d["_children"] = children
result.append(d)
@@ -60,13 +59,14 @@ def size(self):
return 1
return sum([v.size() for v in self.children.values()])
- def walk(self, visitor, path = None):
+ def walk(self, visitor, path=None):
if path is None:
path = []
visitor(self, path)
for k, v in self.children.items():
v.walk(visitor, path + [k])
+
def extract_metadata_from_file(file):
with open(file, "r") as f:
lines = f.readlines()
@@ -78,10 +78,13 @@ def extract_metadata_from_file(file):
start = i
else:
end = i
- metadata = yaml.load("".join(lines[start+1:end]), Loader=yaml.SafeLoader)
+ metadata = yaml.load(
+ "".join(lines[start + 1 : end]), Loader=yaml.SafeLoader
+ )
return metadata
raise ValueError("No metadata found in file %s" % file)
+
def table_cell(entry, _feature, _format_name, format_config):
if type(format_config) == str:
format_config = {}
@@ -90,8 +93,22 @@ def table_cell(entry, _feature, _format_name, format_config):
if quality is not None:
if type(quality) == str:
quality = quality.lower()
- qualities = {-1: "🚫", 0: "⚠", 1: "✓", 2: "✓✓", "unknown": "❓", "na": "NA"}
- colors = {-1: "bad", 0: "ok", 1: "good", 2: "good", "unknown": "unknown", "na": "na"}
+ qualities = {
+ -1: "🚫",
+ 0: "⚠",
+ 1: "✓",
+ 2: "✓✓",
+ "unknown": "❓",
+ "na": "NA",
+ }
+ colors = {
+ -1: "bad",
+ 0: "ok",
+ 1: "good",
+ 2: "good",
+ "unknown": "unknown",
+ "na": "na",
+ }
color = colors[quality]
quality_icon = qualities.get(quality, "❓")
result.append(f"{quality_icon}")
@@ -101,7 +118,8 @@ def table_cell(entry, _feature, _format_name, format_config):
result.append(f"💬")
return "".join(result)
-def compute_trie(detailed = False):
+
+def compute_trie(detailed=False):
trie = Trie()
pattern = "qmd-files/**/*.qmd" if detailed else "qmd-files/**/document.qmd"
for entry in pathlib.Path(".").glob(pattern):
@@ -115,26 +133,37 @@ def compute_trie(detailed = False):
except KeyError:
raise Exception("No format found in %s" % entry)
for format_name, format_config in format.items():
- trie.insert(feature, {
- "feature": "/".join(feature),
- "format": format_name,
- "entry": entry,
- "format_config": format_config,
- "table_cell": table_cell(entry, feature, format_name, format_config)
- })
+ trie.insert(
+ feature,
+ {
+ "feature": "/".join(feature),
+ "format": format_name,
+ "entry": entry,
+ "format_config": format_config,
+ "table_cell": table_cell(
+ entry, feature, format_name, format_config
+ ),
+ },
+ )
return trie
-def render_features_formats_data(trie = None):
+
+def render_features_formats_data(trie=None):
if trie is None:
trie = compute_trie()
entries = trie.tabulator()
- return "```{=html}\n\n```\n" % json.dumps(entries, indent=2)
+ return (
+ "```{=html}\n\n```\n"
+ % json.dumps(entries, indent=2)
+ )
+
-def compute_quality_summary(trie = None):
+def compute_quality_summary(trie=None):
if trie is None:
trie = compute_trie()
quality_summary = {"unknown": 0, -1: 0, 0: 0, 1: 0, 2: 0, "na": 0}
n_rows = 0
+
def visit(node, _path):
nonlocal n_rows
if not node.children or len(node.values):
@@ -149,5 +178,6 @@ def visit(node, _path):
if quality_summary.get(quality) is None:
raise ValueError("Invalid quality value %s" % quality)
quality_summary[quality] += 1
+
trie.walk(visit)
- return {"n_rows": n_rows, "quality": quality_summary}
\ No newline at end of file
+ return {"n_rows": n_rows, "quality": quality_summary}
diff --git a/quarto-cli.code-workspace b/quarto-cli.code-workspace
index 8f9b1f9a39..5fbe57aa10 100644
--- a/quarto-cli.code-workspace
+++ b/quarto-cli.code-workspace
@@ -37,5 +37,6 @@
"deno.inlayHints.variableTypes.enabled": false,
"deno.inlayHints.variableTypes.suppressWhenTypeMatchesName": false
},
- "typst-lsp.exportPdf": "never"
+ "typst-lsp.exportPdf": "never",
+ "notebook.formatOnSave.enabled": false
}
diff --git a/quarto_cli/__init__.py b/quarto_cli/__init__.py
index b1123d8da9..87e13a70a7 100644
--- a/quarto_cli/__init__.py
+++ b/quarto_cli/__init__.py
@@ -4,6 +4,7 @@
from pathlib import Path
import sys
+
def find_version():
g = str((Path(__file__).parent / "quarto-*").resolve())
g = str((Path(glob.glob(g)[0]) / "bin" / "quarto").resolve())
@@ -12,8 +13,10 @@ def find_version():
g += ".exe"
return g
+
def call_quarto(*args, **kwargs):
return subprocess.run([find_version(), *sys.argv[1:], *args], **kwargs)
+
def run_quarto(*args, **kwargs):
- call_quarto(*args, **kwargs)
\ No newline at end of file
+ call_quarto(*args, **kwargs)
diff --git a/setup.py b/setup.py
index 3f63286638..2c0e9816c8 100644
--- a/setup.py
+++ b/setup.py
@@ -16,6 +16,7 @@
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("quarto_cli.egg-info", ignore_errors=True)
+
def get_platform_suffix():
if sys.platform == "darwin":
return "macos.tar.gz"
@@ -31,6 +32,7 @@ def get_platform_suffix():
else:
raise Exception("Platform not supported")
+
def download_quarto(vers):
global output_location
global quarto_data
@@ -43,8 +45,28 @@ def download_quarto(vers):
name, resp = urlretrieve(quarto_url)
except Exception as e:
print("Error downloading Quarto:", e)
- commit=subprocess.run(["git","log","-1","--skip=1","--pretty=format:'%h'","--","version.txt"], check=True, text=True, capture_output=True, shell=True).stdout
- version = subprocess.run(["git","show", commit.replace("'", "")+":version.txt"], check=True, capture_output=True, text=True, shell=True).stdout.replace("\n", "")
+ commit = subprocess.run(
+ [
+ "git",
+ "log",
+ "-1",
+ "--skip=1",
+ "--pretty=format:'%h'",
+ "--",
+ "version.txt",
+ ],
+ check=True,
+ text=True,
+ capture_output=True,
+ shell=True,
+ ).stdout
+ version = subprocess.run(
+ ["git", "show", commit.replace("'", "") + ":version.txt"],
+ check=True,
+ capture_output=True,
+ text=True,
+ shell=True,
+ ).stdout.replace("\n", "")
quarto_url = f"https://github.com/quarto-dev/quarto-cli/releases/download/v{version}/quarto-{version}-{suffix}"
name, resp = urlretrieve(quarto_url)
@@ -53,43 +75,48 @@ def download_quarto(vers):
if suffix.endswith(".zip"):
import zipfile
- with zipfile.ZipFile(name, 'r') as zip_ref:
+
+ with zipfile.ZipFile(name, "r") as zip_ref:
zip_ref.extractall(output_location)
elif suffix.startswith("linux"):
import tarfile
+
with tarfile.open(name) as tf:
tf.extractall(Path(output_location).parent.resolve())
else:
import tarfile
+
with tarfile.open(name) as tf:
tf.extractall(output_location)
for path in glob.glob(str(Path(output_location, "**")), recursive=True):
quarto_data.append(path.replace("quarto_cli" + os.path.sep, ""))
+
def cleanup_quarto():
shutil.rmtree(output_location)
+
global version
version = open("version.txt").read().strip()
download_quarto(version)
setup(
version=version,
- name='quarto_cli',
+ name="quarto_cli",
install_requires=[
- 'jupyter',
- 'nbclient',
- 'wheel',
+ "jupyter",
+ "nbclient",
+ "wheel",
],
- packages=find_packages(include=['quarto_cli', 'quarto_cli.*']),
+ packages=find_packages(include=["quarto_cli", "quarto_cli.*"]),
entry_points={
- 'console_scripts': [
- 'quarto = quarto_cli:run_quarto',
+ "console_scripts": [
+ "quarto = quarto_cli:run_quarto",
]
},
package_data={
- 'quarto_cli': quarto_data,
+ "quarto_cli": quarto_data,
},
include_package_data=True,
)
diff --git a/src/resources/capabilities/jupyter.py b/src/resources/capabilities/jupyter.py
index d30fdd4d7c..7b397b4316 100644
--- a/src/resources/capabilities/jupyter.py
+++ b/src/resources/capabilities/jupyter.py
@@ -2,35 +2,36 @@
import os
import importlib
-sys.stdout.write('versionMajor: ' + str(sys.version_info.major))
-sys.stdout.write('\nversionMinor: ' + str(sys.version_info.minor))
-sys.stdout.write('\nversionPatch: ' + str(sys.version_info.micro))
-sys.stdout.write('\nversionStr: "' + str(sys.version).replace('\n', ' ') + '"')
-if os.path.exists(os.path.join(sys.prefix, 'conda-meta', 'history')):
- sys.stdout.write('\nconda: true')
+sys.stdout.write("versionMajor: " + str(sys.version_info.major))
+sys.stdout.write("\nversionMinor: " + str(sys.version_info.minor))
+sys.stdout.write("\nversionPatch: " + str(sys.version_info.micro))
+sys.stdout.write('\nversionStr: "' + str(sys.version).replace("\n", " ") + '"')
+if os.path.exists(os.path.join(sys.prefix, "conda-meta", "history")):
+ sys.stdout.write("\nconda: true")
else:
- sys.stdout.write('\nconda: false')
+ sys.stdout.write("\nconda: false")
sys.stdout.write('\nexecPrefix: "' + sys.exec_prefix.replace("\\", "/") + '"')
sys.stdout.write('\nexecutable: "' + sys.executable.replace("\\", "/") + '"')
+
def discover_package(pkg):
- sys.stdout.write('\n' + pkg + ': ')
- v = 'null'
- try:
+ sys.stdout.write("\n" + pkg + ": ")
+ v = "null"
try:
- from importlib.metadata import version
- v = version(pkg)
- except ImportError:
- imp = importlib.import_module(pkg)
- v = str(imp.__version__)
- except Exception:
- pass
- sys.stdout.write(v)
-
-discover_package('jupyter_core')
-discover_package('nbformat')
-discover_package('nbclient')
-discover_package('ipykernel')
-discover_package('shiny')
+ try:
+ from importlib.metadata import version
+
+ v = version(pkg)
+ except ImportError:
+ imp = importlib.import_module(pkg)
+ v = str(imp.__version__)
+ except Exception:
+ pass
+ sys.stdout.write(v)
+discover_package("jupyter_core")
+discover_package("nbformat")
+discover_package("nbclient")
+discover_package("ipykernel")
+discover_package("shiny")
diff --git a/src/resources/jupyter/jupyter.py b/src/resources/jupyter/jupyter.py
index 020b7a68a3..a865e2082b 100644
--- a/src/resources/jupyter/jupyter.py
+++ b/src/resources/jupyter/jupyter.py
@@ -1,4 +1,3 @@
-
import os
import sys
import re
@@ -12,9 +11,9 @@
from socketserver import TCPServer, StreamRequestHandler
try:
- from socketserver import UnixStreamServer
+ from socketserver import UnixStreamServer
except:
- pass
+ pass
from log import log_init, log, log_error, trace
@@ -22,270 +21,263 @@
from nbclient.exceptions import CellExecutionError
import asyncio
-if sys.platform == 'win32':
- from asyncio.windows_events import *
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+if sys.platform == "win32":
+ from asyncio.windows_events import *
-class ExecuteHandler(StreamRequestHandler):
+ asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
- def handle(self):
- try:
- trace('handling server request')
+class ExecuteHandler(StreamRequestHandler):
+ def handle(self):
+ try:
+ trace("handling server request")
+
+ # read input
+ input = str(self.rfile.readline().strip(), "utf-8")
+ input = json.loads(input)
+
+ # validate secret
+ if not self.server.validate_secret(input["secret"]):
+ trace("invalid secret (exiting server)")
+ self.server.request_exit()
+ return
+ if input["command"] == "file":
+ filename = input["options"]["file"]
+ input = json.load(open(filename, "r", encoding="utf8"))
+ os.unlink(filename)
+
+ # if this is an abort command then request exit
+ command = input["command"]
+ if command == "abort":
+ trace("abort command received (exiting server)")
+ self.server.request_exit()
+ return
+
+ # options
+ options = input["options"]
+
+ # stream status back to client
+ def status(msg):
+ self.message("status", msg)
+
+ # execute the notebook
+ trace("executing notebook")
+ persist = notebook_execute(options, status)
+ if not persist:
+ trace("notebook not persistable (exiting server)")
+ self.server.request_exit()
+ else:
+ self.server.record_success()
+ except RestartKernel:
+                trace("notebook restart request received (exiting server)")
+ self.message("restart")
+ self.server.request_exit()
+ except Exception as e:
+ self.message("error", "\n\n" + str(e))
+ self.server.record_error(e)
- # read input
- input = str(self.rfile.readline().strip(), 'utf-8')
- input = json.loads(input)
+ # write a message back to the client
+ def message(self, type, data=""):
+ message = {"type": type, "data": data}
+ self.wfile.write(bytearray(json.dumps(message) + "\n", "utf-8"))
+ self.wfile.flush()
- # validate secret
- if not self.server.validate_secret(input["secret"]):
- trace('invalid secret (exiting server)')
- self.server.request_exit()
- return
- if input["command"] == "file":
- filename = input["options"]["file"]
- input = json.load(open(filename, "r", encoding="utf8"))
- os.unlink(filename)
-
- # if this is an abort command then request exit
- command = input["command"]
- if command == "abort":
- trace('abort command received (exiting server)')
- self.server.request_exit()
- return
-
- # options
- options = input["options"]
-
- # stream status back to client
- def status(msg):
- self.message("status", msg)
-
- # execute the notebook
- trace('executing notebook')
- persist = notebook_execute(options, status)
- if not persist:
- trace('notebook not persistable (exiting server)')
- self.server.request_exit()
- else:
- self.server.record_success()
- except RestartKernel:
- trace('notebook restart request recived (exiting server)')
- self.message("restart")
- self.server.request_exit()
- except Exception as e:
- self.message("error", "\n\n" + str(e))
- self.server.record_error(e)
-
- # write a message back to the client
- def message(self, type, data = ""):
- message = {
- "type": type,
- "data": data
- }
- self.wfile.write(bytearray(json.dumps(message) + "\n", 'utf-8'))
- self.wfile.flush()
-
-def execute_server(options):
- # determine server type
- is_tcp = options["type"] == "tcp"
- if is_tcp:
- base = TCPServer
- else:
- base = UnixStreamServer
-
- class ExecuteServer(base):
-
- allow_reuse_address = True
- exit_pending = False
- consecutive_errors = 0
-
- def __init__(self, options):
-
- trace('creating notebook server (' +
- options["type"] + ': ' + options["transport"]
- + ')')
-
- # set secret for tcp
- if is_tcp:
- self.secret = str(uuid.uuid4())
- else:
- self.secret = ""
-
- # server params
- self.transport = options["transport"]
- self.timeout = options["timeout"]
-
- # initialize with address (based on server type) and handler
- if is_tcp:
- server_address = ("localhost",0)
- else:
- server_address = self.transport
- super().__init__(server_address, ExecuteHandler)
-
- # if we are a tcp server then get the port number and write it
- # to the transport file. change file permissions to user r/w
- # for both tcp and unix domain sockets
- if is_tcp:
- port = self.socket.getsockname()[1]
- trace('notebook server bound to port ' + str(port))
- with open(self.transport,"w") as file:
- file.write("")
- os.chmod(self.transport, stat.S_IRUSR | stat.S_IWUSR)
- with open(self.transport,"w") as file:
- file.write(json.dumps(dict({
- "port": port,
- "secret": self.secret
- })))
- else:
- os.chmod(self.transport, stat.S_IRUSR | stat.S_IWUSR)
-
-
- def handle_request(self):
- if self.exit_pending:
+def execute_server(options):
+ # determine server type
+ is_tcp = options["type"] == "tcp"
+ if is_tcp:
+ base = TCPServer
+ else:
+ base = UnixStreamServer
+
+ class ExecuteServer(base):
+ allow_reuse_address = True
+ exit_pending = False
+ consecutive_errors = 0
+
+ def __init__(self, options):
+ trace(
+ "creating notebook server ("
+ + options["type"]
+ + ": "
+ + options["transport"]
+ + ")"
+ )
+
+ # set secret for tcp
+ if is_tcp:
+ self.secret = str(uuid.uuid4())
+ else:
+ self.secret = ""
+
+ # server params
+ self.transport = options["transport"]
+ self.timeout = options["timeout"]
+
+ # initialize with address (based on server type) and handler
+ if is_tcp:
+ server_address = ("localhost", 0)
+ else:
+ server_address = self.transport
+ super().__init__(server_address, ExecuteHandler)
+
+ # if we are a tcp server then get the port number and write it
+ # to the transport file. change file permissions to user r/w
+ # for both tcp and unix domain sockets
+ if is_tcp:
+ port = self.socket.getsockname()[1]
+ trace("notebook server bound to port " + str(port))
+ with open(self.transport, "w") as file:
+ file.write("")
+ os.chmod(self.transport, stat.S_IRUSR | stat.S_IWUSR)
+ with open(self.transport, "w") as file:
+ file.write(json.dumps(dict({"port": port, "secret": self.secret})))
+ else:
+ os.chmod(self.transport, stat.S_IRUSR | stat.S_IWUSR)
+
+ def handle_request(self):
+ if self.exit_pending:
+ self.exit()
+ super().handle_request()
+
+ def handle_timeout(self):
+ trace("request timeout (exiting server)")
self.exit()
- super().handle_request()
- def handle_timeout(self):
- trace('request timeout (exiting server)')
- self.exit()
+ def validate_secret(self, secret):
+ return self.secret == secret
- def validate_secret(self, secret):
- return self.secret == secret
+ def record_success(self):
+ self.consecutive_errors = 0
- def record_success(self):
- self.consecutive_errors = 0
+ def record_error(self, e):
+ # exit for 5 consecutive errors
+ self.consecutive_errors += 1
+ if self.consecutive_errors >= 5:
+ self.exit()
- def record_error(self, e):
- # exit for 5 consecutive errors
- self.consecutive_errors += 1
- if self.consecutive_errors >= 5:
- self.exit()
+ def request_exit(self):
+ self.exit_pending = True
- def request_exit(self):
- self.exit_pending = True
-
- def exit(self):
- try:
- trace('cleaning up server resources')
- self.server_close()
- self.remove_transport()
-
- finally:
- trace('exiting server')
- sys.exit(0)
-
- def remove_transport(self):
- try:
- if os.path.exists(self.transport):
- os.remove(self.transport)
- except:
- pass
-
- return ExecuteServer(options)
-
-
-def run_server(options):
- try:
- with execute_server(options) as server:
- while True:
- server.handle_request()
- except Exception as e:
- log_error("Unable to run server", exc_info = e)
+ def exit(self):
+ try:
+ trace("cleaning up server resources")
+ self.server_close()
+ self.remove_transport()
-# run a server as a detached subprocess
-def run_server_subprocess(options, status):
+ finally:
+ trace("exiting server")
+ sys.exit(0)
+
+ def remove_transport(self):
+ try:
+ if os.path.exists(self.transport):
+ os.remove(self.transport)
+ except:
+ pass
- # python executable
- python_exe = sys.executable
+ return ExecuteServer(options)
- # detached process flags for windows
- flags = 0
- if sys.platform == 'win32':
- python_exe = re.sub('python\\.exe$', 'pythonw.exe', python_exe)
- flags |= 0x00000008 # DETACHED_PROCESS
- flags |= 0x00000200 # CREATE_NEW_PROCESS_GROUP
- flags |= 0x08000000 # CREATE_NO_WINDOW
- flags |= 0x01000000 # CREATE_BREAKAWAY_FROM_JOB
- # forward options via env vars
- os.environ["QUARTO_JUPYTER_OPTIONS"] = json.dumps(options)
+def run_server(options):
+ try:
+ with execute_server(options) as server:
+ while True:
+ server.handle_request()
+ except Exception as e:
+ log_error("Unable to run server", exc_info=e)
- # create subprocess
- subprocess.Popen([python_exe] + sys.argv + ["serve"],
- stdin = subprocess.DEVNULL,
- stdout = subprocess.DEVNULL,
- stderr = subprocess.DEVNULL,
- creationflags = flags,
- close_fds = True,
- start_new_session = True
- )
+
+# run a server as a detached subprocess
+def run_server_subprocess(options, status):
+ # python executable
+ python_exe = sys.executable
+
+ # detached process flags for windows
+ flags = 0
+ if sys.platform == "win32":
+ python_exe = re.sub("python\\.exe$", "pythonw.exe", python_exe)
+ flags |= 0x00000008 # DETACHED_PROCESS
+ flags |= 0x00000200 # CREATE_NEW_PROCESS_GROUP
+ flags |= 0x08000000 # CREATE_NO_WINDOW
+ flags |= 0x01000000 # CREATE_BREAKAWAY_FROM_JOB
+
+ # forward options via env vars
+ os.environ["QUARTO_JUPYTER_OPTIONS"] = json.dumps(options)
+
+ # create subprocess
+ subprocess.Popen(
+ [python_exe] + sys.argv + ["serve"],
+ stdin=subprocess.DEVNULL,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ creationflags=flags,
+ close_fds=True,
+ start_new_session=True,
+ )
# run a notebook directly (not a server)
def run_notebook(options, status):
-
- # run notebook w/ some special exception handling. note that we don't
- # log exceptions here b/c they are considered normal course of execution
- # for errors that occur in notebook cells
- try:
- trace('Running notebook_execute')
- notebook_execute(options, status)
- except Exception as e:
- trace(f'run_notebook caught exception: {type(e).__name__}')
- # CellExecutionError for execution at the terminal includes a bunch
- # of extra stack frames internal to this script. remove them
- msg = str(e)
- kCellExecutionError = "nbclient.exceptions.CellExecutionError: "
- loc = msg.find(kCellExecutionError)
- if loc != -1:
- msg = msg[loc + len(kCellExecutionError):]
- status("\n\n" + msg + "\n")
- sys.exit(1)
+ # run notebook w/ some special exception handling. note that we don't
+ # log exceptions here b/c they are considered normal course of execution
+ # for errors that occur in notebook cells
+ try:
+ trace("Running notebook_execute")
+ notebook_execute(options, status)
+ except Exception as e:
+ trace(f"run_notebook caught exception: {type(e).__name__}")
+ # CellExecutionError for execution at the terminal includes a bunch
+ # of extra stack frames internal to this script. remove them
+ msg = str(e)
+ kCellExecutionError = "nbclient.exceptions.CellExecutionError: "
+ loc = msg.find(kCellExecutionError)
+ if loc != -1:
+ msg = msg[loc + len(kCellExecutionError) :]
+ status("\n\n" + msg + "\n")
+ sys.exit(1)
if __name__ == "__main__":
-
- # stream status to stderr
- def status(msg):
- sys.stderr.write(msg)
- sys.stderr.flush()
-
- try:
- # read command from cmd line if it's there (in that case
- # options are passed via environment variable)
- if len(sys.argv) > 1:
- command = sys.argv[1]
- options = json.loads(os.getenv("QUARTO_JUPYTER_OPTIONS"))
- del os.environ["QUARTO_JUPYTER_OPTIONS"]
- # otherwise read from stdin
- else:
- input = json.load(sys.stdin)
- command = input["command"]
- options = input["options"]
-
- # initialize log
- log_init(options["log"], options["debug"])
-
- # start the server (creates a new detached process, we implement this here
- # only b/c Deno doesn't currently support detaching spawned processes)
- if command == "start":
- trace('starting notebook server subprocess')
- run_server_subprocess(options, status)
-
- # serve a notebook (invoked by run_server_subprocess)
- elif command == "serve":
- trace('running notebook server subprocess')
- run_server(options)
-
- # execute a notebook and then quit
- elif command == "execute":
- trace('running notebook without keepalive')
- run_notebook(options, status)
-
- except Exception as e:
- log_error("Unable to run notebook", exc_info = e)
- sys.exit(1)
-
+ # stream status to stderr
+ def status(msg):
+ sys.stderr.write(msg)
+ sys.stderr.flush()
+
+ try:
+ # read command from cmd line if it's there (in that case
+ # options are passed via environment variable)
+ if len(sys.argv) > 1:
+ command = sys.argv[1]
+ options = json.loads(os.getenv("QUARTO_JUPYTER_OPTIONS"))
+ del os.environ["QUARTO_JUPYTER_OPTIONS"]
+ # otherwise read from stdin
+ else:
+ input = json.load(sys.stdin)
+ command = input["command"]
+ options = input["options"]
+
+ # initialize log
+ log_init(options["log"], options["debug"])
+
+ # start the server (creates a new detached process, we implement this here
+ # only b/c Deno doesn't currently support detaching spawned processes)
+ if command == "start":
+ trace("starting notebook server subprocess")
+ run_server_subprocess(options, status)
+
+ # serve a notebook (invoked by run_server_subprocess)
+ elif command == "serve":
+ trace("running notebook server subprocess")
+ run_server(options)
+
+ # execute a notebook and then quit
+ elif command == "execute":
+ trace("running notebook without keepalive")
+ run_notebook(options, status)
+
+ except Exception as e:
+ log_error("Unable to run notebook", exc_info=e)
+ sys.exit(1)
diff --git a/src/resources/jupyter/jupyter_core_utils_vendor.py b/src/resources/jupyter/jupyter_core_utils_vendor.py
index 8861e54e7a..068ef9d5a9 100644
--- a/src/resources/jupyter/jupyter_core_utils_vendor.py
+++ b/src/resources/jupyter/jupyter_core_utils_vendor.py
@@ -36,7 +36,9 @@ def run(self, coro):
name = f"{threading.current_thread().name} - runner"
if self.__io_loop is None:
self.__io_loop = asyncio.new_event_loop()
- self.__runner_thread = threading.Thread(target=self._runner, daemon=True, name=name)
+ self.__runner_thread = threading.Thread(
+ target=self._runner, daemon=True, name=name
+ )
self.__runner_thread.start()
fut = asyncio.run_coroutine_threadsafe(coro, self.__io_loop)
return fut.result(None)
@@ -47,6 +49,7 @@ def run(self, coro):
T = TypeVar("T")
+
def run_sync(coro: Callable[..., Awaitable[T]]) -> Callable[..., T]:
"""Wraps coroutine in a function that blocks until it has executed.
diff --git a/src/resources/jupyter/log.py b/src/resources/jupyter/log.py
index 34f1b7e177..d23ac07706 100644
--- a/src/resources/jupyter/log.py
+++ b/src/resources/jupyter/log.py
@@ -1,4 +1,3 @@
-
import sys
import logging
import os
@@ -6,42 +5,52 @@
TRACE = 25
-def log_init(log_file, trace = False):
- # set level
- logger = logging.getLogger()
- if trace:
- logger.setLevel(TRACE)
- else:
- logger.setLevel(logging.WARNING)
- global file_handler
- # create handlers
- stderr_handler = logging.StreamHandler(sys.stderr)
- file_handler = logging.FileHandler(log_file)
+def log_init(log_file, trace=False):
+ # set level
+ logger = logging.getLogger()
+ if trace:
+ logger.setLevel(TRACE)
+ else:
+ logger.setLevel(logging.WARNING)
+
+ global file_handler
+ # create handlers
+ stderr_handler = logging.StreamHandler(sys.stderr)
+ file_handler = logging.FileHandler(log_file)
- # create formatter and attach to handlers
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
- stderr_handler.setFormatter(formatter)
- file_handler.setFormatter(formatter)
+ # create formatter and attach to handlers
+ formatter = logging.Formatter(
+ "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+ )
+ stderr_handler.setFormatter(formatter)
+ file_handler.setFormatter(formatter)
+
+ # add handlers
+ logger.addHandler(stderr_handler)
+ logger.addHandler(file_handler)
- # add handlers
- logger.addHandler(stderr_handler)
- logger.addHandler(file_handler)
# force flushing so programs which hang still produce output
if os.getenv("QUARTO_JUPYTER_FLUSH_LOGS"):
- def log(level, msg):
- logging.getLogger().log(level, msg)
- file_handler.flush()
+
+ def log(level, msg):
+ logging.getLogger().log(level, msg)
+ file_handler.flush()
else:
- def log(level, msg):
- logging.getLogger().log(level, msg)
-def log_error(msg, exc_info = False, stack_info = None):
- if stack_info is None:
- stack_info = not exc_info
- logging.getLogger().log(logging.ERROR, msg, exc_info = exc_info, stack_info = stack_info)
+ def log(level, msg):
+ logging.getLogger().log(level, msg)
+
+
+def log_error(msg, exc_info=False, stack_info=None):
+ if stack_info is None:
+ stack_info = not exc_info
+ logging.getLogger().log(
+ logging.ERROR, msg, exc_info=exc_info, stack_info=stack_info
+ )
+
def trace(msg):
- prev_frame = inspect.stack()[1]
- log(TRACE, "%s:%s - %s" % (prev_frame.filename, prev_frame.lineno, msg))
+ prev_frame = inspect.stack()[1]
+ log(TRACE, "%s:%s - %s" % (prev_frame.filename, prev_frame.lineno, msg))
diff --git a/src/resources/jupyter/notebook.py b/src/resources/jupyter/notebook.py
index dabc96bbbf..2d4ca30152 100644
--- a/src/resources/jupyter/notebook.py
+++ b/src/resources/jupyter/notebook.py
@@ -24,820 +24,863 @@
# optional import of papermill for params support
try:
- from papermill import translators as papermill_translate
+ from papermill import translators as papermill_translate
except ImportError:
- papermill_translate = None
+ papermill_translate = None
# optional import of jupyter-cache
try:
- from jupyter_cache import get_cache
+ from jupyter_cache import get_cache
except ImportError:
- get_cache = None
+ get_cache = None
NB_FORMAT_VERSION = 4
+
def get_language_from_nb_metadata(metadata):
- ks_lang = metadata.kernelspec.get("language", None)
- li_name = None
- li = metadata.get("language_info", None)
- if li:
- li_name = metadata.language_info.get("name", None)
- return ks_lang or li_name
+ ks_lang = metadata.kernelspec.get("language", None)
+ li_name = None
+ li = metadata.get("language_info", None)
+ if li:
+ li_name = metadata.language_info.get("name", None)
+ return ks_lang or li_name
+
# exception to indicate the kernel needs restarting
class RestartKernel(Exception):
- pass
+ pass
+
def build_kernel_options(options):
- # unpack options
- format = options["format"]
- resource_dir = options["resourceDir"]
- params = options.get("params", None)
- run_path = options.get("cwd", "")
- quiet = options.get('quiet', False)
-
- # read variables out of format
- execute = format["execute"]
-
- # evaluation
- eval = execute["eval"]
- allow_errors = bool(execute["error"])
-
- # figures
- fig_width = execute["fig-width"]
- fig_height = execute["fig-height"]
- fig_format = execute["fig-format"]
- fig_dpi = execute["fig-dpi"]
-
- # shell interactivity
- interactivity = execute["ipynb-shell-interactivity"]
- if interactivity == None: interactivity = ''
-
- # plotly connected
- plotly_connected = execute["plotly-connected"]
-
- # server: shiny
- metadata = format["metadata"]
- if "server" in metadata and "type" in metadata["server"] and metadata["server"]["type"] == "shiny":
- is_shiny = True
- else:
- is_shiny = False
-
- # dashboard
- is_dashboard = format["identifier"]["base-format"] == "dashboard"
-
- # caching
- if "cache" in execute:
- cache = execute["cache"]
- else:
- cache = "user"
-
- return {
- "format": format,
- "resource_dir": resource_dir,
- "params": params,
- "run_path": run_path,
- "quiet": quiet,
- "eval": eval,
- "allow_errors": allow_errors,
- "fig_width": fig_width,
- "fig_height": fig_height,
- "fig_format": fig_format,
- "fig_dpi": fig_dpi,
- "interactivity": interactivity,
- "plotly_connected": plotly_connected,
- "is_shiny": is_shiny,
- "is_dashboard": is_dashboard,
- "cache": cache
- }
+ # unpack options
+ format = options["format"]
+ resource_dir = options["resourceDir"]
+ params = options.get("params", None)
+ run_path = options.get("cwd", "")
+ quiet = options.get("quiet", False)
+
+ # read variables out of format
+ execute = format["execute"]
+
+ # evaluation
+ eval = execute["eval"]
+ allow_errors = bool(execute["error"])
+
+ # figures
+ fig_width = execute["fig-width"]
+ fig_height = execute["fig-height"]
+ fig_format = execute["fig-format"]
+ fig_dpi = execute["fig-dpi"]
+
+ # shell interactivity
+ interactivity = execute["ipynb-shell-interactivity"]
+ if interactivity == None:
+ interactivity = ""
+
+ # plotly connected
+ plotly_connected = execute["plotly-connected"]
+
+ # server: shiny
+ metadata = format["metadata"]
+ if (
+ "server" in metadata
+ and "type" in metadata["server"]
+ and metadata["server"]["type"] == "shiny"
+ ):
+ is_shiny = True
+ else:
+ is_shiny = False
+
+ # dashboard
+ is_dashboard = format["identifier"]["base-format"] == "dashboard"
+
+ # caching
+ if "cache" in execute:
+ cache = execute["cache"]
+ else:
+ cache = "user"
+
+ return {
+ "format": format,
+ "resource_dir": resource_dir,
+ "params": params,
+ "run_path": run_path,
+ "quiet": quiet,
+ "eval": eval,
+ "allow_errors": allow_errors,
+ "fig_width": fig_width,
+ "fig_height": fig_height,
+ "fig_format": fig_format,
+ "fig_dpi": fig_dpi,
+ "interactivity": interactivity,
+ "plotly_connected": plotly_connected,
+ "is_shiny": is_shiny,
+ "is_dashboard": is_dashboard,
+ "cache": cache,
+ }
+
def set_env_vars(options):
- os.environ["QUARTO_FIG_WIDTH"] = str(options["fig_width"])
- os.environ["QUARTO_FIG_HEIGHT"] = str(options["fig_height"])
- if options["fig_format"] == "retina":
- os.environ["QUARTO_FIG_DPI"] = str(options["fig_dpi"] * 2)
- os.environ["QUARTO_FIG_FORMAT"] = "png"
- else:
- os.environ["QUARTO_FIG_DPI"] = str(options["fig_dpi"])
- os.environ["QUARTO_FIG_FORMAT"] = options["fig_format"]
+ os.environ["QUARTO_FIG_WIDTH"] = str(options["fig_width"])
+ os.environ["QUARTO_FIG_HEIGHT"] = str(options["fig_height"])
+ if options["fig_format"] == "retina":
+ os.environ["QUARTO_FIG_DPI"] = str(options["fig_dpi"] * 2)
+ os.environ["QUARTO_FIG_FORMAT"] = "png"
+ else:
+ os.environ["QUARTO_FIG_DPI"] = str(options["fig_dpi"])
+ os.environ["QUARTO_FIG_FORMAT"] = options["fig_format"]
+
def retrieve_nb_from_cache(nb, status, input, **kwargs):
- cache = kwargs["cache"]
- # are we using the cache, if so connect to the cache, and then if we aren't in 'refresh'
- # (forced re-execution) mode then try to satisfy the execution request from the cache
- if cache == True or cache == "refresh":
- trace('using cache')
- if not get_cache:
- raise ImportError('The jupyter-cache package is required for cached execution')
- trace('getting cache')
- # Respect env var used to modify default cache dir
- # https://jupyter-cache.readthedocs.io/en/latest/using/cli.html
- nb_cache = get_cache(os.getenv('JUPYTERCACHE', '.jupyter_cache'))
- if not cache == "refresh":
- cached_nb = nb_from_cache(nb, nb_cache)
- if cached_nb:
- cached_nb.cells.pop(0)
- nb_write(cached_nb, input)
- status("(Notebook read from cache)\n\n")
- trace('(Notebook read from cache)')
- return True # can persist kernel
- else:
- trace('not using cache')
- nb_cache = None
- return nb_cache
+ cache = kwargs["cache"]
+ # are we using the cache, if so connect to the cache, and then if we aren't in 'refresh'
+ # (forced re-execution) mode then try to satisfy the execution request from the cache
+ if cache == True or cache == "refresh":
+ trace("using cache")
+ if not get_cache:
+ raise ImportError(
+ "The jupyter-cache package is required for cached execution"
+ )
+ trace("getting cache")
+ # Respect env var used to modify default cache dir
+ # https://jupyter-cache.readthedocs.io/en/latest/using/cli.html
+ nb_cache = get_cache(os.getenv("JUPYTERCACHE", ".jupyter_cache"))
+ if not cache == "refresh":
+ cached_nb = nb_from_cache(nb, nb_cache)
+ if cached_nb:
+ cached_nb.cells.pop(0)
+ nb_write(cached_nb, input)
+ status("(Notebook read from cache)\n\n")
+ trace("(Notebook read from cache)")
+ return True # can persist kernel
+ else:
+ trace("not using cache")
+ nb_cache = None
+ return nb_cache
+
# check if the kernel needs to be restarted
# and records necessary state for the next execution
#
# TODO why is the state here set on the function?
def check_for_kernel_restart(options):
- # if this is a re-execution of a previously loaded kernel,
- # make sure the underlying python version hasn't changed
- python_cmd = options.get("python_cmd", None)
- if python_cmd:
- if hasattr(notebook_execute, "python_cmd"):
- if notebook_execute.python_cmd != python_cmd:
- return True
- else:
- notebook_execute.python_cmd = python_cmd
-
- # if there is a supervisor_id then abort if it has changed
- supervisor_pid = options.get("supervisor_pid", None)
- if supervisor_pid:
- if hasattr(notebook_execute, "supervisor_pid"):
- if notebook_execute.supervisor_pid != supervisor_pid:
- return True
- else:
- notebook_execute.supervisor_pid = supervisor_pid
+ # if this is a re-execution of a previously loaded kernel,
+ # make sure the underlying python version hasn't changed
+ python_cmd = options.get("python_cmd", None)
+ if python_cmd:
+ if hasattr(notebook_execute, "python_cmd"):
+ if notebook_execute.python_cmd != python_cmd:
+ return True
+ else:
+ notebook_execute.python_cmd = python_cmd
+
+ # if there is a supervisor_id then abort if it has changed
+ supervisor_pid = options.get("supervisor_pid", None)
+ if supervisor_pid:
+ if hasattr(notebook_execute, "supervisor_pid"):
+ if notebook_execute.supervisor_pid != supervisor_pid:
+ return True
+ else:
+ notebook_execute.supervisor_pid = supervisor_pid
+
# execute a notebook
def notebook_execute(options, status):
- trace('inside notebook_execute')
- if check_for_kernel_restart(options):
- raise RestartKernel
-
- # change working directory and strip dir off of paths
- original_input = options["target"]["input"]
- os.chdir(Path(original_input).parent)
- input = Path(original_input).name
-
- quarto_kernel_setup_options = build_kernel_options(options)
- quarto_kernel_setup_options["input"] = input
- allow_errors = quarto_kernel_setup_options["allow_errors"]
- quiet = quarto_kernel_setup_options["quiet"]
- resource_dir = quarto_kernel_setup_options["resource_dir"]
- eval = quarto_kernel_setup_options["eval"]
-
- # set environment variables
- set_env_vars(quarto_kernel_setup_options)
-
- # read the notebook
- nb = nbformat.read(input, as_version = NB_FORMAT_VERSION)
-
- trace('notebook was read')
- # inject parameters if provided
- if quarto_kernel_setup_options["params"]:
- nb_parameterize(nb, quarto_kernel_setup_options["params"])
-
- # insert setup cell
- setup_cell = nb_setup_cell(nb, quarto_kernel_setup_options)
- nb.cells.insert(0, setup_cell)
-
- nb_cache = retrieve_nb_from_cache(nb, status, **quarto_kernel_setup_options)
- if nb_cache == True:
- return True # True indicates notebook read from cache, and hence kernel can be persisted
-
- # create resources for execution
- resources = dict({
- "metadata": {
- "input": original_input,
- }
- })
- if quarto_kernel_setup_options["run_path"]:
- resources["metadata"]["path"] = quarto_kernel_setup_options["run_path"]
-
- trace("Will attempt to create notebook")
- # create NotebookClient
- trace("type of notebook: {0}".format(type(nb)))
- client, created = notebook_init(nb, resources, allow_errors)
-
- msg = client.kc.session.msg("comm_open", {
- 'comm_id': 'quarto_comm',
- 'target_name': 'quarto_kernel_setup',
- 'data': {
- "options": quarto_kernel_setup_options
- }
- })
- client.kc.shell_channel.send(msg)
-
- trace("NotebookClient created")
-
- # complete progress if necessary
- if (not quiet) and created:
- status("Done\n")
-
- current_code_cell = 1
- total_code_cells = 0
- cell_labels = []
- max_label_len = 0
-
- kernel_supports_daemonization = False
-
- def handle_quarto_metadata(cell):
- def handle_meta_object(obj):
- nonlocal kernel_supports_daemonization
- if hasattr(obj, "quarto"):
- qm = obj["quarto"]
- if qm.get("restart_kernel"):
- raise RestartKernel
- if qm.get("daemonize"):
- kernel_supports_daemonization = True
- trace("Kernel is daemonizable from cell metadata")
- handle_meta_object(cell.get("metadata", {}))
- for output in cell.get("outputs", []):
- handle_meta_object(output.get("metadata", {}))
-
- for cell in client.nb.cells:
- # compute total code cells (for progress)
- if cell.cell_type == 'code':
- total_code_cells += 1
- # map cells to their labels
- language = get_language_from_nb_metadata(client.nb.metadata)
- label = nb_cell_yaml_options(language, cell).get('label', '')
- cell_labels.append(label)
- # find max label length
- max_label_len = max(max_label_len, len(label))
-
- # execute the cells
- for index, cell in enumerate(client.nb.cells):
- cell_label = cell_labels[index]
- padding = "." * (max_label_len - len(cell_label))
-
- # progress
- progress = (not quiet) and cell.cell_type == 'code' and index > 0
- if progress:
- status(" Cell {0}/{1}: '{2}'{3}...".format(
- current_code_cell - 1,
- total_code_cells - 1,
- cell_label,
- padding
- ))
-
- # clear cell output
- cell = cell_clear_output(cell)
-
- # execute cell
- trace("Executing cell {0}".format(index))
-
- if cell.cell_type == 'code':
- cell = cell_execute(
- client,
- cell,
- index,
- current_code_cell,
- eval,
- index > 0 # add_to_history
- )
- cell.execution_count = current_code_cell
- elif cell.cell_type == 'markdown':
- cell = cell_execute_inline(client, cell)
-
- trace("Executed cell {0}".format(index))
-
- # if this was the setup cell, see if we need to exit b/c dependencies are out of date
- if index == 0:
- # confirm kernel_deps haven't changed (restart if they have)
- if hasattr(notebook_execute, "kernel_deps"):
- kernel_deps = nb_kernel_dependencies(cell)
- if kernel_deps:
- kernel_supports_daemonization = True
- for path in kernel_deps.keys():
- if path in notebook_execute.kernel_deps.keys():
- if notebook_execute.kernel_deps[path] != kernel_deps[path]:
- raise RestartKernel
- else:
- notebook_execute.kernel_deps[path] = kernel_deps[path]
-
- trace("Handling quarto metadata")
- trace(json.dumps(cell, indent=2))
- # also do it through cell metadata
- handle_quarto_metadata(cell)
-
- # we are done w/ setup (with no restarts) so it's safe to print 'Executing...'
- if not quiet:
- status("\nExecuting '{0}'\n".format(input))
-
- # assign cell
- client.nb.cells[index] = cell
-
- # increment current code cell
- if cell.cell_type == 'code':
- current_code_cell += 1
-
- # end progress
- if progress:
- status("Done\n")
- trace("Done")
-
- trace("Notebook execution complete")
-
- # set widgets metadata
- client.set_widgets_metadata()
-
- # write to the cache
- if nb_cache:
- nb_write(client.nb, input)
- nb_cache.cache_notebook_file(path = Path(input), overwrite = True)
-
- # remove setup cell (then renumber execution_Count)
- client.nb.cells.pop(0)
- for index, cell in enumerate(client.nb.cells):
- if cell.cell_type == 'code':
- cell.execution_count = cell.execution_count - 1
-
- # re-write without setup cell
- nb_write(client.nb, input)
-
- # execute cleanup cell
- cleanup_cell = nb_cleanup_cell(nb, resource_dir)
- if cleanup_cell:
- kernel_supports_daemonization = True
- nb.cells.append(cleanup_cell)
- client.execute_cell(
- cell = cleanup_cell,
- cell_index = len(client.nb.cells) - 1,
- store_history = False
- )
- nb.cells.pop()
-
- # record kernel deps after execution (picks up imports that occurred
- # witihn the notebook cells)
- kernel_deps = nb_kernel_dependencies(cleanup_cell)
- if kernel_deps:
- notebook_execute.kernel_deps = kernel_deps
- else:
- notebook_execute.kernel_deps = {}
-
- # progress
- if not quiet:
- status("\n")
-
- # return flag indicating whether we should persist
- return kernel_supports_daemonization
+ trace("inside notebook_execute")
+ if check_for_kernel_restart(options):
+ raise RestartKernel
+
+ # change working directory and strip dir off of paths
+ original_input = options["target"]["input"]
+ os.chdir(Path(original_input).parent)
+ input = Path(original_input).name
+
+ quarto_kernel_setup_options = build_kernel_options(options)
+ quarto_kernel_setup_options["input"] = input
+ allow_errors = quarto_kernel_setup_options["allow_errors"]
+ quiet = quarto_kernel_setup_options["quiet"]
+ resource_dir = quarto_kernel_setup_options["resource_dir"]
+ eval = quarto_kernel_setup_options["eval"]
+
+ # set environment variables
+ set_env_vars(quarto_kernel_setup_options)
+
+ # read the notebook
+ nb = nbformat.read(input, as_version=NB_FORMAT_VERSION)
+
+ trace("notebook was read")
+ # inject parameters if provided
+ if quarto_kernel_setup_options["params"]:
+ nb_parameterize(nb, quarto_kernel_setup_options["params"])
+
+ # insert setup cell
+ setup_cell = nb_setup_cell(nb, quarto_kernel_setup_options)
+ nb.cells.insert(0, setup_cell)
+
+ nb_cache = retrieve_nb_from_cache(nb, status, **quarto_kernel_setup_options)
+ if nb_cache == True:
+ return True # True indicates notebook read from cache, and hence kernel can be persisted
+
+ # create resources for execution
+ resources = dict(
+ {
+ "metadata": {
+ "input": original_input,
+ }
+ }
+ )
+ if quarto_kernel_setup_options["run_path"]:
+ resources["metadata"]["path"] = quarto_kernel_setup_options["run_path"]
+
+ trace("Will attempt to create notebook")
+ # create NotebookClient
+ trace("type of notebook: {0}".format(type(nb)))
+ client, created = notebook_init(nb, resources, allow_errors)
+
+ msg = client.kc.session.msg(
+ "comm_open",
+ {
+ "comm_id": "quarto_comm",
+ "target_name": "quarto_kernel_setup",
+ "data": {"options": quarto_kernel_setup_options},
+ },
+ )
+ client.kc.shell_channel.send(msg)
+
+ trace("NotebookClient created")
+
+ # complete progress if necessary
+ if (not quiet) and created:
+ status("Done\n")
+
+ current_code_cell = 1
+ total_code_cells = 0
+ cell_labels = []
+ max_label_len = 0
+
+ kernel_supports_daemonization = False
+
+ def handle_quarto_metadata(cell):
+ def handle_meta_object(obj):
+ nonlocal kernel_supports_daemonization
+ if hasattr(obj, "quarto"):
+ qm = obj["quarto"]
+ if qm.get("restart_kernel"):
+ raise RestartKernel
+ if qm.get("daemonize"):
+ kernel_supports_daemonization = True
+ trace("Kernel is daemonizable from cell metadata")
+
+ handle_meta_object(cell.get("metadata", {}))
+ for output in cell.get("outputs", []):
+ handle_meta_object(output.get("metadata", {}))
+
+ for cell in client.nb.cells:
+ # compute total code cells (for progress)
+ if cell.cell_type == "code":
+ total_code_cells += 1
+ # map cells to their labels
+ language = get_language_from_nb_metadata(client.nb.metadata)
+ label = nb_cell_yaml_options(language, cell).get("label", "")
+ cell_labels.append(label)
+ # find max label length
+ max_label_len = max(max_label_len, len(label))
+
+ # execute the cells
+ for index, cell in enumerate(client.nb.cells):
+ cell_label = cell_labels[index]
+ padding = "." * (max_label_len - len(cell_label))
+
+ # progress
+ progress = (not quiet) and cell.cell_type == "code" and index > 0
+ if progress:
+ status(
+ " Cell {0}/{1}: '{2}'{3}...".format(
+ current_code_cell - 1, total_code_cells - 1, cell_label, padding
+ )
+ )
+
+ # clear cell output
+ cell = cell_clear_output(cell)
+
+ # execute cell
+ trace("Executing cell {0}".format(index))
+
+ if cell.cell_type == "code":
+ cell = cell_execute(
+ client,
+ cell,
+ index,
+ current_code_cell,
+ eval,
+ index > 0, # add_to_history
+ )
+ cell.execution_count = current_code_cell
+ elif cell.cell_type == "markdown":
+ cell = cell_execute_inline(client, cell)
+
+ trace("Executed cell {0}".format(index))
+
+ # if this was the setup cell, see if we need to exit b/c dependencies are out of date
+ if index == 0:
+ # confirm kernel_deps haven't changed (restart if they have)
+ if hasattr(notebook_execute, "kernel_deps"):
+ kernel_deps = nb_kernel_dependencies(cell)
+ if kernel_deps:
+ kernel_supports_daemonization = True
+ for path in kernel_deps.keys():
+ if path in notebook_execute.kernel_deps.keys():
+ if notebook_execute.kernel_deps[path] != kernel_deps[path]:
+ raise RestartKernel
+ else:
+ notebook_execute.kernel_deps[path] = kernel_deps[path]
+
+ trace("Handling quarto metadata")
+ trace(json.dumps(cell, indent=2))
+ # also do it through cell metadata
+ handle_quarto_metadata(cell)
+
+ # we are done w/ setup (with no restarts) so it's safe to print 'Executing...'
+ if not quiet:
+ status("\nExecuting '{0}'\n".format(input))
+
+ # assign cell
+ client.nb.cells[index] = cell
+
+ # increment current code cell
+ if cell.cell_type == "code":
+ current_code_cell += 1
+
+ # end progress
+ if progress:
+ status("Done\n")
+ trace("Done")
+
+ trace("Notebook execution complete")
+
+ # set widgets metadata
+ client.set_widgets_metadata()
+
+ # write to the cache
+ if nb_cache:
+ nb_write(client.nb, input)
+ nb_cache.cache_notebook_file(path=Path(input), overwrite=True)
+
+ # remove setup cell (then renumber execution_Count)
+ client.nb.cells.pop(0)
+ for index, cell in enumerate(client.nb.cells):
+ if cell.cell_type == "code":
+ cell.execution_count = cell.execution_count - 1
+
+ # re-write without setup cell
+ nb_write(client.nb, input)
+
+ # execute cleanup cell
+ cleanup_cell = nb_cleanup_cell(nb, resource_dir)
+ if cleanup_cell:
+ kernel_supports_daemonization = True
+ nb.cells.append(cleanup_cell)
+ client.execute_cell(
+ cell=cleanup_cell, cell_index=len(client.nb.cells) - 1, store_history=False
+ )
+ nb.cells.pop()
+
+ # record kernel deps after execution (picks up imports that occurred
+ # witihn the notebook cells)
+ kernel_deps = nb_kernel_dependencies(cleanup_cell)
+ if kernel_deps:
+ notebook_execute.kernel_deps = kernel_deps
+ else:
+ notebook_execute.kernel_deps = {}
+
+ # progress
+ if not quiet:
+ status("\n")
+
+ # return flag indicating whether we should persist
+ return kernel_supports_daemonization
-def notebook_init(nb, resources, allow_errors):
- created = False
- if not hasattr(notebook_init, "client"):
-
- trace("Creating NotebookClient")
- # create notebook client
- client = NotebookClient(nb, resources = resources)
- client.allow_errors = allow_errors
- client.record_timing = False
- client.create_kernel_manager()
- client.start_new_kernel()
- client.start_new_kernel_client()
-
- async def get_info():
- i = client.kc.kernel_info()
- if asyncio.isfuture(i):
- return await i
- else:
- return i
- info = run_sync(get_info)()
-
- info_msg = client.wait_for_reply(info)
- client.nb.metadata['language_info'] = info_msg['content']['language_info']
- notebook_init.client = client
- created = True
-
- # cleanup kernel at process exit
- atexit.register(client._cleanup_kernel)
-
- else:
- # if the kernel has changed we need to force a restart
- if nb.metadata.kernelspec.name != notebook_init.client.nb.metadata.kernelspec.name:
- raise RestartKernel
-
- # if the input file has changed we need to force a restart
- if resources["metadata"]["input"] != notebook_init.client.resources["metadata"]["input"]:
- raise RestartKernel
-
- # set the new notebook, resources, etc.
- notebook_init.client.nb = nb
- notebook_init.client.allow_errors = allow_errors
-
- return (notebook_init.client, created)
+def notebook_init(nb, resources, allow_errors):
+ created = False
+ if not hasattr(notebook_init, "client"):
+ trace("Creating NotebookClient")
+ # create notebook client
+ client = NotebookClient(nb, resources=resources)
+ client.allow_errors = allow_errors
+ client.record_timing = False
+ client.create_kernel_manager()
+ client.start_new_kernel()
+ client.start_new_kernel_client()
+
+ async def get_info():
+ i = client.kc.kernel_info()
+ if asyncio.isfuture(i):
+ return await i
+ else:
+ return i
+
+ info = run_sync(get_info)()
+
+ info_msg = client.wait_for_reply(info)
+ client.nb.metadata["language_info"] = info_msg["content"]["language_info"]
+ notebook_init.client = client
+ created = True
+
+ # cleanup kernel at process exit
+ atexit.register(client._cleanup_kernel)
+
+ else:
+ # if the kernel has changed we need to force a restart
+ if (
+ nb.metadata.kernelspec.name
+ != notebook_init.client.nb.metadata.kernelspec.name
+ ):
+ raise RestartKernel
+
+ # if the input file has changed we need to force a restart
+ if (
+ resources["metadata"]["input"]
+ != notebook_init.client.resources["metadata"]["input"]
+ ):
+ raise RestartKernel
+
+ # set the new notebook, resources, etc.
+ notebook_init.client.nb = nb
+ notebook_init.client.allow_errors = allow_errors
+
+ return (notebook_init.client, created)
def nb_write(nb, input):
- nbformat.write(nb, input, version = NB_FORMAT_VERSION)
+ nbformat.write(nb, input, version=NB_FORMAT_VERSION)
+
def nb_setup_cell(nb, options):
- options = dict(options)
- options["allow_empty"] = True
- return nb_language_cell('setup', nb, **options)
+ options = dict(options)
+ options["allow_empty"] = True
+ return nb_language_cell("setup", nb, **options)
+
def nb_cleanup_cell(nb, resource_dir):
- return nb_language_cell('cleanup', nb, resource_dir, False)
+ return nb_language_cell("cleanup", nb, resource_dir, False)
+
def nb_language_cell(name, nb, resource_dir, allow_empty, **args):
- kernelspec = nb.metadata.kernelspec
- language = get_language_from_nb_metadata(nb.metadata)
- trace(json.dumps(nb.metadata, indent=2))
- source = ''
- lang_dir = os.path.join(resource_dir, 'jupyter', 'lang', language)
- if os.path.isdir(lang_dir):
- cell_file = glob.glob(os.path.join(lang_dir, name + '.*'))
- # base64-encode the run_path given
- args['run_path'] = base64.b64encode(args.get('run_path', '').encode('utf-8')).decode('utf-8')
- if len(cell_file) > 0:
- with open(cell_file[0], 'r') as file:
- source = file.read().format(**args)
- else:
- trace(f'No {language} directory found in {lang_dir}')
- trace(f'Will look for explicit quarto setup cell information in kernelspec dir')
- try:
- with open(os.path.join(kernelspec.path, f"quarto_{name}_cell"), 'r') as file:
- trace(f'Quarto_{name}_cell file found in {kernelspec.path}')
+ kernelspec = nb.metadata.kernelspec
+ language = get_language_from_nb_metadata(nb.metadata)
+ trace(json.dumps(nb.metadata, indent=2))
+ source = ""
+ lang_dir = os.path.join(resource_dir, "jupyter", "lang", language)
+ if os.path.isdir(lang_dir):
+ cell_file = glob.glob(os.path.join(lang_dir, name + ".*"))
+ # base64-encode the run_path given
+ args["run_path"] = base64.b64encode(
+ args.get("run_path", "").encode("utf-8")
+ ).decode("utf-8")
+ if len(cell_file) > 0:
+ with open(cell_file[0], "r") as file:
+ source = file.read().format(**args)
+ else:
+ trace(f"No {language} directory found in {lang_dir}")
+ trace(f"Will look for explicit quarto setup cell information in kernelspec dir")
+ try:
+ with open(
+ os.path.join(kernelspec.path, f"quarto_{name}_cell"), "r"
+ ) as file:
+ trace(f"Quarto_{name}_cell file found in {kernelspec.path}")
+ trace(os.path.join(kernelspec.path, f"quarto_{name}_cell"))
+ source = file.read()
+ except FileNotFoundError:
+ trace(f"No quarto_{name}_cell file found in {kernelspec.path}")
trace(os.path.join(kernelspec.path, f"quarto_{name}_cell"))
- source = file.read()
- except FileNotFoundError:
- trace(f'No quarto_{name}_cell file found in {kernelspec.path}')
- trace(os.path.join(kernelspec.path, f"quarto_{name}_cell"))
- pass
-
- # create cell
- if source != '' or allow_empty:
- return nbformat.versions[NB_FORMAT_VERSION].new_code_cell(
- source = source
- )
- else:
- return None
-
-def nb_from_cache(nb, nb_cache, nb_meta = ("kernelspec", "language_info", "widgets")):
- try:
- trace("nb_from_cache match")
- cache_record = nb_cache.match_cache_notebook(nb)
- trace("nb_from_cache get buncle")
- cache_bundle = nb_cache.get_cache_bundle(cache_record.pk)
- cache_nb = cache_bundle.nb
- nb = copy.deepcopy(nb)
- # selected (execution-oriented) metadata
- trace("nb_from_cache processing metadata")
- if nb_meta is None:
- nb.metadata = cache_nb.metadata
- else:
- for key in nb_meta:
- if key in cache_nb.metadata:
- nb.metadata[key] = cache_nb.metadata[key]
- # code cells
- trace("nb_from_cache processing cells")
- for idx in range(len(nb.cells)):
- if nb.cells[idx].cell_type == "code":
- cache_cell = cache_nb.cells.pop(0)
- nb.cells[idx] = cache_cell
- trace("nb_from_cache returning")
- return nb
- except KeyError:
- return None
+ pass
+
+ # create cell
+ if source != "" or allow_empty:
+ return nbformat.versions[NB_FORMAT_VERSION].new_code_cell(source=source)
+ else:
+ return None
+
+
+def nb_from_cache(nb, nb_cache, nb_meta=("kernelspec", "language_info", "widgets")):
+ try:
+ trace("nb_from_cache match")
+ cache_record = nb_cache.match_cache_notebook(nb)
+ trace("nb_from_cache get buncle")
+ cache_bundle = nb_cache.get_cache_bundle(cache_record.pk)
+ cache_nb = cache_bundle.nb
+ nb = copy.deepcopy(nb)
+ # selected (execution-oriented) metadata
+ trace("nb_from_cache processing metadata")
+ if nb_meta is None:
+ nb.metadata = cache_nb.metadata
+ else:
+ for key in nb_meta:
+ if key in cache_nb.metadata:
+ nb.metadata[key] = cache_nb.metadata[key]
+ # code cells
+ trace("nb_from_cache processing cells")
+ for idx in range(len(nb.cells)):
+ if nb.cells[idx].cell_type == "code":
+ cache_cell = cache_nb.cells.pop(0)
+ nb.cells[idx] = cache_cell
+ trace("nb_from_cache returning")
+ return nb
+ except KeyError:
+ return None
+
# This function is only called on setup cells
def nb_kernel_dependencies(setup_cell):
- for index, output in enumerate(setup_cell.outputs):
- if output.name == 'stdout' and output.output_type == 'stream':
- return json.loads(output.text)
+ for index, output in enumerate(setup_cell.outputs):
+ if output.name == "stdout" and output.output_type == "stream":
+ return json.loads(output.text)
+
def cell_execute(client, cell, index, execution_count, eval_default, store_history):
+ language = get_language_from_nb_metadata(client.nb.metadata)
+ # read cell options
+ cell_options = nb_cell_yaml_options(language, cell)
+
+ # check options for eval and error
+ eval = cell_options.get("eval", eval_default)
+ allow_errors = cell_options.get("error")
+
+ trace(f"cell_execute with eval={eval}")
+ if allow_errors == True:
+ trace(f"cell_execute with allow_errors={allow_errors}")
+
+ # execute if eval is active
+ if eval == True:
+ # add 'raises-exception' tag for allow_errors
+ if allow_errors:
+ if not "metadata" in cell:
+ cell["metadata"] = {}
+ tags = cell.get("metadata", {}).get("tags", [])
+ cell["metadata"]["tags"] = tags + ["raises-exception"]
+
+ # execute (w/o yaml options so that cell magics work)
+ source = cell.source
+ cell.source = nb_strip_yaml_options(client, cell.source)
+ cell = client.execute_cell(
+ cell=cell,
+ cell_index=index,
+ execution_count=execution_count,
+ store_history=store_history,
+ )
+ cell.source = source
+
+ # if lines_to_next_cell is 0 then fix it to be 1
+ lines_to_next_cell = cell.get("metadata", {}).get("lines_to_next_cell", -1)
+ if lines_to_next_cell == 0:
+ cell["metadata"]["lines_to_next_cell"] = 1
+
+ # remove injected raises-exception
+ if allow_errors:
+ cell["metadata"]["tags"].remove("raises-exception")
+ if len(cell["metadata"]["tags"]) == 0:
+ del cell["metadata"]["tags"]
+
+ # Check for display errors in output (respecting both global and cell settings)
+ cell_allows_errors = (
+ allow_errors if allow_errors is not None else client.allow_errors
+ )
+ if not cell_allows_errors:
+ trace("Cell does not allow errors: checking for uncaught errors")
+ for output in cell.outputs:
+ if output.get("output_type") == "error":
+ trace(" Uncaught error found in output")
+ from nbclient.exceptions import CellExecutionError
+
+ error_name = output.get("ename", "UnnamedError")
+ error_value = output.get("evalue", "")
+ traceback = output.get("traceback", [])
+ # Use same error raising mechanism as nbclient
+ raise CellExecutionError.from_cell_and_msg(
+ cell,
+ {
+ "ename": "UncaughtCellError:" + error_name,
+ "evalue": error_value,
+ "traceback": traceback,
+ },
+ )
+
+ # return cell
+ return cell
+
- language = get_language_from_nb_metadata(client.nb.metadata)
- # read cell options
- cell_options = nb_cell_yaml_options(language, cell)
-
- # check options for eval and error
- eval = cell_options.get('eval', eval_default)
- allow_errors = cell_options.get('error')
-
- trace(f"cell_execute with eval={eval}")
- if (allow_errors == True):
- trace(f"cell_execute with allow_errors={allow_errors}")
-
- # execute if eval is active
- if eval == True:
-
- # add 'raises-exception' tag for allow_errors
- if allow_errors:
- if not "metadata" in cell:
- cell["metadata"] = {}
- tags = cell.get('metadata', {}).get('tags', [])
- cell["metadata"]["tags"] = tags + ['raises-exception']
-
- # execute (w/o yaml options so that cell magics work)
- source = cell.source
- cell.source = nb_strip_yaml_options(client, cell.source)
- cell = client.execute_cell(
- cell = cell,
- cell_index = index,
- execution_count = execution_count,
- store_history = store_history
- )
- cell.source = source
-
- # if lines_to_next_cell is 0 then fix it to be 1
- lines_to_next_cell = cell.get('metadata', {}).get('lines_to_next_cell', -1)
- if lines_to_next_cell == 0:
- cell["metadata"]["lines_to_next_cell"] = 1
-
- # remove injected raises-exception
- if allow_errors:
- cell["metadata"]["tags"].remove('raises-exception')
- if len(cell["metadata"]["tags"]) == 0:
- del cell["metadata"]["tags"]
-
- # Check for display errors in output (respecting both global and cell settings)
- cell_allows_errors = allow_errors if allow_errors is not None else client.allow_errors
- if not cell_allows_errors:
- trace("Cell does not allow errors: checking for uncaught errors")
- for output in cell.outputs:
- if output.get('output_type') == 'error':
- trace(" Uncaught error found in output")
- from nbclient.exceptions import CellExecutionError
- error_name = output.get('ename', 'UnnamedError')
- error_value = output.get('evalue', '')
- traceback = output.get('traceback', [])
- # Use same error raising mechanism as nbclient
- raise CellExecutionError.from_cell_and_msg(
- cell,
- {
- 'ename': 'UncaughtCellError:' + error_name,
- 'evalue': error_value,
- 'traceback': traceback
- }
- )
-
- # return cell
- return cell
-
def cell_execute_inline(client, cell):
-
- # helper to raise an error from a result
- def raise_error(result):
- ename = result.get('ename')
- evalue = result.get('evalue')
- raise Exception(f'{ename}: {evalue}')
-
- # helper to clear existing user_expressions if they exist
- def clear_user_expressions():
- if "metadata" in cell:
- metadata = cell.get("metadata")
- if "user_expressions" in metadata:
- del metadata["user_expressions"]
-
- # find expressions in source
- language = get_language_from_nb_metadata(client.nb.metadata)
- source = ''.join(cell.source)
- expressions = re.findall(
- fr'(?:^|[^`])`{{{language}}}[ \t]([^`]+)`',
- source,
- re.MULTILINE
- )
- if len(expressions):
- # send and wait for 'execute' kernel message w/ user_expressions
- kc = client.kc
- user_expressions = dict()
- for idx, expr in enumerate(expressions):
- user_expressions[str(idx).strip()] = expr
- msg_id = kc.execute('', user_expressions = user_expressions)
- reply = client.wait_for_reply(msg_id)
-
- # process reply
- content = reply.get('content')
- if content.get('status') == 'ok':
- # build results (check for error on each one)
- results = []
- for key in user_expressions:
- result = content.get('user_expressions').get(key)
- if result.get('status') == 'ok':
- results.append({
- 'expression' : user_expressions.get(key),
- 'result': result
- })
- elif result.get('status') == 'error':
- raise_error(result)
-
- # set results into metadata
- if not "metadata" in cell:
- cell["metadata"] = {}
- cell["metadata"]["user_expressions"] = results
-
- elif content.get('status') == 'error':
- raise_error(content)
- else:
- clear_user_expressions()
-
- # return cell
- return cell
+ # helper to raise an error from a result
+ def raise_error(result):
+ ename = result.get("ename")
+ evalue = result.get("evalue")
+ raise Exception(f"{ename}: {evalue}")
+
+ # helper to clear existing user_expressions if they exist
+ def clear_user_expressions():
+ if "metadata" in cell:
+ metadata = cell.get("metadata")
+ if "user_expressions" in metadata:
+ del metadata["user_expressions"]
+
+ # find expressions in source
+ language = get_language_from_nb_metadata(client.nb.metadata)
+ source = "".join(cell.source)
+ expressions = re.findall(
+ rf"(?:^|[^`])`{{{language}}}[ \t]([^`]+)`", source, re.MULTILINE
+ )
+ if len(expressions):
+ # send and wait for 'execute' kernel message w/ user_expressions
+ kc = client.kc
+ user_expressions = dict()
+ for idx, expr in enumerate(expressions):
+            user_expressions[str(idx)] = expr
+ msg_id = kc.execute("", user_expressions=user_expressions)
+ reply = client.wait_for_reply(msg_id)
+
+ # process reply
+ content = reply.get("content")
+ if content.get("status") == "ok":
+ # build results (check for error on each one)
+ results = []
+ for key in user_expressions:
+ result = content.get("user_expressions").get(key)
+ if result.get("status") == "ok":
+ results.append(
+ {"expression": user_expressions.get(key), "result": result}
+ )
+ elif result.get("status") == "error":
+ raise_error(result)
+
+ # set results into metadata
+            if "metadata" not in cell:
+ cell["metadata"] = {}
+ cell["metadata"]["user_expressions"] = results
+
+ elif content.get("status") == "error":
+ raise_error(content)
+ else:
+ clear_user_expressions()
+
+ # return cell
+ return cell
def cell_clear_output(cell):
- remove_metadata = ['collapsed', 'scrolled']
- if cell.cell_type == 'code':
- cell.outputs = []
- cell.execution_count = None
- if 'metadata' in cell:
- for field in remove_metadata:
- cell.metadata.pop(field, None)
- return cell
+ remove_metadata = ["collapsed", "scrolled"]
+ if cell.cell_type == "code":
+ cell.outputs = []
+ cell.execution_count = None
+ if "metadata" in cell:
+ for field in remove_metadata:
+ cell.metadata.pop(field, None)
+ return cell
-def nb_parameterize(nb, params):
- # verify papermill import
- if not papermill_translate:
- raise ImportError('The papermill package is required for processing --execute-params')
-
- # alias kernel name and language
- kernel_name = nb.metadata.kernelspec.name
- language = get_language_from_nb_metadata(nb.metadata)
-
- # find params index and note any tags/yaml on it (exit if no params)
- params_index = find_first_tagged_cell_index(nb, "parameters")
- if params_index != -1:
- params_cell_tags = nb.cells[params_index].get('metadata', {}).get('tags', []).copy()
- params_cell_yaml = nb_cell_yaml_lines(language, nb.cells[params_index].source)
- params_cell_tags.remove("parameters")
- else:
- return
-
- # Generate parameter content based on the kernel_name
- params_content = papermill_translate.translate_parameters(
- kernel_name,
- language,
- params,
- 'Injected Parameters'
- )
-
- # prepend options
- if len(params_cell_yaml):
- # https://github.com/quarto-dev/quarto-cli/issues/10097
- # We need to find and drop `label: ` from the yaml options
- # to avoid label duplication
- # The only way to do this robustly is to parse the yaml
- # and then re-encode it
- try:
- params_cell_yaml = parse_string("\n".join(params_cell_yaml))
- if "label" in params_cell_yaml:
- del params_cell_yaml['label']
- params_cell_yaml = safe_dump(params_cell_yaml).strip().splitlines()
- except Exception as e:
- sys.stderr.write(str(e) + "\naksjdfhakjsdhf\n")
- sys.stderr.write("\nWARNING: Invalid YAML option format in cell:\n" + "\n".join(params_cell_yaml) + "\n")
- sys.stderr.flush()
- params_cell_yaml = []
-
- comment_chars = nb_language_comment_chars(language)
- option_prefix = comment_chars[0] + "| "
- option_suffix = comment_chars[1] if len(comment_chars) > 1 else None
- def enclose(yaml):
- yaml = option_prefix + yaml
- if option_suffix:
- yaml = yaml + option_suffix
- return yaml
- params_content = "\n".join(map(enclose, params_cell_yaml)) + "\n" + params_content
-
- # create params cell
- params_cell = nbformat.v4.new_code_cell(source=params_content)
- params_cell.metadata['tags'] = ['injected-parameters'] + params_cell_tags
+def nb_parameterize(nb, params):
+ # verify papermill import
+ if not papermill_translate:
+ raise ImportError(
+ "The papermill package is required for processing --execute-params"
+ )
+
+ # alias kernel name and language
+ kernel_name = nb.metadata.kernelspec.name
+ language = get_language_from_nb_metadata(nb.metadata)
+
+ # find params index and note any tags/yaml on it (exit if no params)
+ params_index = find_first_tagged_cell_index(nb, "parameters")
+ if params_index != -1:
+ params_cell_tags = (
+ nb.cells[params_index].get("metadata", {}).get("tags", []).copy()
+ )
+ params_cell_yaml = nb_cell_yaml_lines(language, nb.cells[params_index].source)
+ params_cell_tags.remove("parameters")
+ else:
+ return
+
+ # Generate parameter content based on the kernel_name
+ params_content = papermill_translate.translate_parameters(
+ kernel_name, language, params, "Injected Parameters"
+ )
+
+ # prepend options
+ if len(params_cell_yaml):
+ # https://github.com/quarto-dev/quarto-cli/issues/10097
+ # We need to find and drop `label: ` from the yaml options
+ # to avoid label duplication
+ # The only way to do this robustly is to parse the yaml
+ # and then re-encode it
+ try:
+ params_cell_yaml = parse_string("\n".join(params_cell_yaml))
+ if "label" in params_cell_yaml:
+ del params_cell_yaml["label"]
+ params_cell_yaml = safe_dump(params_cell_yaml).strip().splitlines()
+ except Exception as e:
+            sys.stderr.write(str(e) + "\n")
+ sys.stderr.write(
+ "\nWARNING: Invalid YAML option format in cell:\n"
+ + "\n".join(params_cell_yaml)
+ + "\n"
+ )
+ sys.stderr.flush()
+ params_cell_yaml = []
+
+ comment_chars = nb_language_comment_chars(language)
+ option_prefix = comment_chars[0] + "| "
+ option_suffix = comment_chars[1] if len(comment_chars) > 1 else None
+
+ def enclose(yaml):
+ yaml = option_prefix + yaml
+ if option_suffix:
+ yaml = yaml + option_suffix
+ return yaml
+
+ params_content = (
+ "\n".join(map(enclose, params_cell_yaml)) + "\n" + params_content
+ )
+
+ # create params cell
+ params_cell = nbformat.v4.new_code_cell(source=params_content)
+ params_cell.metadata["tags"] = ["injected-parameters"] + params_cell_tags
# find existing injected params index
- injected_params_index = find_first_tagged_cell_index(nb, 'injected-parameters')
-
- # find the right insertion/replace point for the injected params
- if injected_params_index >= 0:
- # Replace the injected cell with a new version
- before = nb.cells[:injected_params_index]
- after = nb.cells[injected_params_index + 1 :]
- else:
- # Add an injected cell after the parameter cell
- before = nb.cells[: params_index + 1]
- after = nb.cells[params_index + 1 :]
-
- nb.cells = before + [params_cell] + after
- if not nb.metadata.get('papermill'):
- nb.metadata.papermill = {}
- nb.metadata.papermill['parameters'] = params
-
+ injected_params_index = find_first_tagged_cell_index(nb, "injected-parameters")
+
+ # find the right insertion/replace point for the injected params
+ if injected_params_index >= 0:
+ # Replace the injected cell with a new version
+ before = nb.cells[:injected_params_index]
+ after = nb.cells[injected_params_index + 1 :]
+ else:
+ # Add an injected cell after the parameter cell
+ before = nb.cells[: params_index + 1]
+ after = nb.cells[params_index + 1 :]
+
+ nb.cells = before + [params_cell] + after
+ if not nb.metadata.get("papermill"):
+ nb.metadata.papermill = {}
+ nb.metadata.papermill["parameters"] = params
+
def find_first_tagged_cell_index(nb, tag):
- parameters_indices = []
- for idx, cell in enumerate(nb.cells):
- if tag in cell.get('metadata', {}).get('tags', {}):
- parameters_indices.append(idx)
- if not parameters_indices:
- return -1
- return parameters_indices[0]
+ parameters_indices = []
+ for idx, cell in enumerate(nb.cells):
+ if tag in cell.get("metadata", {}).get("tags", {}):
+ parameters_indices.append(idx)
+ if not parameters_indices:
+ return -1
+ return parameters_indices[0]
+
def nb_strip_yaml_options(client, source):
- yaml_lines = nb_cell_yaml_lines(get_language_from_nb_metadata(client.nb.metadata), source)
- num_yaml_lines = len(yaml_lines)
- if num_yaml_lines > 0:
- return "\n".join(source.splitlines()[num_yaml_lines:])
- else:
- return source
+ yaml_lines = nb_cell_yaml_lines(
+ get_language_from_nb_metadata(client.nb.metadata), source
+ )
+ num_yaml_lines = len(yaml_lines)
+ if num_yaml_lines > 0:
+ return "\n".join(source.splitlines()[num_yaml_lines:])
+ else:
+ return source
+
def nb_cell_yaml_options(lang, cell):
+ # go through the lines until we've found all of the yaml
+ yaml_lines = nb_cell_yaml_lines(lang, cell.source)
+
+ # if we have yaml then parse it
+ if len(yaml_lines) > 0:
+ yaml_code = "\n".join(yaml_lines)
+ yaml_options = parse_string(yaml_code)
+ if type(yaml_options) is dict:
+ return yaml_options
+ else:
+ sys.stderr.write(
+ "\nWARNING: Invalid YAML option format in cell:\n" + yaml_code + "\n"
+ )
+ sys.stderr.flush()
+ return dict()
+
+ else:
+ return dict()
+
- # go through the lines until we've found all of the yaml
- yaml_lines = nb_cell_yaml_lines(lang, cell.source)
-
- # if we have yaml then parse it
- if len(yaml_lines) > 0:
- yaml_code = "\n".join(yaml_lines)
- yaml_options = parse_string(yaml_code)
- if (type(yaml_options) is dict):
- return yaml_options
- else:
- sys.stderr.write("\nWARNING: Invalid YAML option format in cell:\n" + yaml_code + "\n")
- sys.stderr.flush()
- return dict()
-
- else:
- return dict()
-
def nb_cell_yaml_lines(lang, source):
- # determine language comment chars
- comment_chars = nb_language_comment_chars(lang)
- option_pattern = "^" + re.escape(comment_chars[0]) + "\\s*\\| ?"
- option_suffix = comment_chars[1] if len(comment_chars) > 1 else None
-
- # go through the lines until we've found all of the yaml
- yaml_lines = []
- for line in source.splitlines():
- option_match = re.match(option_pattern, line)
- if option_match:
- if (not option_suffix) or line.rstrip().endswith(option_suffix):
- yaml_option = line[len(option_match.group()):]
- if (option_suffix):
- yaml_option = yaml_option.rstrip()[:-len(option_suffix)]
- # strip trailing spaces after : to avoid poyo error
- # (https://github.com/hackebrot/poyo/issues/30)
- yaml_option = re.sub(":\\s+$", ":", yaml_option)
- yaml_lines.append(yaml_option)
- continue
- break
-
- # return the lines
- return yaml_lines
+ # determine language comment chars
+ comment_chars = nb_language_comment_chars(lang)
+ option_pattern = "^" + re.escape(comment_chars[0]) + "\\s*\\| ?"
+ option_suffix = comment_chars[1] if len(comment_chars) > 1 else None
+
+ # go through the lines until we've found all of the yaml
+ yaml_lines = []
+ for line in source.splitlines():
+ option_match = re.match(option_pattern, line)
+ if option_match:
+ if (not option_suffix) or line.rstrip().endswith(option_suffix):
+ yaml_option = line[len(option_match.group()) :]
+ if option_suffix:
+ yaml_option = yaml_option.rstrip()[: -len(option_suffix)]
+ # strip trailing spaces after : to avoid poyo error
+ # (https://github.com/hackebrot/poyo/issues/30)
+ yaml_option = re.sub(":\\s+$", ":", yaml_option)
+ yaml_lines.append(yaml_option)
+ continue
+ break
+
+ # return the lines
+ return yaml_lines
+
def nb_language_comment_chars(lang):
- langs = dict(
- r = "#",
- python = "#",
- julia = "#",
- scala = "//",
- matlab = "%",
- csharp = "//",
- fsharp = "//",
- c = ["/*", "*/"],
- css = ["/*", "*/"],
- sas = ["*", ";"],
- powershell = "#",
- bash = "#",
- sql = "--",
- mysql = "--",
- psql = "--",
- lua = "--",
- cpp = "//",
- cc = "//",
- stan = "#",
- octave = "#",
- fortran = "!",
- fortran95 = "!",
- awk = "#",
- gawk = "#",
- stata = "*",
- java = "//",
- groovy = "//",
- sed = "#",
- perl = "#",
- ruby = "#",
- tikz = "%",
- js = "//",
- d3 = "//",
- node = "//",
- sass = "//",
- coffee = "#",
- go = "//",
- asy = "//",
- haskell = "--",
- dot = "//",
- apl = "⍝",
- ocaml = ["(*", "*)"]
- )
- if lang in langs:
- chars = langs[lang]
- if not isinstance(chars, type([])):
- chars = [chars]
- return chars
- else:
- return ["#"]
+ langs = dict(
+ r="#",
+ python="#",
+ julia="#",
+ scala="//",
+ matlab="%",
+ csharp="//",
+ fsharp="//",
+ c=["/*", "*/"],
+ css=["/*", "*/"],
+ sas=["*", ";"],
+ powershell="#",
+ bash="#",
+ sql="--",
+ mysql="--",
+ psql="--",
+ lua="--",
+ cpp="//",
+ cc="//",
+ stan="#",
+ octave="#",
+ fortran="!",
+ fortran95="!",
+ awk="#",
+ gawk="#",
+ stata="*",
+ java="//",
+ groovy="//",
+ sed="#",
+ perl="#",
+ ruby="#",
+ tikz="%",
+ js="//",
+ d3="//",
+ node="//",
+ sass="//",
+ coffee="#",
+ go="//",
+ asy="//",
+ haskell="--",
+ dot="//",
+ apl="⍝",
+ ocaml=["(*", "*)"],
+ )
+ if lang in langs:
+ chars = langs[lang]
+        if not isinstance(chars, list):
+ chars = [chars]
+ return chars
+ else:
+ return ["#"]
diff --git a/src/resources/jupyter/shiny.py b/src/resources/jupyter/shiny.py
index 7628c1bd28..a0a012162f 100644
--- a/src/resources/jupyter/shiny.py
+++ b/src/resources/jupyter/shiny.py
@@ -1,3 +1,3 @@
-
import time
+
time.sleep(100)
diff --git a/src/resources/use/binder/jupyter_notebook_config.py b/src/resources/use/binder/jupyter_notebook_config.py
index f0ba2484a5..903cfa6e18 100644
--- a/src/resources/use/binder/jupyter_notebook_config.py
+++ b/src/resources/use/binder/jupyter_notebook_config.py
@@ -1,13 +1,20 @@
# Traitlet configuration file for jupyter-notebook.
c.ServerProxy.servers = {
- 'vscode': {
- 'command': ['code-server', '--auth', 'none', '--disable-telemetry', '--port={port}', '.'],
- 'timeout': 300,
- 'launcher_entry': {
- 'enabled': True,
- 'icon_path': '.jupyter/vscode.svg',
- 'title': 'VS Code',
+ "vscode": {
+ "command": [
+ "code-server",
+ "--auth",
+ "none",
+ "--disable-telemetry",
+ "--port={port}",
+ ".",
+ ],
+ "timeout": 300,
+ "launcher_entry": {
+ "enabled": True,
+ "icon_path": ".jupyter/vscode.svg",
+ "title": "VS Code",
},
},
}
diff --git a/src/resources/vendor/fix-import-map.py b/src/resources/vendor/fix-import-map.py
index efed701407..621f35e375 100644
--- a/src/resources/vendor/fix-import-map.py
+++ b/src/resources/vendor/fix-import-map.py
@@ -1,17 +1,23 @@
import json
import os.path
+
def fix_path(v):
- (dirname, basename) = os.path.split(v)
- dirname = dirname[1:].replace(".", "-")
- return f'.{dirname}/{basename}'
+ (dirname, basename) = os.path.split(v)
+ dirname = dirname[1:].replace(".", "-")
+ return f".{dirname}/{basename}"
+
def fix_import_map(v):
- if type(v) == str:
- return fix_path(v)
- elif type(v) == dict:
- return dict((k if not k.startswith(".") else fix_path(k), fix_import_map(vv)) for (k,vv) in v.items())
+    if type(v) is str:
+        return fix_path(v)
+    elif type(v) is dict:
+ return dict(
+ (k if not k.startswith(".") else fix_path(k), fix_import_map(vv))
+ for (k, vv) in v.items()
+ )
+
-if __name__ == '__main__':
- d = json.load(open("import_map_deno_vendor.json"))
- print(json.dumps(fix_import_map(d), indent=2))
+if __name__ == "__main__":
+ d = json.load(open("import_map_deno_vendor.json"))
+ print(json.dumps(fix_import_map(d), indent=2))
diff --git a/tests/docs/filters/behead.py b/tests/docs/filters/behead.py
index ff8ff08868..4e6afee93f 100755
--- a/tests/docs/filters/behead.py
+++ b/tests/docs/filters/behead.py
@@ -7,9 +7,11 @@
from pandocfilters import toJSONFilter, Emph, Para
+
def behead(key, value, format, meta):
- if key == 'Header' and value[0] >= 2:
- return Para([Emph(value[2])])
+ if key == "Header" and value[0] >= 2:
+ return Para([Emph(value[2])])
+
if __name__ == "__main__":
- toJSONFilter(behead)
\ No newline at end of file
+ toJSONFilter(behead)
diff --git a/tests/docs/smoke-all/2023/03/30/test.py b/tests/docs/smoke-all/2023/03/30/test.py
index 6cd3edd1a8..d621b95edc 100644
--- a/tests/docs/smoke-all/2023/03/30/test.py
+++ b/tests/docs/smoke-all/2023/03/30/test.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python
import sys
-sys.stdout.write(sys.stdin.read())
\ No newline at end of file
+
+sys.stdout.write(sys.stdin.read())