diff --git a/.github/scripts/install.sh b/.github/scripts/install.sh index a4f5d729c70..f62334ec370 100755 --- a/.github/scripts/install.sh +++ b/.github/scripts/install.sh @@ -62,7 +62,7 @@ else fi # Install subrepos from source -python -bb -X dev install_dev_repos.py --not-editable --no-install spyder +python -bb -X dev install_dev_repos.py --not-editable --no-install spyder spyder-remote-services # Install Spyder to test it as if it was properly installed. python -bb -X dev -m build diff --git a/external-deps/spyder-remote-services/.github/workflows/python-publish.yml b/external-deps/spyder-remote-services/.github/workflows/python-publish.yml new file mode 100644 index 00000000000..79e43279cfa --- /dev/null +++ b/external-deps/spyder-remote-services/.github/workflows/python-publish.yml @@ -0,0 +1,30 @@ +name: Release to PyPI +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + release: + runs-on: ubuntu-latest + environment: + name: release + url: https://pypi.org/p/spyder-remote-services + permissions: + id-token: write + steps: + - name: Setup python to build package + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install build + run: python -m pip install build + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Build package + run: pyproject-build -s -w . -o dist + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@v1.8.11 diff --git a/external-deps/spyder-remote-services/.gitignore b/external-deps/spyder-remote-services/.gitignore new file mode 100644 index 00000000000..87e810c2457 --- /dev/null +++ b/external-deps/spyder-remote-services/.gitignore @@ -0,0 +1,133 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Ignore VSCode settings +.vscode/ diff --git a/external-deps/spyder-remote-services/.gitrepo b/external-deps/spyder-remote-services/.gitrepo new file mode 100644 index 00000000000..b37a80d0fff --- /dev/null +++ b/external-deps/spyder-remote-services/.gitrepo @@ -0,0 +1,12 @@ +; DO NOT EDIT (unless you know what you are doing) +; +; This subdirectory is a git "subrepo", and this file is maintained by the +; git-subrepo command. See https://github.com/ingydotnet/git-subrepo#readme +; +[subrepo] + remote = https://github.com/spyder-ide/spyder-remote-services + branch = main + commit = d425e769dc85783c1a95d1791d98a025341dafd1 + parent = 56ea3e3573d9fa5b37c3ea0dfb3f66da5efcc114 + method = merge + cmdver = 0.4.9 diff --git a/external-deps/spyder-remote-services/AUTHORS.txt b/external-deps/spyder-remote-services/AUTHORS.txt new file mode 100644 index 00000000000..aa9e9da3e44 --- /dev/null +++ b/external-deps/spyder-remote-services/AUTHORS.txt @@ -0,0 +1,5 @@ +The Spyder Remote Services Contributors are composed of: + +* Carlos Cordoba (Current Spyder/-Remote Services maintainer) +* All other developers that have committed to the spyder-remote-services repository: + diff --git a/external-deps/spyder-remote-services/LICENSE.txt b/external-deps/spyder-remote-services/LICENSE.txt new file mode 100644 index 00000000000..e9838e3452a --- /dev/null +++ b/external-deps/spyder-remote-services/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024- Spyder Remote Services Contributors (see AUTHORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/external-deps/spyder-remote-services/README.md b/external-deps/spyder-remote-services/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/external-deps/spyder-remote-services/environment.yml b/external-deps/spyder-remote-services/environment.yml new file mode 100644 index 00000000000..179f7fae705 --- /dev/null +++ b/external-deps/spyder-remote-services/environment.yml @@ -0,0 +1,12 @@ +name: spyder-remote +channels: + - conda-forge + # We want to have a reproducible setup, so we don't want default channels, + # which may be different for different users. All required channels should + # be listed explicitly here. + - nodefaults +dependencies: + - python=3.12.* + - pip + - jupyter_server >=2.14.2,<3.0 + - jupyter_client >=8.6.2,<9.0 diff --git a/external-deps/spyder-remote-services/kernel_enviroment.yml b/external-deps/spyder-remote-services/kernel_enviroment.yml new file mode 100644 index 00000000000..08e88b0de33 --- /dev/null +++ b/external-deps/spyder-remote-services/kernel_enviroment.yml @@ -0,0 +1,12 @@ +name: spyder-kernel +channels: + - conda-forge/label/spyder_kernels_rc + - conda-forge + # We want to have a reproducible setup, so we don't want default channels, + # which may be different for different users. All required channels should + # be listed explicitly here. + - nodefaults +dependencies: + - python=3.12.* + - pip + - spyder-kernels diff --git a/external-deps/spyder-remote-services/pyinstaller.py b/external-deps/spyder-remote-services/pyinstaller.py new file mode 100644 index 00000000000..65a18a1b5cc --- /dev/null +++ b/external-deps/spyder-remote-services/pyinstaller.py @@ -0,0 +1,28 @@ +from pathlib import Path + +import PyInstaller.__main__ +import jupyterhub + + +SPYDER_REMOTE_SERVER = Path(__file__).parent.absolute() / "spyder_remote_server" +path_to_main = str(SPYDER_REMOTE_SERVER / "__main__.py") +# path_to_run_jupyterhub = str(SPYDER_REMOTE_SERVER / "run_jupyterhub.py") +# path_to_run_service = str(SPYDER_REMOTE_SERVER / "run_service.py") +path_to_jupyterhub_config = str(SPYDER_REMOTE_SERVER / "jupyterhub_config.py") + +JUPYTERHUB_PATH = Path(jupyterhub.__file__).parent.absolute() +path_to_alembic = str(JUPYTERHUB_PATH / "alembic") +path_to_alembic_ini = str(JUPYTERHUB_PATH / "alembic.ini") + +def install(): + PyInstaller.__main__.run([ + path_to_main, + # '--add-data', f'{path_to_run_jupyterhub}:.', + # '--add-data', f'{path_to_run_service}:spyder_remote_server', + '--add-data', f'{path_to_jupyterhub_config}:spyder_remote_server', + '--add-data', f'{path_to_alembic}:jupyterhub/alembic', + '--add-data', f'{path_to_alembic_ini}:jupyterhub', + '--name', 'spyder-remote-server', + '--onefile', + '--noconsole', + ]) diff --git a/external-deps/spyder-remote-services/pyproject.toml b/external-deps/spyder-remote-services/pyproject.toml new file mode 100644 index 00000000000..4b1c676368c --- /dev/null +++ b/external-deps/spyder-remote-services/pyproject.toml @@ -0,0 +1,29 @@ +[project] +name = "spyder-remote-services" +authors = [{name = "Hendrik Louzada", email = "hendriklouzada@gmail.com"}] +description = "A remote server for Spyder IDE" +readme = "README.md" +license = { file = "LICENSE.txt" } +dynamic = ["version"] +requires-python = ">=3.10" +dependencies = [ + "jupyter_server >=2.14.2,<3.0", + "jupyter_client >=8.6.2,<9.0", +] + +[tool.setuptools.dynamic] +version = {attr = "spyder_remote_services.__version__"} + +[project.scripts] +spyder-server = "spyder_remote_services.__main__:main" + +[project.optional-dependencies] +dev = [ + 
"pytest >= 7.3.1", + "ruff >= 0.4.1", + #"pyinstaller >= 5.10.1" +] + +[build-system] +requires = ["setuptools >= 61.0"] +build-backend = "setuptools.build_meta" diff --git a/external-deps/spyder-remote-services/scripts/installer.sh b/external-deps/spyder-remote-services/scripts/installer.sh new file mode 100755 index 00000000000..b34e091bcf4 --- /dev/null +++ b/external-deps/spyder-remote-services/scripts/installer.sh @@ -0,0 +1,107 @@ +#!/bin/sh + +# Detect the shell from which the script was called +parent=$(ps -o comm $PPID |tail -1) +parent=${parent#-} # remove the leading dash that login shells have +case "$parent" in + # shells supported + bash|fish|xonsh|zsh) + shell=$parent + ;; + *) + # use the login shell (basename of $SHELL) as a fallback + shell=${SHELL##*/} + ;; +esac + +function download { + if hash curl >/dev/null 2>&1; then + curl $1 -o $2 -fsSL --compressed ${CURL_OPTS:-} + elif hash wget >/dev/null 2>&1; then + wget ${WGET_OPTS:-} -qO $2 $1 + else + echo "Neither curl nor wget was found" >&2 + exit 1 + fi +} + +function get_enviroment_name { + echo $(sed -n -e 's/^.*name:\s*//p' $1) +} + + +# Variables +PACKAGE_NAME="spyder-remote-services" +VERSION=${1:-latest} +KERNEL_VERSION=${2:-latest} + +SERVER_ENV="spyder-remote" +KERNEL_ENV="spyder-kernel" + +MICROMAMBA_VERSION="latest" +BIN_FOLDER="${HOME}/.local/bin" +PREFIX_LOCATION="${HOME}/micromamba" + +PYTHON_VERSION="3.12" + + +# Detecting platform +case "$(uname)" in + Linux) + PLATFORM="linux" ;; + Darwin) + PLATFORM="osx" ;; + *NT*) + PLATFORM="win" ;; +esac + +ARCH="$(uname -m)" +case "$ARCH" in + aarch64|ppc64le|arm64) + ;; # pass + *) + ARCH="64" ;; +esac + +case "$PLATFORM-$ARCH" in + linux-aarch64|linux-ppc64le|linux-64|osx-arm64|osx-64|win-64) + ;; # pass + *) + echo "Failed to detect your OS" >&2 + exit 1 + ;; +esac + + +# Install micromamba +RELEASE_URL="https://github.com/mamba-org/micromamba-releases/releases/${MICROMAMBA_VERSION}/download/micromamba-${PLATFORM}-${ARCH}" + +mkdir -p "${BIN_FOLDER}" +download "${RELEASE_URL}" "${BIN_FOLDER}/micromamba" +chmod +x "${BIN_FOLDER}/micromamba" + +eval "$("${BIN_FOLDER}/micromamba" shell hook --shell ${shell})" + + +# Install spyder-remote-services +micromamba create -y -n $SERVER_ENV -c conda-forge "python=${PYTHON_VERSION}" pip + +if [ $VERSION == "latest" ]; then + micromamba run -n $SERVER_ENV pip install ${PACKAGE_NAME} +elif [[ $VERSION != *"=="* ]] && [[ $VERSION != *">="* ]] && [[ $VERSION != *"<="* ]] && [[ $VERSION != *">"* ]] && [[ $VERSION != *"<"* ]]; then + micromamba run -n $SERVER_ENV pip install ${PACKAGE_NAME}==$VERSION +else + micromamba run -n $SERVER_ENV pip install ${PACKAGE_NAME}${VERSION} +fi + + +# Install spyder-kernel +if [ $KERNEL_VERSION == "latest" ]; then + micromamba create -y -n $KERNEL_ENV -c conda-forge -c conda-forge/label/spyder_kernels_rc "python=${PYTHON_VERSION}" spyder-kernels +elif [[ $KERNEL_VERSION != *"="* ]] && [[ $KERNEL_VERSION != *">="* ]] && [[ $KERNEL_VERSION != *"<="* ]] && [[ $KERNEL_VERSION != *">"* ]] && [[ $KERNEL_VERSION != *"<"* ]]; then + micromamba create -y -n $KERNEL_ENV -c conda-forge -c conda-forge/label/spyder_kernels_rc "python=${PYTHON_VERSION}" "spyder-kernels=$KERNEL_VERSION" +else + micromamba create -y -n $KERNEL_ENV -c conda-forge -c conda-forge/label/spyder_kernels_rc "python=${PYTHON_VERSION}" "spyder-kernels${KERNEL_VERSION}" +fi + +micromamba run -n $KERNEL_ENV python -m ipykernel install --user --name $KERNEL_ENV diff --git a/external-deps/spyder-remote-services/scripts/installer_dev.sh 
b/external-deps/spyder-remote-services/scripts/installer_dev.sh
new file mode 100755
index 00000000000..2a5bb517288
--- /dev/null
+++ b/external-deps/spyder-remote-services/scripts/installer_dev.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Detect the shell from which the script was called
+parent=$(ps -o comm $PPID |tail -1)
+parent=${parent#-} # remove the leading dash that login shells have
+case "$parent" in
+  # shells supported
+  bash|fish|xonsh|zsh)
+    shell=$parent
+    ;;
+  *)
+    # use the login shell (basename of $SHELL) as a fallback
+    shell=${SHELL##*/}
+    ;;
+esac
+
+function download {
+  if hash curl >/dev/null 2>&1; then
+    curl $1 -o $2 -fsSL --compressed ${CURL_OPTS:-}
+  elif hash wget >/dev/null 2>&1; then
+    wget ${WGET_OPTS:-} -qO $2 $1
+  else
+    echo "Neither curl nor wget was found" >&2
+    exit 1
+  fi
+}
+
+function get_environment_name {
+  echo $(sed -n -e 's/^.*name:\s*//p' $1)
+}
+
+
+# Variables
+PACKAGE_PATH=${1:-spyder-remote-services}
+KERNEL_PATH=${2:-spyder-kernels}
+
+SERVER_ENV="spyder-remote"
+KERNEL_ENV="spyder-kernel"
+
+MICROMAMBA_VERSION="latest"
+BIN_FOLDER="${HOME}/.local/bin"
+PREFIX_LOCATION="${HOME}/micromamba"
+
+PYTHON_VERSION="3.12"
+
+
+# Detecting platform
+case "$(uname)" in
+  Linux)
+    PLATFORM="linux" ;;
+  Darwin)
+    PLATFORM="osx" ;;
+  *NT*)
+    PLATFORM="win" ;;
+esac
+
+ARCH="$(uname -m)"
+case "$ARCH" in
+  aarch64|ppc64le|arm64)
+    ;; # pass
+  *)
+    ARCH="64" ;;
+esac
+
+case "$PLATFORM-$ARCH" in
+  linux-aarch64|linux-ppc64le|linux-64|osx-arm64|osx-64|win-64)
+    ;; # pass
+  *)
+    echo "Failed to detect your OS" >&2
+    exit 1
+    ;;
+esac
+
+
+# Install micromamba
+RELEASE_URL="https://github.com/mamba-org/micromamba-releases/releases/${MICROMAMBA_VERSION}/download/micromamba-${PLATFORM}-${ARCH}"
+
+mkdir -p "${BIN_FOLDER}"
+download "${RELEASE_URL}" "${BIN_FOLDER}/micromamba"
+chmod +x "${BIN_FOLDER}/micromamba"
+
+eval "$("${BIN_FOLDER}/micromamba" shell hook --shell ${shell})"
+
+
+# Install spyder-remote-services
+micromamba create -y -n $SERVER_ENV -c conda-forge "python=${PYTHON_VERSION}" pip
+micromamba run -n $SERVER_ENV pip install -e ${PACKAGE_PATH}
+
+# Install spyder-kernel
+micromamba create -y -n $KERNEL_ENV -c conda-forge "python=${PYTHON_VERSION}" pip
+micromamba run -n $KERNEL_ENV pip install -e ${KERNEL_PATH}
+
+micromamba run -n $KERNEL_ENV python -m ipykernel install --user --name $KERNEL_ENV
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/__init__.py b/external-deps/spyder-remote-services/spyder_remote_services/__init__.py
new file mode 100644
index 00000000000..9dd5eb4692f
--- /dev/null
+++ b/external-deps/spyder-remote-services/spyder_remote_services/__init__.py
@@ -0,0 +1,2 @@
+
+__version__ = '0.1.3'
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/__main__.py b/external-deps/spyder-remote-services/spyder_remote_services/__main__.py
new file mode 100644
index 00000000000..f92a9ba240e
--- /dev/null
+++ b/external-deps/spyder-remote-services/spyder_remote_services/__main__.py
@@ -0,0 +1,26 @@
+import argparse
+
+from spyder_remote_services.jupyter_server.serverapp import (
+    get_running_server,
+    launch_new_instance,
+)
+
+
+def main(argv=None):
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--jupyter-server', action='store_true', help="Start Spyder's Jupyter server")
+    parser.add_argument('--get-running-info', action='store_true', help="Get the running server info")
+    args, rest = parser.parse_known_args(argv)
+    if args.jupyter_server:
+        launch_new_instance(rest)
+    elif args.get_running_info:
+        if info := get_running_server(as_str=True):
+            print(info)
+        else:
+            print('No info found.')
+    else:
+        parser.print_help()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/jupyter_client/__init__.py b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_client/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/jupyter_client/manager.py b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_client/manager.py
new file mode 100644
index 00000000000..d11f5cdfc34
--- /dev/null
+++ b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_client/manager.py
@@ -0,0 +1,15 @@
+from jupyter_client.ioloop import AsyncIOLoopKernelManager
+
+
+class SpyderAsyncIOLoopKernelManager(AsyncIOLoopKernelManager):
+    def format_kernel_cmd(self, extra_arguments=None):
+        """Format the kernel command line to be run."""
+        # Avoids a sporadic warning on kernel restart
+        self.update_env(env={'PYDEVD_DISABLE_FILE_VALIDATION': '1'})
+
+        cmd = super().format_kernel_cmd(extra_arguments)
+        # Replace `ipykernel_launcher` with `spyder_kernels.console`
+        cmd_indx = cmd.index('ipykernel_launcher') if 'ipykernel_launcher' in cmd else -1
+        if cmd_indx != -1:
+            cmd[cmd_indx] = 'spyder_kernels.console'
+        return cmd
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/__init__.py b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/kernelmanager.py b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/kernelmanager.py
new file mode 100644
index 00000000000..045059c3dca
--- /dev/null
+++ b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/kernelmanager.py
@@ -0,0 +1,39 @@
+from jupyter_server.services.kernels.kernelmanager import (
+    AsyncMappingKernelManager,
+)
+from jupyter_server._tz import isoformat
+from traitlets import Unicode
+
+
+
+class SpyderAsyncMappingKernelManager(AsyncMappingKernelManager):
+    kernel_manager_class = 'spyder_remote_services.jupyter_client.manager.SpyderAsyncIOLoopKernelManager'
+
+    default_kernel_name = Unicode(
+        'spyder-kernel', help='The name of the default kernel to start'
+    ).tag(config=True)
+
+    def kernel_model(self, kernel_id):
+        """Return a JSON-safe dict representing a kernel
+
+        For use in representing kernels in the JSON APIs.
+ """ + self._check_kernel_id(kernel_id) + kernel = self._kernels[kernel_id] + + conn_info = kernel.get_connection_info() + + # convert key bytes to str + conn_info["key"] = conn_info["key"].decode() + + model = { + "id": kernel_id, + "name": kernel.kernel_name, + "last_activity": isoformat(kernel.last_activity), + "execution_state": kernel.execution_state, + "connections": self._kernel_connections.get(kernel_id, 0), + "connection_info": conn_info, + } + if getattr(kernel, "reason", None): + model["reason"] = kernel.reason + return model diff --git a/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/serverapp.py b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/serverapp.py new file mode 100644 index 00000000000..195e80867d4 --- /dev/null +++ b/external-deps/spyder-remote-services/spyder_remote_services/jupyter_server/serverapp.py @@ -0,0 +1,85 @@ +import json +import os +from pathlib import Path + +from jupyter_server.transutils import _i18n +from jupyter_server.utils import check_pid +from jupyter_core.paths import jupyter_runtime_dir +from jupyter_server.serverapp import ServerApp +from traitlets import Bool, default + +from spyder_remote_services.jupyter_server.kernelmanager import ( + SpyderAsyncMappingKernelManager, +) +from spyder_remote_services.utils import get_free_port + + +SYPDER_SERVER_INFO_FILE = "jpserver-spyder.json" + +class SpyderServerApp(ServerApp): + kernel_manager_class = SpyderAsyncMappingKernelManager + + set_dynamic_port = Bool( + True, + help="""Set the port dynamically. + + Get an available port instead of using the default port + if no port is provided. + """, + ).tag(config=True) + + @default("port") + def port_default(self): + if self.set_dynamic_port: + return get_free_port() + return int(os.getenv(self.port_env, self.port_default_value)) + + @property + def info_file(self): + return str((Path(self.runtime_dir) / + SYPDER_SERVER_INFO_FILE).resolve()) + + +def get_running_server(runtime_dir=None, log=None, *, as_str=False): + """Iterate over the server info files of running Jupyter servers. + + Given a runtime directory, find jpserver-* files in the security directory, + and yield dicts of their information, each one pertaining to + a currently running Jupyter server instance. 
+ """ + if runtime_dir is None: + runtime_dir = jupyter_runtime_dir() + + runtime_dir = Path(runtime_dir) + + # The runtime dir might not exist + if not runtime_dir.is_dir(): + return None + + conf_file = runtime_dir / SYPDER_SERVER_INFO_FILE + + if not conf_file.exists(): + return None + + with conf_file.open(mode="rb") as f: + info = json.load(f) + + # Simple check whether that process is really still running + # Also remove leftover files from IPython 2.x without a pid field + if ("pid" in info) and check_pid(info["pid"]): + if as_str: + return json.dumps(info, indent=None) + return info + + # If the process has died, try to delete its info file + try: + conf_file.unlink() + except OSError as e: + if log: + log.warning(_i18n("Deleting server info file failed: %s.") % e) + + +main = launch_new_instance = SpyderServerApp.launch_instance + +if __name__ == '__main__': + main() diff --git a/external-deps/spyder-remote-services/spyder_remote_services/utils.py b/external-deps/spyder-remote-services/spyder_remote_services/utils.py new file mode 100644 index 00000000000..7a4dc45f35c --- /dev/null +++ b/external-deps/spyder-remote-services/spyder_remote_services/utils.py @@ -0,0 +1,21 @@ +import socket +import sys +import os + + +if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'): + SYS_EXEC = sys.executable +else: + SYS_EXEC = 'spyder-remote-server' + + +def get_free_port(): + """Request a free port from the OS.""" + with socket.socket() as s: + s.bind(('', 0)) + return s.getsockname()[1] + + +def generate_token(): + """Generate a random token.""" + return os.urandom(64).hex() diff --git a/external-deps/spyder-remote-services/tests/Dockerfile b/external-deps/spyder-remote-services/tests/Dockerfile new file mode 100644 index 00000000000..e24153f0828 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/Dockerfile @@ -0,0 +1,46 @@ +FROM ubuntu:focal AS ubuntu-base +ENV DEBIAN_FRONTEND noninteractive +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +# Setup the default user. +RUN useradd -rm -d /home/ubuntu -s /bin/bash -g root -G sudo ubuntu +RUN echo 'ubuntu:ubuntu' | chpasswd +USER ubuntu +WORKDIR /home/ubuntu + +# Build image with Python and SSHD. +FROM ubuntu-base AS ubuntu-with-sshd +USER root + +# Install required tools. +RUN apt-get -qq update \ + && apt-get -qq --no-install-recommends install curl \ + && apt-get -qq --no-install-recommends install ca-certificates \ + && apt-get -qq --no-install-recommends install vim-tiny \ + && apt-get -qq --no-install-recommends install sudo \ + && apt-get -qq --no-install-recommends install git \ + && apt-get -qq --no-install-recommends install openssh-server \ + && apt-get -qq clean \ + && rm -rf /var/lib/apt/lists/* + +# Configure SSHD. +# SSH login fix. Otherwise user is kicked off after login +RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd +RUN mkdir /var/run/sshd +RUN bash -c 'install -m755 <(printf "#!/bin/sh\nexit 0") /usr/sbin/policy-rc.d' +RUN ex +'%s/^#\zeListenAddress/\1/g' -scwq /etc/ssh/sshd_config +RUN ex +'%s/^#\zeHostKey .*ssh_host_.*_key/\1/g' -scwq /etc/ssh/sshd_config +RUN RUNLEVEL=1 dpkg-reconfigure openssh-server +RUN ssh-keygen -A -v +RUN update-rc.d ssh defaults + +# Configure sudo. +RUN ex +"%s/^%sudo.*$/%sudo ALL=(ALL:ALL) NOPASSWD:ALL/g" -scwq! /etc/sudoers + +# Generate and configure user keys. 
+USER ubuntu +RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 +#COPY --chown=ubuntu:root "./files/authorized_keys" /home/ubuntu/.ssh/authorized_keys + + +CMD ["/usr/bin/sudo", "/usr/sbin/sshd", "-D", "-o", "ListenAddress=0.0.0.0"] diff --git a/external-deps/spyder-remote-services/tests/client/api.py b/external-deps/spyder-remote-services/tests/client/api.py new file mode 100644 index 00000000000..8231c598174 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/api.py @@ -0,0 +1,369 @@ +import uuid +import logging +import time +import asyncio + +import yarl +import aiohttp + +from client import auth + + +logger = logging.getLogger(__name__) + + +class JupyterHubAPI: + def __init__(self, hub_url, auth_type="token", verify_ssl=True, **kwargs): + self.hub_url = yarl.URL(hub_url) + self.api_url = self.hub_url / "hub/api" + self.auth_type = auth_type + self.verify_ssl = verify_ssl + + if auth_type == "token": + self.api_token = kwargs.get("api_token") + elif auth_type == "basic" or auth_type == "keycloak": + self.username = kwargs.get("username") + self.password = kwargs.get("password") + + async def __aenter__(self): + if self.auth_type == "token": + self.session = await auth.token_authentication( + self.api_token, verify_ssl=self.verify_ssl + ) + elif self.auth_type == "basic": + self.session = await auth.basic_authentication( + self.hub_url, self.username, self.password, verify_ssl=self.verify_ssl + ) + self.api_token = await self.create_token(self.username) + await self.session.close() + logger.debug("upgrading basic authentication to token authentication") + self.session = await auth.token_authentication( + self.api_token, verify_ssl=self.verify_ssl + ) + elif self.auth_type == "keycloak": + self.session = await auth.keycloak_authentication( + self.hub_url, self.username, self.password, verify_ssl=self.verify_ssl + ) + self.api_token = await self.create_token(self.username) + await self.session.close() + logger.debug("upgrading keycloak authentication to token authentication") + self.session = await auth.token_authentication( + self.api_token, verify_ssl=self.verify_ssl + ) + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.session.close() + + async def ensure_user(self, username, create_user=False): + user = await self.get_user(username) + if user is None: + if create_user: + await self.create_user(username) + else: + raise ValueError( + f"current username={username} does not exist and create_user={create_user}" + ) + user = await self.get_user(username) + return user + + async def get_user(self, username): + async with self.session.get(self.api_url / "users" / username) as response: + if response.status == 200: + return await response.json() + elif response.status == 404: + logger.info(f"username={username} does not exist") + return None + + async def create_user(self, username): + async with self.session.post(self.api_url / "users" / username) as response: + if response.status == 201: + logger.info(f"created username={username}") + response = await response.json() + self.api_token = await self.create_token(username) + return response + elif response.status == 409: + raise ValueError(f"username={username} already exists") + + async def delete_user(self, username): + async with self.session.delete(self.api_url / "users" / username) as response: + if response.status == 204: + logger.info(f"deleted username={username}") + elif response.status == 404: + raise ValueError(f"username={username} does not exist cannot delete") + + async def ensure_server( 
+ self, username, timeout, user_options=None, create_user=False + ): + user = await self.ensure_user(username, create_user=create_user) + if user["server"] is None: + await self.create_server(username, user_options=user_options) + + start_time = time.time() + while True: + user = await self.get_user(username) + if user["server"] and user["pending"] is None: + return JupyterAPI( + self.hub_url / "user" / username, + self.api_token, + verify_ssl=self.verify_ssl, + ) + + await asyncio.sleep(5) + total_time = time.time() - start_time + if total_time > timeout: + logger.error(f"jupyterhub server creation timeout={timeout:.0f} [s]") + raise TimeoutError( + f"jupyterhub server creation timeout={timeout:.0f} [s]" + ) + + logger.info(f"pending spawn polling for seconds={total_time:.0f} [s]") + + async def ensure_server_deleted(self, username, timeout): + user = await self.get_user(username) + if user is None: + return # user doesn't exist so server can't exist + + start_time = time.time() + while True: + server_status = await self.delete_server(username) + if server_status == 204: + return + + await asyncio.sleep(5) + total_time = time.time() - start_time + if total_time > timeout: + logger.error(f"jupyterhub server deletion timeout={timeout:.0f} [s]") + raise TimeoutError( + f"jupyterhub server deletion timeout={timeout:.0f} [s]" + ) + + logger.info(f"pending deletion polling for seconds={total_time:.0f} [s]") + + async def create_token(self, username, token_name=None): + token_name = token_name or "jhub-client" + async with self.session.post( + self.api_url / "users" / username / "tokens", json={"note": token_name} + ) as response: + logger.info(f"created token for username={username}") + return (await response.json())["token"] + + async def create_server(self, username, user_options=None): + user_options = user_options or {} + async with self.session.post( + self.api_url / "users" / username / "server", json=user_options + ) as response: + logger.info( + f"creating cluster username={username} user_options={user_options}" + ) + if response.status == 400: + raise ValueError(f"server for username={username} is already running") + elif response.status == 201: + logger.info( + f"created server for username={username} with user_options={user_options}" + ) + return True + + async def delete_server(self, username): + response = await self.session.delete( + self.api_url / "users" / username / "server" + ) + logger.info(f"deleted server for username={username}") + return response.status + + async def info(self): + async with self.session.get(self.api_url / "info") as response: + return await response.json() + + async def list_users(self): + async with self.session.get(self.api_url / "users") as response: + return await response.json() + + async def list_proxy(self): + async with self.session.get(self.api_url / "proxy") as response: + return await response.json() + + async def identify_token(self, token): + async with self.session.get( + self.api_url / "authorizations" / "token" / token + ) as response: + return await response.json() + + async def get_services(self): + async with self.session.get(self.api_url / "services") as response: + return await response.json() + + + async def get_service(self, service_name): + async with self.session.get(self.api_url / "services" / service_name) as response: + if response.status == 404: + return None + elif response.status == 200: + return await response.json() + + async def execute_post_service(self, service_name, url='', data=None): + async with 
self.session.post(self.hub_url / "services" / service_name / url, data=data) as response: + if response.status == 404: + return None + elif response.status == 200: + return await response.json() + + async def execute_get_service(self, service_name, url=''): + async with self.session.get(self.hub_url / "services" / service_name / url) as response: + if response.status == 404: + return None + elif response.status == 200: + return await response.json() + + async def execute_delete_service(self, service_name, url=''): + async with self.session.delete(self.hub_url / "services" / service_name / url) as response: + if response.status == 404: + return None + elif response.status == 200: + return await response.json() + + + + +class JupyterAPI: + def __init__(self, notebook_url, api_token, verify_ssl=True): + self.api_url = yarl.URL(notebook_url) / "api" + self.api_token = api_token + self.verify_ssl = verify_ssl + + async def __aenter__(self): + self.session = aiohttp.ClientSession( + headers={"Authorization": f"token {self.api_token}"}, + connector=aiohttp.TCPConnector(ssl=None if self.verify_ssl else False), + ) + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.session.close() + + async def create_kernel(self, kernel_spec=None): + data = {"kernel_spec": kernel_spec} if kernel_spec else None + + async with self.session.post(self.api_url / "kernels", json=data) as response: + data = await response.json() + logger.info( + f'created kernel_spec={kernel_spec} kernel={data["id"]} for jupyter' + ) + return data + + async def list_kernel_specs(self): + async with self.session.get(self.api_url / "kernelspecs") as response: + return await response.json() + + async def list_kernels(self): + async with self.session.get(self.api_url / "kernels") as response: + return await response.json() + + async def ensure_kernel(self, kernel_spec=None): + kernel_specs = await self.list_kernel_specs() + if kernel_spec is None: + kernel_spec = kernel_specs["default"] + else: + available_kernel_specs = list(kernel_specs["kernelspecs"].keys()) + if kernel_spec not in kernel_specs["kernelspecs"]: + logger.error( + f"kernel_spec={kernel_spec} not listed in available kernel specifications={available_kernel_specs}" + ) + raise ValueError( + f"kernel_spec={kernel_spec} not listed in available kernel specifications={available_kernel_specs}" + ) + + kernel_id = (await self.create_kernel(kernel_spec=kernel_spec))["id"] + return kernel_id, JupyterKernelAPI( + self.api_url / "kernels" / kernel_id, + self.api_token, + verify_ssl=self.verify_ssl, + ) + + async def get_kernel(self, kernel_id): + async with self.session.get(self.api_url / "kernels" / kernel_id) as response: + if response.status == 404: + return None + elif response.status == 200: + return await response.json() + + async def delete_kernel(self, kernel_id): + async with self.session.delete( + self.api_url / "kernels" / kernel_id + ) as response: + if response.status == 404: + raise ValueError( + f"failed to delete kernel_id={kernel_id} does not exist" + ) + elif response.status == 204: + logger.info(f"deleted kernel={kernel_id} for jupyter") + return True + + +class JupyterKernelAPI: + def __init__(self, kernel_url, api_token, verify_ssl=True): + self.api_url = kernel_url + self.api_token = api_token + self.verify_ssl = verify_ssl + + async def __aenter__(self): + self.session = aiohttp.ClientSession( + headers={"Authorization": f"token {self.api_token}"}, + connector=aiohttp.TCPConnector(ssl=None if self.verify_ssl else False), + ) + 
self.websocket = await self.session.ws_connect(self.api_url / "channels") + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.session.close() + + def request_execute_code(self, msg_id, username, code): + return { + "header": { + "msg_id": msg_id, + "username": username, + "msg_type": "execute_request", + "version": "5.2", + }, + "metadata": {}, + "content": { + "code": code, + "silent": False, + "store_history": True, + "user_expressions": {}, + "allow_stdin": True, + "stop_on_error": True, + }, + "buffers": [], + "parent_header": {}, + "channel": "shell", + } + + async def send_code(self, username, code, wait=True, timeout=None): + msg_id = str(uuid.uuid4()) + + await self.websocket.send_json( + self.request_execute_code(msg_id, username, code) + ) + + if not wait: + return None + + async for msg_text in self.websocket: + if msg_text.type != aiohttp.WSMsgType.TEXT: + return False + + # TODO: timeout is ignored + + msg = msg_text.json() + + if "parent_header" in msg and msg["parent_header"].get("msg_id") == msg_id: + # These are responses to our request + if msg["channel"] == "iopub": + if msg["msg_type"] == "execute_result": + return msg["content"]["data"]["text/plain"] + elif msg["msg_type"] == "stream": + return msg["content"]["text"] + # cell did not produce output + elif msg["content"].get("execution_state") == "idle": + return "" diff --git a/external-deps/spyder-remote-services/tests/client/auth.py b/external-deps/spyder-remote-services/tests/client/auth.py new file mode 100644 index 00000000000..eff9c61ebde --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/auth.py @@ -0,0 +1,50 @@ +import re + +import aiohttp +import yarl + + +async def token_authentication(api_token, verify_ssl=True): + return aiohttp.ClientSession( + headers={"Authorization": f"token {api_token}"}, + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + +async def basic_authentication(hub_url, username, password, verify_ssl=True): + session = aiohttp.ClientSession( + headers={"Referer": str(yarl.URL(hub_url) / "hub" / "api")}, + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + await session.post( + yarl.URL(hub_url) / "hub" / "login", + data={ + "username": username, + "password": password, + }, + ) + + return session + + +async def keycloak_authentication(hub_url, username, password, verify_ssl=True): + session = aiohttp.ClientSession( + headers={"Referer": str(yarl.URL(hub_url) / "hub" / "api")}, + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + response = await session.get(yarl.URL(hub_url) / "hub" / "oauth_login") + content = await response.content.read() + auth_url = re.search('action="([^"]+)"', content.decode("utf8")).group(1) + + response = await session.post( + auth_url.replace("&", "&"), + headers={"Content-Type": "application/x-www-form-urlencoded"}, + data={ + "username": username, + "password": password, + "credentialId": "", + }, + ) + return session diff --git a/external-deps/spyder-remote-services/tests/client/execute.py b/external-deps/spyder-remote-services/tests/client/execute.py new file mode 100644 index 00000000000..c86bb394938 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/execute.py @@ -0,0 +1,153 @@ +import uuid +import difflib +import logging +import textwrap + +from client.api import JupyterHubAPI +from client.utils import parse_notebook_cells + +logger = logging.getLogger(__name__) + + +DAEMONIZED_STOP_SERVER_HEADER = """ +def 
_client_stop_server():
+    import urllib.request
+    request = urllib.request.Request(url="{delete_server_endpoint}", method="DELETE")
+    request.add_header("Authorization", "token {api_token}")
+    urllib.request.urlopen(request)
+
+def custom_exc(shell, etype, evalue, tb, tb_offset=None):
+    _client_stop_server()
+
+get_ipython().set_custom_exc((Exception,), custom_exc)
+"""
+
+
+async def determine_username(
+    hub,
+    username=None,
+    user_format="user-{user}-{id}",
+    service_format="service-{name}-{id}",
+    temporary_user=False,
+):
+    token = await hub.identify_token(hub.api_token)
+
+    if username is None and not temporary_user:
+        if token["kind"] == "service":
+            logger.error(
+                "cannot execute without a specified username or temporary_user=True for a service api token"
+            )
+            raise ValueError(
+                "Service api token cannot execute without a specified username or temporary_user=True"
+            )
+        return token["name"]
+    elif username is None and temporary_user:
+        if token["kind"] == "service":
+            return service_format.format(id=str(uuid.uuid4()), name=token["name"])
+        else:
+            return user_format.format(id=str(uuid.uuid4()), name=token["name"])
+    else:
+        return username
+
+
+async def execute_code(
+    hub_url,
+    cells,
+    username=None,
+    temporary_user=False,
+    create_user=False,
+    delete_user=False,
+    server_creation_timeout=60,
+    server_deletion_timeout=60,
+    kernel_execution_timeout=60,
+    daemonized=False,
+    validate=False,
+    stop_server=True,
+    user_options=None,
+    kernel_spec=None,
+    auth_type="token",
+    verify_ssl=True,
+):
+    hub = JupyterHubAPI(hub_url, auth_type=auth_type, verify_ssl=verify_ssl)
+    result_cells = []
+
+    async with hub:
+        username = await determine_username(
+            hub, username, temporary_user=temporary_user
+        )
+        try:
+            jupyter = await hub.ensure_server(
+                username,
+                create_user=create_user,
+                user_options=user_options,
+                timeout=server_creation_timeout,
+            )
+
+            async with jupyter:
+                kernel_id, kernel = await jupyter.ensure_kernel(kernel_spec=kernel_spec)
+                async with kernel:
+                    if daemonized and stop_server:
+                        await kernel.send_code(
+                            username,
+                            DAEMONIZED_STOP_SERVER_HEADER.format(
+                                delete_server_endpoint=hub.api_url
+                                / "users"
+                                / username
+                                / "server",
+                                api_token=hub.api_token,
+                            ),
+                            wait=False,
+                        )
+
+                    for i, (code, expected_result) in enumerate(cells):
+                        kernel_result = await kernel.send_code(
+                            username,
+                            code,
+                            timeout=kernel_execution_timeout,
+                            wait=(not daemonized),
+                        )
+                        result_cells.append((code, kernel_result))
+                        if daemonized:
+                            logger.debug(
+                                f'kernel submitted cell={i} code=\n{textwrap.indent(code, " >>> ")}'
+                            )
+                        else:
+                            logger.debug(
+                                f'kernel executing cell={i} code=\n{textwrap.indent(code, " >>> ")}'
+                            )
+                            logger.debug(
+                                f'kernel result cell={i} result=\n{textwrap.indent(kernel_result, " | ")}'
+                            )
+                            if validate and (
+                                kernel_result.strip() != expected_result.strip()
+                            ):
+                                diff = "".join(
+                                    difflib.unified_diff(kernel_result, expected_result)
+                                )
+                                logger.error(
+                                    f"kernel result did not match expected result diff={diff}"
+                                )
+                                raise ValueError(
+                                    f"execution of cell={i} did not match expected result diff={diff}"
+                                )
+
+                    if daemonized and stop_server:
+                        await kernel.send_code(
+                            username, "_client_stop_server()", wait=False
+                        )
+                if not daemonized:
+                    await jupyter.delete_kernel(kernel_id)
+            if not daemonized and stop_server:
+                await hub.ensure_server_deleted(
+                    username, timeout=server_deletion_timeout
+                )
+        finally:
+            if delete_user and not daemonized:
+                await hub.delete_user(username)
+
+    return result_cells
+
+
+async def
execute_notebook(hub_url, notebook_path, **kwargs): + cells = parse_notebook_cells(notebook_path) + return await execute_code(hub_url, cells, **kwargs) diff --git a/external-deps/spyder-remote-services/tests/client/installation.py b/external-deps/spyder-remote-services/tests/client/installation.py new file mode 100644 index 00000000000..8a14bfaa2c7 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/installation.py @@ -0,0 +1,7 @@ +MICROMAMBA_INSTALLER = """\ +"${SHELL}" <(curl -L micro.mamba.pm/install.sh) +""" + +MICROMAMBA_INSTALLER_PS = """\ +Invoke-Expression ((Invoke-WebRequest -Uri https://micro.mamba.pm/install.ps1).Content) +""" diff --git a/external-deps/spyder-remote-services/tests/client/simulate.py b/external-deps/spyder-remote-services/tests/client/simulate.py new file mode 100644 index 00000000000..992a4dba8f4 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/simulate.py @@ -0,0 +1,23 @@ +import asyncio + +from client.execute import execute_code + + +async def simulate_users(hub_url, num_users, user_generator, workflow="concurrent"): + jupyterhub_sessions = [] + + if workflow == "concurrent": + for i, (username, cells) in zip(range(num_users), user_generator): + jupyterhub_sessions.append( + execute_code( + hub_url=hub_url, + username=username, + cells=cells, + create_user=True, + delete_user=True, + ) + ) + + return await asyncio.gather(*jupyterhub_sessions) + else: + raise ValueError("uknown type of jupyterhub workflow to simulate") diff --git a/external-deps/spyder-remote-services/tests/client/utils.py b/external-deps/spyder-remote-services/tests/client/utils.py new file mode 100644 index 00000000000..9183086ccd4 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/client/utils.py @@ -0,0 +1,83 @@ +import json + + +def parse_notebook_cells(notebook_path): + with open(notebook_path) as f: + notebook_data = json.load(f) + + cells = [] + for cell in notebook_data["cells"]: + if cell["cell_type"] == "code": + source = "".join(cell["source"]) + outputs = [] + for output in cell["outputs"]: + if output["output_type"] == "stream": + outputs.append("".join(output["text"])) + elif output["output_type"] == "execute_result": + outputs.append("".join(output["data"]["text/plain"])) + result = "\n".join(outputs) + cells.append((source, result)) + + return cells + + +def render_notebook(cells): + notebook_template = { + "cells": [], + "nbformat": 4, + "nbformat_minor": 4, + "metadata": {}, + } + + for i, (code, result) in enumerate(cells, start=1): + notebook_template["cells"].append( + { + "cell_type": "code", + "execution_count": i, + "metadata": {}, + "outputs": [ + { + "data": {"text/plain": result}, + "execution_count": i, + "metadata": {}, + "output_type": "execute_result", + } + ], + "source": code, + } + ) + + return notebook_template + + +TEMPLATE_SCRIPT_HEADER = """ +import os +import sys +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger('client') + +OUTPUT_FORMAT = '{output_format}' +STDOUT_FILENAME = os.path.expanduser('{stdout_filename}') +STDERR_FILENAME = os.path.expanduser('{stderr_filename}') + +if OUTPUT_FORMAT == 'file': + logger.info('writting output to files stdout={stdout_filename} and stderr={stderr_filename}') + sys.stdout = open(STDOUT_FILENAME, 'w') + sys.stderr = open(STDERR_FILENAME, 'w') + +""" + + +def tangle_cells( + cells, output_format="file", stdout_filename=None, stderr_filename=None +): + # TODO: eventually support writing output to notebook + + tangled_code = [] + 
for i, (code, expected_result) in enumerate(cells): + tangled_code.append('logger.info("beginning execution cell={i}")') + tangled_code.append(code) + tangled_code.append('logger.info("completed execution cell={i}")') + return TEMPLATE_SCRIPT_HEADER + "\n".join(tangled_code) diff --git a/external-deps/spyder-remote-services/tests/docker-compose.yaml b/external-deps/spyder-remote-services/tests/docker-compose.yaml new file mode 100644 index 00000000000..98e652dc64e --- /dev/null +++ b/external-deps/spyder-remote-services/tests/docker-compose.yaml @@ -0,0 +1,18 @@ +version: "3" + +services: + spyder-remote-server: + build: . + # volumes: + # - "..:/home/ubuntu/spyder_remote_server" + networks: + mynet: + ipv4_address: 172.16.128.2 + ports: + - "2222:22" + privileged: true # Required for /usr/sbin/init +networks: + mynet: + ipam: + config: + - subnet: 172.16.128.0/24 diff --git a/external-deps/spyder-remote-services/tests/test.py b/external-deps/spyder-remote-services/tests/test.py new file mode 100644 index 00000000000..be3e81cce98 --- /dev/null +++ b/external-deps/spyder-remote-services/tests/test.py @@ -0,0 +1,86 @@ +import asyncio +import logging + +import textwrap + +from client.api import JupyterHubAPI + +logger = logging.getLogger(__name__) + +SERVER_TIMEOUT = 3600 +KERNEL_EXECUTION_TIMEOUT = 3600 + + +SERVER_URL = "http://localhost:8000" + +USERNAME = "user-test-1" + +async def test(): + result_cells = [] + cells = [ + "a, b = 1, 2", + "a + b" + ] + + async with JupyterHubAPI( + SERVER_URL, + auth_type="token", + api_token="GiJ96ujfLpPsq7oatW1IJuER01FbZsgyCM0xH6oMZXDAV6zUZsFy3xQBZakSBo6P", + verify_ssl=False + ) as hub: + try: + # jupyter = await hub.ensure_server( + # USERNAME, + # timeout=SERVER_TIMEOUT, + # create_user=True, + # ) + + # # test kernel + # async with jupyter: + # kernel_id, kernel = await jupyter.ensure_kernel() + # async with kernel: + # for i, code in enumerate(cells): + # kernel_result = await kernel.send_code( + # USERNAME, + # code, + # timeout=KERNEL_EXECUTION_TIMEOUT, + # wait=True, + # ) + # result_cells.append((code, kernel_result)) + # logger.warning( + # f'kernel executing cell={i} code=\n{textwrap.indent(code, " >>> ")}' + # ) + # logger.warning( + # f'kernel result cell={i} result=\n{textwrap.indent(kernel_result, " | ")}' + # ) + + # test custom spyder-service + # spyder_service_response = await hub.get_service("spyder-service") + # logger.warning(f'spyder-service: {spyder_service_response}') + + spyder_service_response = await hub.execute_get_service("spyder-service", "kernel") + logger.warning(f'spyder-service-kernel-get: {spyder_service_response}') + + spyder_service_response = await hub.execute_post_service("spyder-service", "kernel") + logger.warning(f'spyder-service-kernel-post: {spyder_service_response}') + + key = spyder_service_response['key'] + + spyder_service_response = await hub.execute_get_service("spyder-service", f"kernel/{key}") + logger.warning(f'spyder-service-kernel-get: {spyder_service_response}') + + spyder_service_response = await hub.execute_delete_service("spyder-service", f"kernel/{key}") + logger.warning(f'spyder-service-kernel-delete: {spyder_service_response}') + + spyder_service_response = await hub.execute_get_service("spyder-service", "kernel") + logger.warning(f'spyder-service-kernel-get: {spyder_service_response}') + + finally: + if await hub.get_user(USERNAME) is not None: + await hub.delete_user(USERNAME) + +if __name__ == "__main__": + logging.basicConfig(level=logging.DEBUG) + loop = asyncio.get_event_loop() + 
loop.run_until_complete(test())
+    loop.close()
diff --git a/spyder/plugins/ipythonconsole/__init__.py b/spyder/plugins/ipythonconsole/__init__.py
index 56c49deb74e..2ff34ccd29d 100644
--- a/spyder/plugins/ipythonconsole/__init__.py
+++ b/spyder/plugins/ipythonconsole/__init__.py
@@ -11,7 +11,7 @@
 IPython Console plugin based on QtConsole
 """
 
-from spyder.config.base import is_stable_version, running_remoteclient_tests
+from spyder.config.base import is_stable_version
 
 
 # Use this variable, which corresponds to the html dash symbol, for any command
@@ -20,9 +20,7 @@
 _d = '-'
 
 # Required version of Spyder-kernels
-SPYDER_KERNELS_MIN_VERSION = (
-    "3.0.0" if running_remoteclient_tests() else "3.1.0.dev0"
-)
+SPYDER_KERNELS_MIN_VERSION = "3.1.0.dev0"
 SPYDER_KERNELS_MAX_VERSION = '3.2.0'
 SPYDER_KERNELS_VERSION = (
     f'>={SPYDER_KERNELS_MIN_VERSION},<{SPYDER_KERNELS_MAX_VERSION}'
diff --git a/spyder/plugins/remoteclient/tests/Dockerfile b/spyder/plugins/remoteclient/tests/Dockerfile
index 72ae9e0d6f9..9d803a4b78c 100644
--- a/spyder/plugins/remoteclient/tests/Dockerfile
+++ b/spyder/plugins/remoteclient/tests/Dockerfile
@@ -34,4 +34,12 @@ RUN ex +"%s/^%sudo.*$/%sudo ALL=(ALL:ALL) NOPASSWD:ALL/g" -scwq! /etc/sudoers
 USER ubuntu
 RUN ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519
+
+# Configure and install spyder-remote-services
+
+COPY --from=external-deps --chown=ubuntu spyder-remote-services /home/ubuntu/spyder-remote-services
+COPY --from=external-deps --chown=ubuntu spyder-kernels /home/ubuntu/spyder-kernels
+
+RUN bash /home/ubuntu/spyder-remote-services/scripts/installer_dev.sh /home/ubuntu/spyder-remote-services /home/ubuntu/spyder-kernels
+
 CMD ["/usr/bin/sudo", "/usr/sbin/sshd", "-D", "-o", "ListenAddress=172.16.128.2"]
diff --git a/spyder/plugins/remoteclient/tests/docker-compose.yml b/spyder/plugins/remoteclient/tests/docker-compose.yml
index 4f470411813..0ba20c27479 100644
--- a/spyder/plugins/remoteclient/tests/docker-compose.yml
+++ b/spyder/plugins/remoteclient/tests/docker-compose.yml
@@ -1,6 +1,13 @@
 services:
   test-spyder-remote-server:
-    build: .
+    build:
+      context: ./
+      additional_contexts:
+        external-deps: ../../../../external-deps
+      dockerfile: Dockerfile
+    volumes:
+      - "../../../../external-deps/spyder-remote-services:/home/ubuntu/spyder-remote-services"
+      - "../../../../external-deps/spyder-kernels:/home/ubuntu/spyder-kernels"
     ports:
       - "22"
     privileged: true # Required for /usr/sbin/init
diff --git a/spyder/plugins/remoteclient/utils/installation.py b/spyder/plugins/remoteclient/utils/installation.py
index 0a5aa4f80d2..b611a22a616 100644
--- a/spyder/plugins/remoteclient/utils/installation.py
+++ b/spyder/plugins/remoteclient/utils/installation.py
@@ -5,6 +5,7 @@
 # (see spyder/__init__.py for details)
 
 from spyder.plugins.ipythonconsole import SPYDER_KERNELS_VERSION
+from spyder.config.base import running_remoteclient_tests
 
 SERVER_ENTRY_POINT = "spyder-server"
@@ -21,6 +22,9 @@ def get_installer_command(platform: str) -> str:
     if platform == "win":
         raise NotImplementedError("Windows is not supported yet")
+
+    if running_remoteclient_tests():
+        return '\n'  # server should be already installed in the test environment
     return (
         f'"${{SHELL}}" <(curl -L {SCRIPT_URL}/installer.sh) '