Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
faa2246
feat: add new apispec for the new amalthea sessions (#360)
olevski Sep 20, 2024
a6e8243
feat!: expand environments specification (#338)
olevski Sep 20, 2024
f509340
feat: run notebooks in data service (#375)
olevski Sep 23, 2024
2e92ba6
feat: ensure notebook endpoints do their job (#388)
sgaist Sep 25, 2024
4de34f3
feat: use cloud storage from amalthea (#387)
olevski Sep 27, 2024
12c2ab2
feat: perf improvements for amalthea sessions (#411)
olevski Oct 1, 2024
34c99fa
refactor: add validation to project, storage, repo and session bluepr…
Panaetius Oct 7, 2024
2cf4d78
fix: allow session launcher parameters to be reset (#434)
olevski Oct 8, 2024
a03872e
fix: docker image check endpoint and gitlab authn (#439)
olevski Oct 8, 2024
a8bfb8e
fix: add hibernation and deletion time in status (#440)
olevski Oct 8, 2024
40b64fa
fix: find images when there is an oci index (#462)
olevski Oct 16, 2024
95cfa9c
Notebook: refactor blueprint business logic to core (#429)
sgaist Oct 17, 2024
684ab13
feat: add data connectors (#478)
olevski Oct 30, 2024
3e28eb9
fix: environment kind assignment (#511)
olevski Nov 6, 2024
03a24e0
fix: do not create storage secrets twice (#512)
olevski Nov 7, 2024
55ad740
fix: preserve the default url in the session spec (#514)
olevski Nov 7, 2024
0575d30
fix: migrate old session launchers (#513)
olevski Nov 7, 2024
f6fd293
fix: git connectors and reading logs (#518)
olevski Nov 8, 2024
8d63208
fix: add user secret key when using saved secrets (#519)
olevski Nov 8, 2024
1046f40
fix: run the /post-init.sh script if present
olevski Nov 11, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .devcontainer/.poetry_cache/.keep
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@

11 changes: 7 additions & 4 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
"command": "poetry self add poetry-polylith-plugin"
},
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
"ghcr.io/mpriscella/features/kind:1": {},
"ghcr.io/devcontainers-contrib/features/gh-release:1": {
"repo": "authzed/zed",
"binaryNames": "zed"
Expand All @@ -20,18 +19,22 @@
"ghcr.io/devcontainers/features/kubectl-helm-minikube:1": {
"minikube": "none"
},
"ghcr.io/eitsupi/devcontainer-features/jq-likes:2": {},
"ghcr.io/eitsupi/devcontainer-features/jq-likes:2": {
"jqVersion": "latest",
"yqVersion": "latest"
},
"ghcr.io/dhoeric/features/k9s:1": {},
"ghcr.io/EliiseS/devcontainer-features/bash-profile:1": {
"command": "alias k=kubectl"
},
"ghcr.io/devcontainers-contrib/features/rclone:1": {}
"ghcr.io/devcontainers-contrib/features/rclone:1": {},
"./k3d": {}
},
"overrideFeatureInstallOrder": [
"ghcr.io/devcontainers-contrib/features/poetry",
"ghcr.io/devcontainers-contrib/features/bash-command"
],
"postCreateCommand": "poetry install --with dev",
"postCreateCommand": "poetry install --with dev && mkdir -p /home/vscode/.config/k9s",
"customizations": {
"vscode": {
"extensions": [
Expand Down
9 changes: 8 additions & 1 deletion .devcontainer/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ services:
ZED_TOKEN: renku
ZED_INSECURE: "true"
POETRY_CACHE_DIR: "/poetry_cache"
NB_SERVER_OPTIONS__DEFAULTS_PATH: /workspace/server_defaults.json
NB_SERVER_OPTIONS__UI_CHOICES_PATH: /workspace/server_options.json
KUBECONFIG: "/workspace/.k3d-config.yaml"
network_mode: service:db
depends_on:
- db
Expand All @@ -33,16 +36,20 @@ services:
restart: unless-stopped
volumes:
- postgres-data:/var/lib/postgresql/data
- type: bind
source: ./generate_ulid_func.sql
target: /docker-entrypoint-initdb.d/generate_ulid_func.sql
environment:
POSTGRES_USER: renku
POSTGRES_DB: renku
POSTGRES_DB: postgres
POSTGRES_PASSWORD: renku
ports:
- "8000:8000"
- "5432:5432"
- "8080:8080"
- "5678:5678"
- "50051:50051"
- "8888:80"

swagger:
image: swaggerapi/swagger-ui
Expand Down
89 changes: 89 additions & 0 deletions .devcontainer/generate_ulid_func.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
-- Postgres init script (mounted into /docker-entrypoint-initdb.d via the
-- devcontainer docker-compose). It builds a template database that already
-- contains generate_ulid(), then creates the application database from that
-- template so every fresh "renku" database has the function from the start.
CREATE DATABASE renku_template;
ALTER DATABASE renku_template WITH is_template TRUE;
\c renku_template;

-- From https://github.com/geckoboard/pgulid/blob/master/pgulid.sql
-- Taken at commit sha b265253
-- pgulid is based on OK Log's Go implementation of the ULID spec
--
-- https://github.com/oklog/ulid
-- https://github.com/ulid/spec
--
-- Copyright 2016 The Oklog Authors
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

-- pgcrypto provides gen_random_bytes(), used below for the entropy bytes.
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- generate_ulid() returns a 26-character ULID string:
-- 6 bytes of millisecond Unix timestamp followed by 10 bytes of random
-- entropy, encoded in Crockford Base32 (5 bits per output character).
-- NOTE: REPLACE will error if you change the name, args or return type of the function
-- There is no CREATE IF EXISTS, this is the closest thing that gives similar functionality
CREATE OR REPLACE FUNCTION generate_ulid()
RETURNS TEXT
AS $$
DECLARE
-- Crockford's Base32
encoding BYTEA = '0123456789ABCDEFGHJKMNPQRSTVWXYZ';
-- 6 zero bytes, filled in below with the big-endian millisecond timestamp
timestamp BYTEA = E'\\000\\000\\000\\000\\000\\000';
output TEXT = '';

unix_time BIGINT;
ulid BYTEA;
BEGIN
-- 6 timestamp bytes
unix_time = (EXTRACT(EPOCH FROM CLOCK_TIMESTAMP()) * 1000)::BIGINT;
timestamp = SET_BYTE(timestamp, 0, (unix_time >> 40)::BIT(8)::INTEGER);
timestamp = SET_BYTE(timestamp, 1, (unix_time >> 32)::BIT(8)::INTEGER);
timestamp = SET_BYTE(timestamp, 2, (unix_time >> 24)::BIT(8)::INTEGER);
timestamp = SET_BYTE(timestamp, 3, (unix_time >> 16)::BIT(8)::INTEGER);
timestamp = SET_BYTE(timestamp, 4, (unix_time >> 8)::BIT(8)::INTEGER);
timestamp = SET_BYTE(timestamp, 5, unix_time::BIT(8)::INTEGER);

-- 10 entropy bytes
ulid = timestamp || gen_random_bytes(10);

-- Encode the timestamp
-- Each appended character takes 5 bits from the 16-byte ulid value; the
-- masks/shifts below walk those 128 bits from most- to least-significant
-- (the first character only carries the top 3 bits, since 26 * 5 = 130).
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 0) & 224) >> 5));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 0) & 31)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 1) & 248) >> 3));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 1) & 7) << 2) | ((GET_BYTE(ulid, 2) & 192) >> 6)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 2) & 62) >> 1));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 2) & 1) << 4) | ((GET_BYTE(ulid, 3) & 240) >> 4)));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 3) & 15) << 1) | ((GET_BYTE(ulid, 4) & 128) >> 7)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 4) & 124) >> 2));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 4) & 3) << 3) | ((GET_BYTE(ulid, 5) & 224) >> 5)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 5) & 31)));

-- Encode the entropy
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 6) & 248) >> 3));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 6) & 7) << 2) | ((GET_BYTE(ulid, 7) & 192) >> 6)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 7) & 62) >> 1));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 7) & 1) << 4) | ((GET_BYTE(ulid, 8) & 240) >> 4)));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 8) & 15) << 1) | ((GET_BYTE(ulid, 9) & 128) >> 7)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 9) & 124) >> 2));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 9) & 3) << 3) | ((GET_BYTE(ulid, 10) & 224) >> 5)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 10) & 31)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 11) & 248) >> 3));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 11) & 7) << 2) | ((GET_BYTE(ulid, 12) & 192) >> 6)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 12) & 62) >> 1));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 12) & 1) << 4) | ((GET_BYTE(ulid, 13) & 240) >> 4)));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 13) & 15) << 1) | ((GET_BYTE(ulid, 14) & 128) >> 7)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 14) & 124) >> 2));
output = output || CHR(GET_BYTE(encoding, ((GET_BYTE(ulid, 14) & 3) << 3) | ((GET_BYTE(ulid, 15) & 224) >> 5)));
output = output || CHR(GET_BYTE(encoding, (GET_BYTE(ulid, 15) & 31)));

RETURN output;
END
$$
LANGUAGE plpgsql
VOLATILE;

-- Create the application database from the template so it already contains
-- generate_ulid() (and anything else added to renku_template above).
CREATE DATABASE renku TEMPLATE renku_template;
17 changes: 17 additions & 0 deletions .devcontainer/k3d/devcontainer-feature.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
{
"id": "k3d",
"version": "1.0.0",
"name": "k3s-based Kubernetes cluster in Docker",
"postCreateCommand": "k3d --version",
"installsAfter": [
"ghcr.io/devcontainers-contrib/features/bash-command"
],
"options": {
"k3d_version": {
"type": "string",
"description": "k3d version to install",
"proposals": ["latest", "5.7.4"],
"default": "latest"
}
}
}
14 changes: 14 additions & 0 deletions .devcontainer/k3d/install.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Devcontainer feature install script for k3d (https://k3d.io).
# K3D_VERSION comes from the feature option "k3d_version":
#   "none"   -> skip installation entirely
#   "latest" -> install the newest upstream release
#   other    -> install that tagged release (a leading "v" is added if missing)
set -e

if [ "${K3D_VERSION}" != "none" ]; then
    echo "Downloading k3d..."
    if [ "${K3D_VERSION}" = "latest" ]; then
        # The upstream installer picks the latest release when TAG is unset.
        curl -sSL https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash
    else
        # Fixed repo URL: k3d lives at k3d-io/k3d (kubernetes/K3D does not exist,
        # so version resolution against it would always fail).
        # NOTE(review): find_version_from_git_tags is a devcontainer-feature
        # library helper that is not defined in this script -- confirm the
        # feature framework sources it, otherwise this branch will error.
        find_version_from_git_tags K3D_VERSION https://github.com/k3d-io/k3d
        if [ "${K3D_VERSION::1}" != "v" ]; then
            K3D_VERSION="v${K3D_VERSION}"
        fi
        curl -sSL https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | TAG="${K3D_VERSION}" bash
    fi
fi
4 changes: 4 additions & 0 deletions .github/workflows/acceptance-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ jobs:
renku-graph: ${{ steps.deploy-comment.outputs.renku-graph}}
renku-notebooks: ${{ steps.deploy-comment.outputs.renku-notebooks}}
renku-ui: ${{ steps.deploy-comment.outputs.renku-ui}}
amalthea-sessions: ${{ steps.deploy-comment.outputs.amalthea-sessions}}
amalthea: ${{ steps.deploy-comment.outputs.amalthea}}
test-enabled: ${{ steps.deploy-comment.outputs.test-enabled}}
test-cypress-enabled: ${{ steps.deploy-comment.outputs.test-cypress-enabled}}
persist: ${{ steps.deploy-comment.outputs.persist}}
Expand Down Expand Up @@ -84,6 +86,8 @@ jobs:
renku_graph: "${{ needs.check-deploy.outputs.renku-graph }}"
renku_notebooks: "${{ needs.check-deploy.outputs.renku-notebooks }}"
renku_data_services: "@${{ github.head_ref }}"
amalthea: "${{ needs.check-deploy.outputs.amalthea }}"
amalthea_sessions: "${{ needs.check-deploy.outputs.amalthea-sessions }}"
extra_values: "${{ needs.check-deploy.outputs.extra-values }}"

selenium-acceptance-tests:
Expand Down
41 changes: 41 additions & 0 deletions .github/workflows/save_cache.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
name: Create cache from commits on main

on:
push:
branches:
- main
- chore-add-kind
workflow_dispatch:


jobs:
save-poetry-cache:
runs-on: ubuntu-latest
env:
CACHE_KEY: main-branch-poetry-cache-ubuntu
CACHE_PATH: .devcontainer/.poetry_cache
DEVCONTAINER_IMAGE_CACHE: ghcr.io/swissdatasciencecenter/renku-data-services/devcontainer

steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install python deps
uses: devcontainers/ci@v0.3
with:
runCmd: poetry install --with dev
push: always
skipContainerUserIdUpdate: false
imageName: ${{ env.DEVCONTAINER_IMAGE_CACHE }}
cacheFrom: ${{ env.DEVCONTAINER_IMAGE_CACHE }}
- uses: actions/cache/save@v3
name: Create cache
with:
path: ${{ env.CACHE_PATH }}
key: ${{ env.CACHE_KEY }}
20 changes: 20 additions & 0 deletions .github/workflows/test_publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,11 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/cache/restore@v3
name: Restore cache
with:
path: ${{ env.CACHE_PATH }}
key: ${{ env.CACHE_KEY }}
- name: Set Git config
shell: bash
run: |
Expand All @@ -90,6 +95,11 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/cache/restore@v3
name: Restore cache
with:
path: ${{ env.CACHE_PATH }}
key: ${{ env.CACHE_KEY }}
- name: Set Git config
shell: bash
run: |
Expand All @@ -111,6 +121,11 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/cache/restore@v3
name: Restore cache
with:
path: ${{ env.CACHE_PATH }}
key: ${{ env.CACHE_KEY }}
- name: Set Git config
shell: bash
run: |
Expand Down Expand Up @@ -155,6 +170,11 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/cache/restore@v3
name: Restore cache
with:
path: ${{ env.CACHE_PATH }}
key: ${{ env.CACHE_KEY }}
- name: Set Git config
shell: bash
run: |
Expand Down
9 changes: 9 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -79,3 +79,12 @@ docker-compose.override.yml
# nix
result
*.qcow2

# tests
.k3d-config.yaml

# Misc
*.pem
*.gz
*.tgz
.dmypy.json
2 changes: 2 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@ repos:
- id: check-toml
- id: debug-statements
- id: end-of-file-fixer
exclude: 'components/renku_data_services/message_queue/(avro_models|schemas)'
- id: mixed-line-ending
- id: trailing-whitespace
exclude: 'components/renku_data_services/message_queue/(avro_models|schemas)'
- repo: https://github.com/asottile/yesqa
rev: v1.5.0
hooks:
Expand Down
3 changes: 1 addition & 2 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,7 @@ When reporting a bug, please include the following information:
If you do not already have a development environment set up, you will probably find the [developer documentation](DEVELOPING.md) helpful.

* Before submitting a pull request, please make sure you agree to the license and have submitted a signed [contributor license agreement](https://github.com/SwissDataScienceCenter/renku/wiki/Legal)
* PRs should include a short, descriptive title. The titles will be used to compile changelogs for releases, so think about the title in that context. The title should be formatted using the [Conventional Commits](https://www.conventionalcommits.org/) style
* PRs should include a short, descriptive title. The titles will be used to compile changelogs for releases, so think about the title in that context. The title should be formatted using the [Conventional Commits](https://www.conventionalcommits.org/) style
* Small improvements need not reference an issue, but PRs that introduce larger changes or add new functionality should refer to an issue
* Structure your commits in meaningful units, each with an understandable purpose and coherent commit message. For example, if your proposed changes contain a refactoring and a new feature, make two PRs
* Format commit messages using the [Conventional Commits](https://www.conventionalcommits.org/) style

Loading
Loading