diff --git a/.circleci/config.yml b/.circleci/config.yml index 208cb523d..486a1c7f5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -69,7 +69,7 @@ jobs: - run: name: "Tests: Run unit/integration tests (excluding e2e)" - command: docker compose exec django py.test src/ -m "not e2e" + command: docker compose exec django py.test src/ # We give the name of the test files manually because we need test_auth.py to be run before the others for state.json file to be created # CI="true" to skip some tests that fail in the CI for now diff --git a/.env_circleci b/.env_circleci index f56adf935..9ec59cd51 100644 --- a/.env_circleci +++ b/.env_circleci @@ -1,5 +1,3 @@ -SECRET_KEY=change-this-secret - DB_HOST=db DB_NAME=postgres DB_USERNAME=postgres @@ -35,4 +33,4 @@ DJANGO_SUPERUSER_EMAIL=test@test.com DJANGO_SUPERUSER_USERNAME=codabench DOMAIN_NAME=localhost:80 TLS_EMAIL=your@email.com -SUBMISSIONS_API_URL=http://django:8000/api \ No newline at end of file +SUBMISSIONS_API_URL=http://django:8000/api diff --git a/.env_sample b/.env_sample index 6bd01cfbd..0100ffc37 100644 --- a/.env_sample +++ b/.env_sample @@ -1,7 +1,5 @@ -SECRET_KEY=change-this-secret - # For local setup and debug -DEBUG=True +DEBUG=False # Database DB_HOST=db @@ -71,7 +69,7 @@ AWS_STORAGE_PRIVATE_BUCKET_NAME=private AWS_S3_ENDPOINT_URL=http://minio:9000/ AWS_QUERYSTRING_AUTH=False # Optional URL rewriting in compute worker, format: FROM | TO -#WORKER_BUNDLE_URL_REWRITE=http://localhost:9000|http://minio:9000 +#WORKER_BUNDLE_URL_REWRITE=http://localhost:9000/|http://minio:9000/ # ----------------------------------------------------------------------------- diff --git a/.gitignore b/.gitignore index 11852a1b9..b246dbe90 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,8 @@ src/static/generated/* db.sqlite3 celerybeat-schedule +celerybeat-schedule-shm +celerybeat-schedule-wal celerybeat-schedule.db package-lock.json artifacts/ @@ -44,3 +46,4 @@ caddy_data/ home_page_counters.json 
my-postgres.conf tests/config/state.json + diff --git a/charts/Chart.yaml b/charts/Chart.yaml new file mode 100644 index 000000000..3081bd676 --- /dev/null +++ b/charts/Chart.yaml @@ -0,0 +1,34 @@ +apiVersion: v2 +name: codabench-chart +description: A Helm chart for Kubernetes + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. 
+appVersion: "1.16.0" + +dependencies: + - name: rabbitmq + version: "14.7.0" + repository: "oci://registry-1.docker.io/bitnamicharts" + condition: rabbitmq.enabled + - name: redis + version: "19.5.4" + repository: "oci://registry-1.docker.io/bitnamicharts" + condition: redis.enabled diff --git a/charts/templates/app-state-pvc.yaml b/charts/templates/app-state-pvc.yaml new file mode 100644 index 000000000..8e8fa52b8 --- /dev/null +++ b/charts/templates/app-state-pvc.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ .Values.appState.pvcName }} +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: {{ .Values.appState.storage }} + storageClassName: {{ .Values.appState.storageClass }} diff --git a/charts/templates/compute-worker-deployment.yaml b/charts/templates/compute-worker-deployment.yaml new file mode 100644 index 000000000..8a6ddaab8 --- /dev/null +++ b/charts/templates/compute-worker-deployment.yaml @@ -0,0 +1,55 @@ +{{- range .Values.compute_worker.brokers }} +{{- $isDefault := eq .name "default" }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: compute-worker{{ if not $isDefault }}-{{ .name }}{{ end }} + labels: + app: compute-worker +spec: + replicas: 1 + selector: + matchLabels: + app: compute-worker + template: + metadata: + labels: + app: compute-worker + spec: + serviceAccountName: compute-worker-sa + containers: + - name: compute-worker + image: "{{ $.Values.compute_worker.image.repository }}:{{ $.Values.compute_worker.image.tag }}" + imagePullPolicy: {{ $.Values.compute_worker.image.pullPolicy | default "IfNotPresent" }} + command: + - bash + - -c + - > + watchmedo auto-restart -p '*.py' --recursive -- celery -A compute_worker worker -l info -Q compute-worker -n compute-worker{{ if not $isDefault }}-{{ .name }}{{ end }}@%n + workingDir: /app + env: + - name: BROKER_URL + value: "{{ if .url }}{{ .url }}{{ else }}pyamqp://{{ $.Values.env.RABBITMQ_DEFAULT_USER }}:{{ 
$.Values.env.RABBITMQ_DEFAULT_PASS }}@{{ $.Values.env.RABBITMQ_HOST }}:{{ $.Values.env.RABBITMQ_PORT }}//{{ end }}" + - name: CODALAB_IGNORE_CLEANUP_STEP + value: "1" + {{- range $key, $value := $.Values.env }} + - name: {{ $key }} + value: "{{ $value }}" + {{- end }} + resources: + {{- toYaml $.Values.compute_worker.resources | nindent 12 }} + volumeMounts: + - name: docker-socket + mountPath: /var/run/docker.sock + - name: codabench-storage + mountPath: /codabench + volumes: + - name: docker-socket + hostPath: + path: /var/run/docker.sock + type: Socket + - name: codabench-storage + persistentVolumeClaim: + claimName: {{ $.Values.compute_worker.volumes.pvcName }} +{{- end }} diff --git a/charts/templates/compute-worker-rbac.yaml b/charts/templates/compute-worker-rbac.yaml new file mode 100644 index 000000000..519db211e --- /dev/null +++ b/charts/templates/compute-worker-rbac.yaml @@ -0,0 +1,35 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: compute-worker-sa + namespace: {{ .Release.Namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: compute-worker-role + namespace: {{ .Release.Namespace }} +rules: + - apiGroups: ["batch"] + resources: ["jobs"] + verbs: ["create", "get", "list", "watch", "delete"] + - apiGroups: [""] + resources: ["pods"] + verbs: ["get", "list", "watch", "delete"] + - apiGroups: [""] + resources: ["pods/exec", "pods/log"] + verbs: ["create", "get", "list"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: compute-worker-bind + namespace: {{ .Release.Namespace }} +subjects: + - kind: ServiceAccount + name: compute-worker-sa + namespace: {{ .Release.Namespace }} +roleRef: + kind: Role + name: compute-worker-role + apiGroup: rbac.authorization.k8s.io diff --git a/charts/templates/django-deployment.yaml b/charts/templates/django-deployment.yaml new file mode 100644 index 000000000..51e9c4109 --- /dev/null +++ b/charts/templates/django-deployment.yaml @@ -0,0 +1,41 
@@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: django +spec: + replicas: {{ .Values.django.replicas }} + selector: + matchLabels: + app: django + template: + metadata: + labels: + app: django + spec: + containers: + - name: django + image: "{{ .Values.django.image.repository }}:{{ .Values.django.image.tag }}" + imagePullPolicy: {{ .Values.django.image.pullPolicy }} + command: + - bash + - -c + - > + python manage.py migrate --no-input && + python manage.py collectstatic --no-input && + cd {{ .Values.django.workingDir }} && + watchmedo auto-restart -p '*.py' --recursive -- + python3 ./gunicorn_run.py + env: + {{- range $key, $value := .Values.env }} + - name: {{ $key }} + value: "{{ $value }}" + {{- end }} + - name: DATABASE_URL + value: "postgres://{{ .Values.db.username }}:{{ .Values.db.password }}@{{ .Values.db.host }}:{{ .Values.db.port }}/{{ .Values.db.name }}" + volumeMounts: + - name: app-state + mountPath: /app/app-state + volumes: + - name: app-state + persistentVolumeClaim: + claimName: {{ .Values.appState.pvcName }} diff --git a/charts/templates/django-service.yaml b/charts/templates/django-service.yaml new file mode 100644 index 000000000..08a94857d --- /dev/null +++ b/charts/templates/django-service.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: Service +metadata: + name: django +spec: + selector: + app: django + ports: + - protocol: TCP + port: 8000 + targetPort: 8000 + diff --git a/charts/templates/flower-deployment.yaml b/charts/templates/flower-deployment.yaml new file mode 100644 index 000000000..401ba3173 --- /dev/null +++ b/charts/templates/flower-deployment.yaml @@ -0,0 +1,27 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: flower + labels: + app: flower +spec: + replicas: {{ .Values.flower.replicas }} + selector: + matchLabels: + app: flower + template: + metadata: + labels: + app: flower + spec: + containers: + - name: flower + image: "{{ .Values.flower.image.repository }}:{{ .Values.flower.image.tag }}" + 
imagePullPolicy: {{ .Values.flower.image.pullPolicy }} + ports: + - containerPort: {{ .Values.flower.service.port }} + env: + - name: CELERY_BROKER_URL + value: "pyamqp://{{ .Values.env.RABBITMQ_DEFAULT_USER }}:{{ .Values.env.RABBITMQ_DEFAULT_PASS }}@{{ .Values.env.RABBITMQ_HOST }}:{{ .Values.env.RABBITMQ_PORT }}//" + - name: FLOWER_PORT + value: "{{ .Values.flower.service.port }}" diff --git a/charts/templates/flower-service.yaml b/charts/templates/flower-service.yaml new file mode 100644 index 000000000..5ec854ec4 --- /dev/null +++ b/charts/templates/flower-service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: flower + labels: + app: flower +spec: + type: ClusterIP + ports: + - port: {{ .Values.flower.service.port }} + targetPort: 5555 + selector: + app: flower \ No newline at end of file diff --git a/charts/templates/ingress.yaml b/charts/templates/ingress.yaml new file mode 100644 index 000000000..ae722a430 --- /dev/null +++ b/charts/templates/ingress.yaml @@ -0,0 +1,7 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: sample-http-ingress +spec: + {{- toYaml .Values.ingress.spec | nindent 2 }} + diff --git a/charts/templates/istio.yaml b/charts/templates/istio.yaml new file mode 100644 index 000000000..19343026c --- /dev/null +++ b/charts/templates/istio.yaml @@ -0,0 +1,11 @@ +{{- if .Values.istio.enableVirtualService}} +apiVersion: networking.istio.io/v1alpha3 +kind: VirtualService +metadata: + name: codabench-vs +spec: + {{- toYaml .Values.istio.spec | nindent 12 }} + +{{- else }} +{{- end }} + diff --git a/charts/templates/shared-pvc.yaml b/charts/templates/shared-pvc.yaml new file mode 100644 index 000000000..ab12b14a7 --- /dev/null +++ b/charts/templates/shared-pvc.yaml @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ .Values.sharedJob.pvcName }} +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: {{ .Values.sharedJob.storage }} + storageClassName: {{ 
.Values.sharedJob.storageClass }} + diff --git a/charts/templates/site-worker-deployment.yaml b/charts/templates/site-worker-deployment.yaml new file mode 100644 index 000000000..720a83d0c --- /dev/null +++ b/charts/templates/site-worker-deployment.yaml @@ -0,0 +1,54 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: site-worker + labels: + app: site-worker +spec: + replicas: {{ .Values.siteWorker.replicas }} + selector: + matchLabels: + app: site-worker + template: + metadata: + labels: + app: site-worker + {{- if .Values.siteWorker.metadata }} + {{- with .Values.siteWorker.metadata.labels }} + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} + spec: + {{- with .Values.siteWorker.securityContext }} + securityContext: + {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: site-worker + image: "{{ .Values.siteWorker.image.repository }}:{{ .Values.siteWorker.image.tag }}" + imagePullPolicy: {{ .Values.siteWorker.image.pullPolicy }} + workingDir: {{ .Values.siteWorker.workingDir }} + env: + - name: PYTHONPATH + value: {{ .Values.siteWorker.workingDir }} + - name: DATABASE_URL + value: "postgres://{{ .Values.db.username }}:{{ .Values.db.password }}@{{ .Values.db.host }}:{{ .Values.db.port }}/{{ .Values.db.name }}" + {{- range $key, $value := .Values.env }} + - name: {{ $key }} + value: "{{ $value }}" + {{- end }} + command: + - bash + - -c + - > + watchmedo auto-restart -p '*.py' --recursive -- + celery -A celery_config worker -B -Q site-worker -l info + -n site-worker@%n --concurrency={{ .Values.siteWorker.concurrency }} + volumeMounts: + - name: app-state + mountPath: /app/app-state + volumes: + - name: app-state + persistentVolumeClaim: + claimName: {{ .Values.siteWorker.volumes.pvcName }} + diff --git a/charts/values.yaml b/charts/values.yaml new file mode 100644 index 000000000..41d5d25e9 --- /dev/null +++ b/charts/values.yaml @@ -0,0 +1,166 @@ +istio: + enableVirtualService: false + +ingress: + spec: + ingressClassName: # e.g. 
nginx or traefik + rules: + - http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: django + port: + number: 8000 + +django: + image: + repository: + pullPolicy: Always + tag: + replicas: 1 + port: 8000 + workingDir: /app/src + +flower: + image: + repository: mher/flower + tag: latest + pullPolicy: Always + replicas: 1 + service: + port: 5555 + +compute_worker: + image: + repository: + tag: + pullPolicy: Always + podCreationRetries: + numberOfRetries: 30 + sleepTimeBetweenRetries: 10 + submissionPods: + securityContext: + runAsUser: 1000 + runAsGroup: 1000 + fsGroup: 1000 + resources: + requests: + memory: 256Mi + limits: + memory: 512Mi + volumes: + pvcName: shared-job-pvc + brokers: + - name: "default" + gpu: + enabled: false + +siteWorker: + image: + repository: + tag: + pullPolicy: Always + replicas: 1 + workingDir: /app/src + concurrency: 2 + securityContext: + runAsUser: 0 + runAsGroup: 0 + fsGroup: 0 + volumes: + pvcName: app-state + +redis: + image: + pullPolicy: Always + enabled: true + auth: + enabled: false + architecture: standalone + master: + persistence: + enabled: false + storageClass: + size: 1Gi + service: + ports: + redis: 6379 + +rabbitmq: + image: + pullPolicy: Always + auth: + username: rabbit-username + password: rabbit-password-you-should-change + service: + ports: + amqp: 5672 + persistence: + enabled: true + storageClass: + size: 1Gi + extraEnvVars: + - name: RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS + value: "-rabbit consumer_timeout 100000000" + +global: + defaultStorageClass: + +sharedJob: + storageClass: + pvcName: shared-job-pvc + storage: 1Gi + +appState: + storageClass: + pvcName: app-state-pvc + storage: 1Gi + +db: + host: + name: + username: + password: + port: + +env: + LOG_LEVEL: "info" + + SECRET_KEY: + + DJANGO_SETTINGS_MODULE: settings.develop + SUBMISSIONS_API_URL: http://django:8000/api + MAX_EXECUTION_TIME_LIMIT: "600" + + DOMAIN_NAME: + + TLS_EMAIL: + + RABBITMQ_HOST: + RABBITMQ_DEFAULT_USER: + 
RABBITMQ_DEFAULT_PASS: + RABBITMQ_MANAGEMENT_PORT: "15672" + RABBITMQ_PORT: "5672" + + FLOWER_PUBLIC_PORT: "5555" + FLOWER_BASIC_AUTH: + + SELENIUM_HOSTNAME: + + RERUN_SUBMISSION_LIMIT: "30" + + ENABLE_SIGN_UP: "True" + ENABLE_SIGN_IN: "True" + + STORAGE_TYPE: s3 + AWS_ACCESS_KEY_ID: + AWS_SECRET_ACCESS_KEY: + AWS_STORAGE_BUCKET_NAME: + AWS_STORAGE_PRIVATE_BUCKET_NAME: + AWS_S3_ENDPOINT_URL: + AWS_QUERYSTRING_AUTH: "False" + + REDIS_URL: redis://:6379 diff --git a/compute_worker/compute_worker.py b/compute_worker/compute_worker.py index 2e39c1800..573e72a94 100644 --- a/compute_worker/compute_worker.py +++ b/compute_worker/compute_worker.py @@ -3,6 +3,7 @@ import hashlib import json import os +import traceback import shutil import signal import socket @@ -18,7 +19,6 @@ from rich.progress import Progress from rich.pretty import pprint import requests - import websockets import yaml from billiard.exceptions import SoftTimeLimitExceeded @@ -89,22 +89,31 @@ def show_progress(line, progress): try: - if "Status: Image is up to date" in line["status"]: - logger.info(line["status"]) + status = line.get("status") or "" + layer_id = line.get("id") + detail = line.get("progressDetail") or {} + current = detail.get("current") + total = detail.get("total") + + if "Status: Image is up to date" in status: + logger.info(status) + + if not layer_id: + return completed = False - if line["status"] == "Download complete": + if status == "Download complete": description = ( - f"[blue][Download complete, waiting for extraction {line['id']}]" + f"[blue][Download complete, waiting for extraction {layer_id}]" ) completed = True - elif line["status"] == "Downloading": - description = f"[bold][Downloading {line['id']}]" - elif line["status"] == "Pull complete": - description = f"[green][Extraction complete {line['id']}]" + elif status == "Downloading": + description = f"[bold][Downloading {layer_id}]" + elif status == "Pull complete": + description = f"[green][Extraction complete 
{layer_id}]" completed = True - elif line["status"] == "Extracting": - description = f"[blue][Extracting {line['id']}]" + elif status == "Extracting": + description = f"[blue][Extracting {layer_id}]" else: # skip other statuses, but show extraction progress @@ -121,7 +130,7 @@ def show_progress(line, progress): ) else: tasks[task_id] = progress.add_task( - description, total=line["progressDetail"]["total"] + description, total=total ) else: if completed: @@ -134,12 +143,12 @@ def show_progress(line, progress): else: progress.update( tasks[task_id], - completed=line["progressDetail"]["current"], - total=line["progressDetail"]["total"], + completed=current, + total=total, ) except Exception as e: - logger.error("There was an error showing the progress bar") - logger.error(e) + if os.environ.get("LOG_LEVEL", "info").lower() == "debug": + logger.exception("There was an error showing the progress bar") # ----------------------------------------------- @@ -240,9 +249,10 @@ def rewrite_bundle_url_if_needed(url): # ----------------------------------------------------------------------------- @shared_task(name="compute_worker_run") def run_wrapper(run_args): + # We need to convert the UUID given by celery into a byte like object otherwise things will break + run_args.update(secret=str(run_args["secret"])) logger.info(f"Received run arguments: \n {colorize_run_args(json.dumps(run_args))}") run = Run(run_args) - try: run.prepare() run.start() @@ -250,12 +260,37 @@ def run_wrapper(run_args): run.push_scores() run.push_output() except DockerImagePullException as e: - run._update_status(STATUS_FAILED, str(e)) - except SubmissionException as e: - run._update_status(STATUS_FAILED, str(e)) + msg = str(e).strip() + if msg: + msg = f"Docker image pull failed: {msg}" + else: + msg = "Docker image pull failed." 
+ run._update_status(STATUS_FAILED, extra_information=msg) + raise except SoftTimeLimitExceeded: - run._update_status(STATUS_FAILED, "Soft time limit exceeded!") + run._update_status( + STATUS_FAILED, + extra_information="Execution time limit exceeded.", + ) + raise + except SubmissionException as e: + msg = str(e).strip() + if msg: + msg = f"Submission failed: {msg}. See logs for more details." + else: + msg = "Submission failed. See logs for more details." + run._update_status(STATUS_FAILED, extra_information=msg) + raise + except Exception as e: + # Catch any exception to avoid getting stuck in Running status + run._update_status(STATUS_FAILED, extra_information=traceback.format_exc()) + raise finally: + try: + # Try to push logs before cleanup + run.push_logs() + except Exception: + logger.exception("push_logs failed") run.clean_up() @@ -294,16 +329,22 @@ def md5(filename): def get_folder_size_in_gb(folder): + # Check if the folder exists; if not, return 0 GB if not os.path.exists(folder): return 0 - total_size = os.path.getsize(folder) - for item in os.listdir(folder): - path = os.path.join(folder, item) - if os.path.isfile(path): - total_size += os.path.getsize(path) - elif os.path.isdir(path): - total_size += get_folder_size_in_gb(path) - return total_size / 1000 / 1000 / 1000 # GB: decimal system (1000^3) + + total_size = 0 # Initialize total size accumulator (in bytes) + + # Walk through the folder and all its subdirectories + for root, dirs, files in os.walk(folder): + for f in files: + # Construct full path to the file + fp = os.path.join(root, f) + # Add the file size to total_size + total_size += os.path.getsize(fp) + + # Convert bytes to gigabytes using decimal system (1 GB = 1000^3 bytes) + return total_size / (1000 ** 3) def delete_files_in_folder(folder): @@ -444,6 +485,22 @@ async def watch_detailed_results(self): if file_path: await self.send_detailed_results(file_path) + def push_logs(self): + """Upload any collected logs, even in case of crash. 
+ """ + try: + for kind, logs in (self.logs or {}).items(): + for stream_key in ("stdout", "stderr"): + entry = logs.get(stream_key) if isinstance(logs, dict) else None + if not entry: + continue + location = entry.get("location") + data = entry.get("data") or b"" + if location: + self._put_file(location, raw_data=data) + except Exception as e: + logger.exception(f"Failed best-effort log upload: {e}") + def get_detailed_results_file_path(self): default_detailed_results_path = os.path.join( self.output_dir, "detailed_results.html" @@ -465,7 +522,7 @@ async def send_detailed_results(self, file_path): ) websocket_url = f"{self.websocket_url}?kind=detailed_results" logger.info(f"Connecting to {websocket_url} for detailed results") - # Wrap this with a Try ... Except otherwise a failure here will make the submission get stuck on Running + # Wrap this with a Try block to avoid getting stuck on Running try: websocket = await asyncio.wait_for( websockets.connect(websocket_url), timeout=30.0 @@ -478,14 +535,8 @@ async def send_detailed_results(self, file_path): ) ) except Exception as e: - logger.error( - f"This error might result in a Execution Time Exceeded error: {e}" - ) - if os.environ.get("LOG_LEVEL", "info").lower() == "debug": - logger.exception(e) - raise SubmissionException( - "Could not connect to instance to update detailed result" - ) + logger.exception(e) + return def _get_stdout_stderr_file_names(self, run_args): # run_args should be the run_args argument passed to __init__ from the run_wrapper. 
@@ -511,7 +562,7 @@ def _update_submission(self, data): logger.info(f"Updating submission @ {url} with data = {data}") - resp = self.requests_session.patch(url, data, timeout=150) + resp = self.requests_session.patch(url, data=data, timeout=150) if resp.status_code == 200: logger.info("Submission updated successfully!") else: @@ -521,23 +572,17 @@ def _update_submission(self, data): raise SubmissionException("Failure updating submission data.") def _update_status(self, status, extra_information=None): + # Update submission status if status not in AVAILABLE_STATUSES: raise SubmissionException( f"Status '{status}' is not in available statuses: {AVAILABLE_STATUSES}" ) - - data = { - "status": status, - "status_details": extra_information, - } - - # TODO: figure out if we should pull this task code later(submission.task should always be set) - # When we start - # if status == STATUS_SCORING: - # data.update({ - # "task_pk": self.task_pk, - # }) - self._update_submission(data) + data = {"status": status, "status_details": extra_information} + try: + self._update_submission(data) + except Exception as e: + # Always catch exception and never raise error + logger.exception(f"Failed to update submission status to {status}: {e}") def _get_container_image(self, image_name): logger.info("Running pull for image: {}".format(image_name)) @@ -547,6 +592,8 @@ def _get_container_image(self, image_name): with Progress() as progress: resp = client.pull(image_name, stream=True, decode=True) for line in resp: + if isinstance(line, dict) and line.get("error"): + raise DockerImagePullException(line["error"]) show_progress(line, progress) break # Break if the loop is successful to exit "with Progress() as progress" @@ -682,8 +729,13 @@ async def _run_container_engine_cmd(self, container, kind): # Creating this and setting 2 values to None in case there is not enough time for the worker to get logs, otherwise we will have errors later on logs_Unified = [None, None] + # To store on-going 
logs and avoid empty logs returning to the platform + stdout_chunks = [] + stderr_chunks = [] + # Create a websocket to send the logs in real time to the codabench instance # We need to set a timeout for the websocket connection otherwise the program will get stuck if he websocket does not connect. + websocket = None try: websocket_url = f"{self.websocket_url}?kind={kind}" logger.debug( @@ -730,21 +782,27 @@ async def _run_container_engine_cmd(self, container, kind): "Show the logs and stream them to codabench " + container.get("Id") ) for log in container_LogsDemux: - if str(log[0]) != "None": + # Output + if log[0] is not None: + stdout_chunks.append(log[0]) logger.info(log[0].decode()) try: - await websocket.send( - json.dumps({"kind": kind, "message": log[0].decode()}) - ) + if websocket is not None: + await websocket.send( + json.dumps({"kind": kind, "message": log[0].decode()}) + ) except Exception as e: logger.error(e) - - elif str(log[1]) != "None": + + # Errors + elif log[1] is not None: + stderr_chunks.append(log[1]) logger.error(log[1].decode()) try: - await websocket.send( - json.dumps({"kind": kind, "message": log[1].decode()}) - ) + if websocket is not None: + await websocket.send( + json.dumps({"kind": kind, "message": log[1].decode()}) + ) except Exception as e: logger.error(e) @@ -760,12 +818,17 @@ async def _run_container_engine_cmd(self, container, kind): # Get the return code of the competition container once done try: # Gets the logs of the container, sperating stdout and stderr (first and second position) thanks for demux=True - logs_Unified = client.attach(container, logs=True, demux=True) return_Code = client.wait(container) + logs_Unified = (b"".join(stdout_chunks), b"".join(stderr_chunks)) logger.debug( f"WORKER_MARKER: Disconnecting from {websocket_url}, program counter = {self.completed_program_counter}" ) - await websocket.close() + if websocket is not None: + try: + await websocket.close() + await websocket.wait_closed() + except 
Exception as e: + logger.error(e) client.remove_container(container, force=True) logger.debug( @@ -783,6 +846,13 @@ async def _run_container_engine_cmd(self, container, kind): logger.error(e) return_Code = {"StatusCode": 1} + finally: + try: + # Last chance of removing container + client.remove_container(container_id, force=True) + except Exception: + pass + self.logs[kind] = { "returncode": return_Code["StatusCode"], "start": start, @@ -1053,9 +1123,8 @@ async def _run_program_directory(self, program_dir, kind): try: return await self._run_container_engine_cmd(container, kind=kind) except Exception as e: - logger.error(e) - if os.environ.get("LOG_LEVEL", "info").lower() == "debug": - logger.exception(e) + logger.exception("Program directory execution failed") + raise SubmissionException(str(e)) def _put_dir(self, url, directory): """Zip the directory and send it to the given URL using _put_file.""" @@ -1097,7 +1166,7 @@ def _put_file(self, url, file=None, raw_data=None, content_type="application/zip logger.info("Putting file %s in %s" % (file, url)) data = open(file, "rb") headers["Content-Length"] = str(os.path.getsize(file)) - elif raw_data: + elif raw_data is not None: logger.info("Putting raw data %s in %s" % (raw_data, url)) data = raw_data else: @@ -1183,21 +1252,23 @@ def start(self): logger.info("Running scoring program, and then ingestion program") loop = asyncio.new_event_loop() + # Set the event loop for the gather + asyncio.set_event_loop(loop) gathered_tasks = asyncio.gather( self._run_program_directory(program_dir, kind="program"), self._run_program_directory(ingestion_program_dir, kind="ingestion"), self.watch_detailed_results(), - loop=loop, return_exceptions=True, ) - task_results = [] # will store results/exceptions from gather signal.signal(signal.SIGALRM, alarm_handler) signal.alarm(self.execution_time_limit) + try: # run tasks # keep what gather returned so we can detect async errors later task_results = loop.run_until_complete(gathered_tasks) 
or [] + except ExecutionTimeLimitExceeded: error_message = f"Execution Time Limit exceeded. Limit was {self.execution_time_limit} seconds" logger.error(error_message) @@ -1231,8 +1302,25 @@ def start(self): # Send error through web socket to the frontend asyncio.run(self._send_data_through_socket(error_message)) raise SubmissionException(error_message) + finally: + signal.alarm(0) self.watch = False + + # Cancel any remaining pending tasks before closing the loop + pending = [t for t in asyncio.all_tasks(loop) if not t.done()] + for task in pending: + task.cancel() + if pending: + try: + loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True)) + except Exception: + pass + + # Close loop + asyncio.set_event_loop(None) + loop.close() + for kind, logs in self.logs.items(): if logs["end"] is not None: elapsed_time = logs["end"] - logs["start"] @@ -1278,6 +1366,9 @@ def start(self): # set logs of this kind to None, since we handled them already logger.info("Program finished") signal.alarm(0) + # Ensure loop is cleaned up + loop.close() + asyncio.set_event_loop(None) if self.is_scoring: # Check if scoring program failed @@ -1287,7 +1378,7 @@ def start(self): program_results, BaseException ) and not isinstance(program_results, asyncio.CancelledError) program_rc = getattr(self, "program_exit_code", None) - failed_rc = program_rc not in (0, None) + failed_rc = (program_rc is None) or (program_rc != 0) if had_async_exc or failed_rc: self._update_status( STATUS_FAILED, @@ -1296,6 +1387,7 @@ def start(self): # Raise so upstream marks failed immediately raise SubmissionException("Child task failed or non-zero return code") self._update_status(STATUS_FINISHED) + else: self._update_status(STATUS_SCORING) diff --git a/compute_worker/pyproject.toml b/compute_worker/pyproject.toml index fbc8a534a..ea41563ce 100644 --- a/compute_worker/pyproject.toml +++ b/compute_worker/pyproject.toml @@ -2,20 +2,21 @@ authors = [ {name = "codalab"}, ] -requires-python = "==3.9.20" 
+requires-python = "==3.13.11" dependencies = [ - "celery==5.2.2", + "celery==5.6.2", "requests>=2.32.4,<3", - "watchdog==2.1.1", - "argh==0.26.2", - "websockets==9.1", - "aiofiles==0.4.0", - "pyyaml==6.0.1", + "watchdog==6.0.0", + "argh==0.31.3", + "websockets==16.0.0", + "aiofiles==25.1.0", + "pyyaml==6.0.3", "loguru>=0.7.3,<0.8", "docker>=7.1.0,<8", "rich>=14.2.0,<15", + "setuptools>=82.0.0", ] name = "compute-worker" version = "0.1.0" -description = "" \ No newline at end of file +description = "" diff --git a/compute_worker/uv.lock b/compute_worker/uv.lock index f05387699..6a9f9d311 100644 --- a/compute_worker/uv.lock +++ b/compute_worker/uv.lock @@ -1,14 +1,14 @@ version = 1 revision = 3 -requires-python = "==3.9.20" +requires-python = "==3.13.11" [[package]] name = "aiofiles" -version = "0.4.0" +version = "25.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/c2/e3cb60c1b7d9478203d4514e2d33ea424ad9bb98e45b21d6225db93f25c9/aiofiles-0.4.0.tar.gz", hash = "sha256:021ea0ba314a86027c166ecc4b4c07f2d40fc0f4b3a950d1868a0f2571c2bbee", size = 9270, upload-time = "2018-08-11T17:24:08.5Z" } +sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/f2/a67a23bc0bb61d88f82aa7fb84a2fb5f278becfbdc038c5cbb36c31feaf1/aiofiles-0.4.0-py3-none-any.whl", hash = "sha256:1e644c2573f953664368de28d2aa4c89dfd64550429d0c27c4680ccd3aa4985d", size = 9191, upload-time = "2018-08-11T17:24:07.206Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size 
= 14668, upload-time = "2025-10-09T20:51:03.174Z" }, ] [[package]] @@ -25,25 +25,25 @@ wheels = [ [[package]] name = "argh" -version = "0.26.2" +version = "0.31.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/75/1183b5d1663a66aebb2c184e0398724b624cecd4f4b679cb6e25de97ed15/argh-0.26.2.tar.gz", hash = "sha256:e9535b8c84dc9571a48999094fda7f33e63c3f1b74f3e5f3ac0105a58405bb65", size = 32913, upload-time = "2016-05-11T20:55:36.296Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/34/bc0b3577a818b4b70c6e318d23fe3c81fc3bb25f978ca8a3965cd8ee3af9/argh-0.31.3.tar.gz", hash = "sha256:f30023d8be14ca5ee6b1b3eeab829151d7bbda464ae07dc4dd5347919c5892f9", size = 57570, upload-time = "2024-07-13T17:54:59.729Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/1c/e667a7126f0b84aaa1c56844337bf0ac12445d1beb9c8a6199a7314944bf/argh-0.26.2-py2.py3-none-any.whl", hash = "sha256:a9b3aaa1904eeb78e32394cd46c6f37ac0fb4af6dc488daa58971bdc7d7fcaf3", size = 30872, upload-time = "2016-05-11T20:55:26.893Z" }, + { url = "https://files.pythonhosted.org/packages/d2/52/fcd83710b6f8786df80e5d335882d1b24d1f610f397703e94a6ffb0d6f66/argh-0.31.3-py3-none-any.whl", hash = "sha256:2edac856ff50126f6e47d884751328c9f466bacbbb6cbfdac322053d94705494", size = 44844, upload-time = "2024-07-13T17:54:57.706Z" }, ] [[package]] name = "billiard" -version = "3.6.4.0" +version = "4.2.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/91/40de1901da8ec9eeb7c6a22143ba5d55d8aaa790761ca31342cedcd5c793/billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547", size = 155303, upload-time = "2021-04-01T09:23:50.092Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/23/b12ac0bcdfb7360d664f40a00b1bda139cbbbced012c34e375506dbd0143/billiard-4.2.4.tar.gz", hash = 
"sha256:55f542c371209e03cd5862299b74e52e4fbcba8250ba611ad94276b369b6a85f", size = 156537, upload-time = "2025-11-30T13:28:48.52Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/89/0c43de91d4e52eaa7bd748771d417f6ac9e51e66b2f61928c2151bf65878/billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b", size = 89472, upload-time = "2021-04-01T09:23:42.019Z" }, + { url = "https://files.pythonhosted.org/packages/cb/87/8bab77b323f16d67be364031220069f79159117dd5e43eeb4be2fef1ac9b/billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5", size = 87070, upload-time = "2025-11-30T13:28:47.016Z" }, ] [[package]] name = "celery" -version = "5.2.2" +version = "5.6.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "billiard" }, @@ -52,59 +52,59 @@ dependencies = [ { name = "click-plugins" }, { name = "click-repl" }, { name = "kombu" }, - { name = "pytz" }, - { name = "setuptools" }, + { name = "python-dateutil" }, + { name = "tzlocal" }, { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/00/ac6f7afbf93f0199c70d6ee4e17082c67a4a27d6ebc57c39f0df04426c15/celery-5.2.2.tar.gz", hash = "sha256:2844eb040e915398623a43253a8e1016723442ece6b0751a3c416d8a2b34216f", size = 1470157, upload-time = "2021-12-26T14:31:59.584Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/9d/3d13596519cfa7207a6f9834f4b082554845eb3cd2684b5f8535d50c7c44/celery-5.6.2.tar.gz", hash = "sha256:4a8921c3fcf2ad76317d3b29020772103581ed2454c4c042cc55dcc43585009b", size = 1718802, upload-time = "2026-01-04T12:35:58.012Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/a9/57e261eb3d95c74faf6a0ab7b21b4a875bfe3f71d60b1788ddafdef71f37/celery-5.2.2-py3-none-any.whl", hash = "sha256:5a68a351076cfac4f678fa5ffd898105c28825a2224902da006970005196d061", size = 405080, upload-time = "2021-12-26T14:31:56.465Z" }, 
+ { url = "https://files.pythonhosted.org/packages/dd/bd/9ecd619e456ae4ba73b6583cc313f26152afae13e9a82ac4fe7f8856bfd1/celery-5.6.2-py3-none-any.whl", hash = "sha256:3ffafacbe056951b629c7abcf9064c4a2366de0bdfc9fdba421b97ebb68619a5", size = 445502, upload-time = "2026-01-04T12:35:55.894Z" }, ] [[package]] name = "certifi" -version = "2026.1.4" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = 
"2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/7c/0c4760bccf082737ca7ab84a4c2034fcc06b1f21cf3032ea98bd6feb1725/charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9", size = 209609, upload-time = "2025-10-14T04:42:10.922Z" }, - { url = "https://files.pythonhosted.org/packages/bb/a4/69719daef2f3d7f1819de60c9a6be981b8eeead7542d5ec4440f3c80e111/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d", size = 149029, upload-time = "2025-10-14T04:42:12.38Z" }, - { url = "https://files.pythonhosted.org/packages/e6/21/8d4e1d6c1e6070d3672908b8e4533a71b5b53e71d16828cc24d0efec564c/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608", size = 144580, upload-time = "2025-10-14T04:42:13.549Z" }, - { url = "https://files.pythonhosted.org/packages/a7/0a/a616d001b3f25647a9068e0b9199f697ce507ec898cacb06a0d5a1617c99/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc", size = 162340, upload-time = "2025-10-14T04:42:14.892Z" }, - { url = "https://files.pythonhosted.org/packages/85/93/060b52deb249a5450460e0585c88a904a83aec474ab8e7aba787f45e79f2/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e", size = 159619, upload-time = "2025-10-14T04:42:16.676Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/21/0274deb1cc0632cd587a9a0ec6b4674d9108e461cb4cd40d457adaeb0564/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1", size = 153980, upload-time = "2025-10-14T04:42:17.917Z" }, - { url = "https://files.pythonhosted.org/packages/28/2b/e3d7d982858dccc11b31906976323d790dded2017a0572f093ff982d692f/charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3", size = 152174, upload-time = "2025-10-14T04:42:19.018Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ff/4a269f8e35f1e58b2df52c131a1fa019acb7ef3f8697b7d464b07e9b492d/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6", size = 151666, upload-time = "2025-10-14T04:42:20.171Z" }, - { url = "https://files.pythonhosted.org/packages/da/c9/ec39870f0b330d58486001dd8e532c6b9a905f5765f58a6f8204926b4a93/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88", size = 145550, upload-time = "2025-10-14T04:42:21.324Z" }, - { url = "https://files.pythonhosted.org/packages/75/8f/d186ab99e40e0ed9f82f033d6e49001701c81244d01905dd4a6924191a30/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1", size = 163721, upload-time = "2025-10-14T04:42:22.46Z" }, - { url = "https://files.pythonhosted.org/packages/96/b1/6047663b9744df26a7e479ac1e77af7134b1fcf9026243bb48ee2d18810f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf", size = 152127, upload-time = "2025-10-14T04:42:23.712Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/78/e5a6eac9179f24f704d1be67d08704c3c6ab9f00963963524be27c18ed87/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318", size = 161175, upload-time = "2025-10-14T04:42:24.87Z" }, - { url = "https://files.pythonhosted.org/packages/e5/43/0e626e42d54dd2f8dd6fc5e1c5ff00f05fbca17cb699bedead2cae69c62f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c", size = 155375, upload-time = "2025-10-14T04:42:27.246Z" }, - { url = "https://files.pythonhosted.org/packages/e9/91/d9615bf2e06f35e4997616ff31248c3657ed649c5ab9d35ea12fce54e380/charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505", size = 99692, upload-time = "2025-10-14T04:42:28.425Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a9/6c040053909d9d1ef4fcab45fddec083aedc9052c10078339b47c8573ea8/charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966", size = 107192, upload-time = "2025-10-14T04:42:29.482Z" }, - { url = "https://files.pythonhosted.org/packages/f0/c6/4fa536b2c0cd3edfb7ccf8469fa0f363ea67b7213a842b90909ca33dd851/charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50", size = 100220, upload-time = "2025-10-14T04:42:30.632Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, + { url = "https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, + { url = "https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, + { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, + { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, + { url = "https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = 
"2026-03-06T06:03:17.827Z" }, ] [[package]] name = "click" -version = "8.1.8" +version = "8.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] [[package]] @@ -166,22 +166,24 @@ dependencies = [ { name = "pyyaml" }, { name = "requests" }, { name = "rich" }, + { name = "setuptools" }, { name = "watchdog" }, { name = "websockets" }, ] [package.metadata] requires-dist = [ - { name = "aiofiles", specifier = "==0.4.0" }, - { name = "argh", specifier = "==0.26.2" }, - { name = "celery", specifier = "==5.2.2" }, + { name = "aiofiles", specifier = "==25.1.0" }, + { name = "argh", specifier = "==0.31.3" }, + { name = "celery", specifier = "==5.6.2" }, { name = "docker", specifier = ">=7.1.0,<8" }, { name = "loguru", specifier = ">=0.7.3,<0.8" }, - { 
name = "pyyaml", specifier = "==6.0.1" }, + { name = "pyyaml", specifier = "==6.0.3" }, { name = "requests", specifier = ">=2.32.4,<3" }, { name = "rich", specifier = ">=14.2.0,<15" }, - { name = "watchdog", specifier = "==2.1.1" }, - { name = "websockets", specifier = "==9.1" }, + { name = "setuptools", specifier = ">=82.0.0" }, + { name = "watchdog", specifier = "==6.0.0" }, + { name = "websockets", specifier = "==16.0.0" }, ] [[package]] @@ -237,14 +239,14 @@ wheels = [ [[package]] name = "markdown-it-py" -version = "3.0.0" +version = "4.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] [[package]] @@ -287,12 +289,15 @@ wheels = [ ] [[package]] -name = "pytz" -version = "2025.2" +name = "python-dateutil" +version = "2.9.0.post0" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] @@ -300,25 +305,27 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/42/b86689aac0cdaee7ae1c58d464b0ff04ca909c19bb6502d4973cdd9f9544/pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b", size = 8760837, upload-time = "2025-07-14T20:12:59.59Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8a/1403d0353f8c5a2f0829d2b1c4becbf9da2f0a4d040886404fc4a5431e4d/pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91", size = 9590187, upload-time = "2025-07-14T20:13:01.419Z" 
}, - { url = "https://files.pythonhosted.org/packages/60/22/e0e8d802f124772cec9c75430b01a212f86f9de7546bda715e54140d5aeb/pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d", size = 8778162, upload-time = "2025-07-14T20:13:03.544Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/e5/af35f7ea75cf72f2cd079c95ee16797de7cd71f29ea7c68ae5ce7be1eda0/PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43", size = 125201, upload-time = "2023-07-18T00:00:23.308Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/57/c5/5d09b66b41d549914802f482a2118d925d876dc2a35b2d127694c1345c34/PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8", size = 197846, upload-time = "2023-07-17T23:59:46.424Z" }, - { url = "https://files.pythonhosted.org/packages/0e/88/21b2f16cb2123c1e9375f2c93486e35fdc86e63f02e274f0e99c589ef153/PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859", size = 174396, upload-time = "2023-07-17T23:59:49.538Z" }, - { url = "https://files.pythonhosted.org/packages/ac/6c/967d91a8edf98d2b2b01d149bd9e51b8f9fb527c98d80ebb60c6b21d60c4/PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6", size = 731824, upload-time = "2023-07-17T23:59:58.111Z" }, - { url = "https://files.pythonhosted.org/packages/4a/4b/c71ef18ef83c82f99e6da8332910692af78ea32bd1d1d76c9787dfa36aea/PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0", size = 754777, upload-time = "2023-07-18T00:00:06.716Z" }, - { url = "https://files.pythonhosted.org/packages/7d/39/472f2554a0f1e825bd7c5afc11c817cd7a2f3657460f7159f691fbb37c51/PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c", size = 738883, upload-time = "2023-07-18T00:00:14.423Z" }, - { url = "https://files.pythonhosted.org/packages/40/da/a175a35cf5583580e90ac3e2a3dbca90e43011593ae62ce63f79d7b28d92/PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", size = 750294, upload-time = "2023-08-28T18:43:37.153Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/62/7fcc372442ec8ea331da18c24b13710e010c5073ab851ef36bf9dacb283f/PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c", size = 136936, upload-time = "2023-07-18T00:00:17.167Z" }, - { url = "https://files.pythonhosted.org/packages/84/4d/82704d1ab9290b03da94e6425f5e87396b999fd7eb8e08f3a92c158402bf/PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486", size = 152751, upload-time = "2023-07-18T00:00:19.939Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, ] [[package]] @@ -338,24 +345,33 @@ wheels = [ [[package]] name = "rich" -version = "14.3.2" +version = "14.3.3" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/99/a4cab2acbb884f80e558b0771e97e21e939c5dfb460f488d19df485e8298/rich-14.3.2.tar.gz", hash = "sha256:e712f11c1a562a11843306f5ed999475f09ac31ffb64281f73ab29ffdda8b3b8", size = 230143, upload-time = "2026-02-01T16:20:47.908Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/45/615f5babd880b4bd7d405cc0dc348234c5ffb6ed1ea33e152ede08b2072d/rich-14.3.2-py3-none-any.whl", hash = "sha256:08e67c3e90884651da3239ea668222d19bea7b589149d8014a21c633420dbb69", size = 309963, upload-time = "2026-02-01T16:20:46.078Z" }, + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, ] [[package]] name = "setuptools" -version = "80.10.2" +version = "82.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/db/cfac1baf10650ab4d1c111714410d2fbb77ac5a616db26775db562c8fab2/setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9", size = 1152316, upload-time = "2026-03-09T12:47:17.221Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/76/f789f7a86709c6b087c5a2f52f911838cad707cc613162401badc665acfe/setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb", size = 1006223, upload-time = "2026-03-09T12:47:15.026Z" }, +] + 
+[[package]] +name = "six" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/95/faf61eb8363f26aa7e1d762267a8d602a1b26d4f3a1e758e92cb3cb8b054/setuptools-80.10.2.tar.gz", hash = "sha256:8b0e9d10c784bf7d262c4e5ec5d4ec94127ce206e8738f29a437945fbc219b70", size = 1200343, upload-time = "2026-01-25T22:38:17.252Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/b8/f1f62a5e3c0ad2ff1d189590bfa4c46b4f3b6e49cef6f26c6ee4e575394d/setuptools-80.10.2-py3-none-any.whl", hash = "sha256:95b30ddfb717250edb492926c92b5221f7ef3fbcc2b07579bcd4a27da21d0173", size = 1064234, upload-time = "2026-01-25T22:38:15.216Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] @@ -367,6 +383,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, ] +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = 
"sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, +] + [[package]] name = "urllib3" version = "2.6.3" @@ -387,46 +415,50 @@ wheels = [ [[package]] name = "watchdog" -version = "2.1.1" +version = "6.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/7c/82cafb290b818c331192e52609af8d41c34b55f468957ba5bd0a8d2cb776/watchdog-2.1.1.tar.gz", hash = "sha256:2894440b4ea95a6ef4c5d152deedbe270cae46092682710b7028a04d6a6980f6", size = 105882, upload-time = "2021-05-10T13:51:02.629Z" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/85/655e322ef55e4c98672e7f003fa95dff5b58149c50465b93ef947b31be90/watchdog-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22c13c19599b0dec7192f8f7d26404d5223cb36c9a450e96430483e685dccd7e", size = 83829, upload-time = "2021-05-10T13:50:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/bc/27/6f250c6a534b8811dcd0efcd5f9574dd2b8c077571aa66544947b0b76d0c/watchdog-2.1.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:aa59afc87a892ed92d7d88d09f4b736f1336fc35540b403da7ee00c3be74bd07", size = 74713, upload-time = "2021-05-10T13:50:46.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/5f/3813b46bd10efa4870798850af2685cc3cce6e989d5a3397df0ee1cfe0e6/watchdog-2.1.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:a1b3f76e2a0713b406348dd5b9df2aa02bdd741a6ddf54f4c6410b395e077502", size = 74715, upload-time = "2021-05-10T13:50:48.086Z" }, - { url = "https://files.pythonhosted.org/packages/81/29/6abfafa77d6c6ead43ba92fd5537fd2fc33f7dc916460457d06cba45a398/watchdog-2.1.1-py3-none-manylinux2014_i686.whl", hash = "sha256:9f1b124fe2d4a1f37b7068f6289c2b1eba44859eb790bf6bd709adff224a5469", size = 74710, upload-time = "2021-05-10T13:50:49.983Z" }, - { url = "https://files.pythonhosted.org/packages/34/87/5298db4bf7bef093254bedd48fee54a3fc63f30185b0f2359d0bb8f213a4/watchdog-2.1.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:a9005f968220b715101d5fcdde5f5deda54f0d1873f618724f547797171f5e97", size = 74711, upload-time = "2021-05-10T13:50:51.297Z" }, - { url = "https://files.pythonhosted.org/packages/0d/5a/629a27cdb7c76744402598c76c0170d9298ecb61bb9238143811d1089897/watchdog-2.1.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:027c532e2fd3367d55fe235510fc304381a6cc88d0dcd619403e57ffbd83c1d2", size = 74714, upload-time = "2021-05-10T13:50:52.802Z" }, - { url = "https://files.pythonhosted.org/packages/fb/ef/5d2a010e6dea46260103a0615e7f433833a37dcee851e743cbf039a85b82/watchdog-2.1.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:4d83c89ba24bd67b7a7d5752a4ef953ec40db69d4d30582bd1f27d3ecb6b61b0", size = 74714, upload-time = "2021-05-10T13:50:54.87Z" }, - { url = "https://files.pythonhosted.org/packages/58/f6/6b538562aaa62294ca0a1d18b59d9fcb1a43fe166fa5b3a258f445d64119/watchdog-2.1.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:67c645b1e500cc74d550e9aad4829309c5084dc55e8dc4e1c25d5da23e5be239", size = 74714, upload-time = "2021-05-10T13:50:56.916Z" }, - { url = "https://files.pythonhosted.org/packages/73/8f/cb225682a508d2c43f6a1f4cfe274a75a2e156ec2d73af055d5d47542f26/watchdog-2.1.1-py3-none-win32.whl", hash 
= "sha256:12645d41d7307601b318c48861e776ce7a9fdcad9f74961013ec39037050582c", size = 74698, upload-time = "2021-05-10T13:50:58.739Z" }, - { url = "https://files.pythonhosted.org/packages/85/64/592c43792c0f6b18b031d5944ba1c36d4a2775d72cb06c4088050ab4834a/watchdog-2.1.1-py3-none-win_amd64.whl", hash = "sha256:16078cd241a95124acd4d8d3efba2140faec9300674b12413cc08be11b825d56", size = 74701, upload-time = "2021-05-10T13:50:59.987Z" }, - { url = "https://files.pythonhosted.org/packages/49/5c/69074682ce2e5dbe701755d778cdf5480448462e901ea088cc7e6e2c8261/watchdog-2.1.1-py3-none-win_ia64.whl", hash = "sha256:20d4cabfa2ad7239995d81a0163bc0264a3e104a64f33c6f0a21ad75a0d915d9", size = 74701, upload-time = "2021-05-10T13:51:01.299Z" }, + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash 
= "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] [[package]] name = "wcwidth" -version = "0.5.3" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c2/62/a7c072fbfefb2980a00f99ca994279cb9ecf310cb2e6b2a4d2a28fe192b3/wcwidth-0.5.3.tar.gz", hash = "sha256:53123b7af053c74e9fe2e92ac810301f6139e64379031f7124574212fb3b4091", size = 157587, upload-time = "2026-01-31T03:52:10.92Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/c1/d73f12f8cdb1891334a2ccf7389eed244d3941e74d80dd220badb937f3fb/wcwidth-0.5.3-py3-none-any.whl", hash = "sha256:d584eff31cd4753e1e5ff6c12e1edfdb324c995713f75d26c29807bb84bf649e", size = 92981, upload-time = "2026-01-31T03:52:09.14Z" }, + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = 
"2026-02-06T19:19:39.646Z" }, ] [[package]] name = "websockets" -version = "9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0d/bd/5262054455ab2067e51de331bfbc53a1dfa9071af7c424cf7c0793c4349a/websockets-9.1.tar.gz", hash = "sha256:276d2339ebf0df4f45df453923ebd2270b87900eda5dfd4a6b0cfa15f82111c3", size = 76694, upload-time = "2021-05-27T19:34:30.628Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/b3/0ff0676cb0043bc85f4cb548733a37b5e7e9b82fe253edd0f5d173b2ec43/websockets-9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9e5fd6dbdf95d99bc03732ded1fc8ef22ebbc05999ac7e0c7bf57fe6e4e5ae2", size = 88721, upload-time = "2021-05-27T19:34:22.69Z" }, - { url = "https://files.pythonhosted.org/packages/46/e7/ebbe5d8ce59c77b59a13551fe0103c73131fab0b7cfb2fdc10b58f252e13/websockets-9.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9e7fdc775fe7403dbd8bc883ba59576a6232eac96dacb56512daacf7af5d618d", size = 101297, upload-time = "2021-05-27T19:34:23.713Z" }, - { url = "https://files.pythonhosted.org/packages/96/d0/3e5beec93673fc4526e118f45e56df8a292b6f009002675614f4e3dfcf3a/websockets-9.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:597c28f3aa7a09e8c070a86b03107094ee5cdafcc0d55f2f2eac92faac8dc67d", size = 102182, upload-time = "2021-05-27T19:34:24.705Z" }, - { url = "https://files.pythonhosted.org/packages/18/9c/3334655fd3eb93fe3b728203db354711cec48044c2d7b8bfbde9ea0ffc5d/websockets-9.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:ad893d889bc700a5835e0a95a3e4f2c39e91577ab232a3dc03c262a0f8fc4b5c", size = 101299, upload-time = "2021-05-27T19:34:25.696Z" }, - { url = "https://files.pythonhosted.org/packages/c8/31/bb59de44c04c5d81083a5ee93eeb3e9b30d32aa8f6b72080eb5da4bb73f7/websockets-9.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:1d6b4fddb12ab9adf87b843cd4316c4bd602db8d5efd2fb83147f0458fe85135", size = 102182, upload-time = "2021-05-27T19:34:26.602Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/9b/112c8439c718432fd3fb1f6fa56050767a70ad977712008cec036d061a7a/websockets-9.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:ebf459a1c069f9866d8569439c06193c586e72c9330db1390af7c6a0a32c4afd", size = 102252, upload-time = "2021-05-27T19:34:27.549Z" }, - { url = "https://files.pythonhosted.org/packages/d6/c9/8d3e0904e624b1b83f1939170ed28002c971dcf960093eb2154af8408b67/websockets-9.1-cp39-cp39-win32.whl", hash = "sha256:be5fd35e99970518547edc906efab29afd392319f020c3c58b0e1a158e16ed20", size = 89556, upload-time = "2021-05-27T19:34:28.646Z" }, - { url = "https://files.pythonhosted.org/packages/d7/d7/cd60ce74675402998e285f9f54baf86c80daaa473e557c92f53c01b10f2b/websockets-9.1-cp39-cp39-win_amd64.whl", hash = "sha256:85db8090ba94e22d964498a47fdd933b8875a1add6ebc514c7ac8703eb97bbf0", size = 90217, upload-time = "2021-05-27T19:34:29.707Z" }, +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] [[package]] diff --git a/docker-compose.yml b/docker-compose.yml index 46c1783de..de8bfe42e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ services: # Web Services #---------------------------------------------------------------------------------------------------- caddy: - image: caddy:2.10.0 + image: caddy:2.11.1 env_file: .env environment: - ACME_AGREE=true @@ -22,9 +22,11 @@ services: - django django: + container_name: django build: context: . dockerfile: packaging/container/Containerfile + image: django_site-worker # NOTE: We use watchmedo to reload gunicorn nicely, Uvicorn + Gunicorn reloads don't work well command: ["python manage.py migrate --no-input && python manage.py collectstatic --no-input && cd /app/src && watchmedo auto-restart -p '*.py' --recursive -- python3 ./gunicorn_run.py"] environment: @@ -95,6 +97,7 @@ services: # Local development helper, rebuilds RiotJS/Stylus on change #---------------------------------------------------------------------------------------------------- builder: + container_name: builder build: context: . 
dockerfile: packaging/container/Containerfile.builder @@ -115,6 +118,7 @@ services: #---------------------------------------------------------------------------------------------------- db: image: postgres:18-alpine + container_name: db env_file: .env environment: - PGDATA=/var/lib/postgresql/18/docker @@ -136,6 +140,7 @@ services: # Rabbitmq & Flower monitoring tool #---------------------------------------------------------------------------------------------------- rabbit: + container_name: rabbit build: context: . dockerfile: packaging/container/Containerfile.rabbitmq @@ -161,6 +166,7 @@ services: max-file: "5" flower: + container_name: flower image: mher/flower env_file: .env environment: @@ -179,6 +185,7 @@ services: # Redis #---------------------------------------------------------------------------------------------------- redis: + container_name: redis image: redis ports: - 6379:6379 @@ -195,9 +202,8 @@ services: # This auto-reloads command: ["watchmedo auto-restart -p '*.py' --recursive -- celery -A celery_config worker -B -Q site-worker -l info -n site-worker@%n --concurrency=2"] working_dir: /app/src - build: - context: . - dockerfile: packaging/container/Containerfile + container_name: site_worker + image: django_site-worker depends_on: - rabbit - db @@ -219,6 +225,7 @@ services: compute_worker: command: ["celery -A compute_worker worker -l info -Q compute-worker -n compute-worker@%n"] working_dir: /app + container_name: compute_worker build: context: . 
dockerfile: packaging/container/Containerfile.compute_worker diff --git a/documentation/docs/Developers_and_Administrators/How-to-deploy-Codabench-on-your-server.md b/documentation/docs/Developers_and_Administrators/How-to-deploy-Codabench-on-your-server.md index fb2895b7d..aa11532d4 100644 --- a/documentation/docs/Developers_and_Administrators/How-to-deploy-Codabench-on-your-server.md +++ b/documentation/docs/Developers_and_Administrators/How-to-deploy-Codabench-on-your-server.md @@ -439,4 +439,7 @@ Caddyfile : path /{$AWS_STORAGE_BUCKET_NAME}* /{$AWS_STORAGE_PRIVATE_BUCKET_NAME}* } reverse_proxy @min_bucket minio:{$MINIO_PORT} -``` \ No newline at end of file +``` + +## Codabench Instance behind a reverse proxy +If you put your instance behind a reverse proxy and want that proxy to contact the instance via http or https, your `DOMAIN_NAME` might not be reachable from the outside. In this case, you can set `DOMAIN_NAME` as your internal domain name, used by the reverse proxy, and `EXTERNAL_DOMAIN_NAME` as the domain name that is known on external networks (like the internet). \ No newline at end of file diff --git a/documentation/docs/Developers_and_Administrators/Upgrading_Codabench/Postgres-18.md b/documentation/docs/Developers_and_Administrators/Upgrading_Codabench/Postgres-18.md new file mode 100644 index 000000000..a872e8629 --- /dev/null +++ b/documentation/docs/Developers_and_Administrators/Upgrading_Codabench/Postgres-18.md @@ -0,0 +1,66 @@ +!!! note "After upgrading from Codabench <1.24.0, you will need to perform important manual interventions." + +## Rabbit +We also need to log into the RabbitMQ interface and enable the flags it wants us to enable after upgrading. + +RabbitMQ port, username and password to access the interface are defined in the `.env` file. 
+
+![Rabbit-screenshot](../_attachments/554725936-6659efd8-e953-42dd-b885-629c41beb1c7.png)
+
+More information about feature flags [here](https://www.rabbitmq.com/docs/feature-flags)
+
+
+## Database (Postgres 12 -> 18)
+### 1. Maintenance mode on to avoid updates to the database during the upgrade:
+
+```sh
+touch maintenance_mode/maintenance.on
+git pull
+```
+
+### 2. Create the new `postgres.conf` file from the sample:
+
+```sh
+cp my-postgres_sample.conf my-postgres.conf
+```
+
+### 3. Rebuild docker containers to take into account the new images:
+
+```sh
+docker compose build --no-cache
+```
+
+### 4. Dump the database, remove it and reload it on the new configuration:
+
+```sh
+# Dump database
+docker compose exec db bash -lc 'PGPASSWORD="$DB_PASSWORD" pg_dump -Fc -U "$DB_USERNAME" -d "$DB_NAME" -f /app/backups/upgrade-1.24.dump'
+```
+
+```sh
+# Check that dump file is not empty
+docker compose exec db bash -lc 'ls -lh /app/backups/upgrade-1.24.dump && pg_restore -l /app/backups/upgrade-1.24.dump | head'
+```
+
+**/!\ Dangerous operation here: confirm that your dump worked before removing the database!**
+
+```sh
+# Remove database
+sudo rm -rf var/postgres
+```
+
+```sh
+# Launch the new containers (containing the updated database image) and restore from backup
+docker compose up -d db
+docker compose exec db bash -lc 'PGPASSWORD="$DB_PASSWORD" pg_restore --verbose --clean --no-acl --no-owner -h $DB_HOST -U "$DB_USERNAME" -d "$DB_NAME" /app/backups/upgrade-1.24.dump'
+```
+
+_See [this](https://www.postgresql.org/docs/18/upgrading.html) for more details._
+
+### 5. Restart the rest of the services and disable maintenance mode:
+
+```sh
+docker compose up -d
+rm maintenance_mode/maintenance.on
+```
+
diff --git a/documentation/docs/Developers_and_Administrators/_attachments/554725936-6659efd8-e953-42dd-b885-629c41beb1c7.png b/documentation/docs/Developers_and_Administrators/_attachments/554725936-6659efd8-e953-42dd-b885-629c41beb1c7.png
new file mode 100644
index 000000000..d383aabd8 Binary files /dev/null and b/documentation/docs/Developers_and_Administrators/_attachments/554725936-6659efd8-e953-42dd-b885-629c41beb1c7.png differ diff --git a/documentation/docs/Organizers/Benchmark_Creation/Benchmark-Examples.md b/documentation/docs/Organizers/Benchmark_Creation/Benchmark-Examples.md index 6b8c9fe6c..3c82874b5 100644 --- a/documentation/docs/Organizers/Benchmark_Creation/Benchmark-Examples.md +++ b/documentation/docs/Organizers/Benchmark_Creation/Benchmark-Examples.md @@ -11,6 +11,7 @@ More details are given below, to help you select the bundle that suits your own ### Classify Wheat Seeds We propose three versions of the [Classify Wheat Seeds](https://github.com/codalab/competition-examples/tree/master/codabench/wheat_seeds): + - [Result submission bundle](https://github.com/codalab/competition-examples/tree/master/codabench/wheat_seeds/results_submission_bundle), with simple submission of predictions - [Code submission bundle](https://github.com/codalab/competition-examples/tree/master/codabench/wheat_seeds/code_submission_bundle), with submission of Python algorithm - [Ingestion during scoring bundle](https://github.com/codalab/competition-examples/tree/master/codabench/wheat_seeds/ingestion_during_scoring_bundle), where ingestion and scoring run in parallel @@ -22,9 +23,10 @@ We propose three versions of the [Classify Wheat Seeds](https://github.com/codal ### AutoWSL You can find two versions of the [Automated Weakly Supervised Learning Benchmark](https://github.com/codalab/competition-examples/tree/master/codabench/autowsl): + - [Code submission benchmark](https://github.com/codalab/competition-examples/tree/master/codabench/autowsl/code_submission) - [Dataset submission benchmark](https://github.com/codalab/competition-examples/tree/master/codabench/autowsl/dataset_submission) ### GPU test -[GPU test bundle](https://github.com/codalab/competition-examples/tree/master/codabench/example_GPU) is an example bundle to 
test if GPUs are available or not. It serves as testing compute workers and does **not** contain any problem to solve. \ No newline at end of file +[GPU test bundle](https://github.com/codalab/competition-examples/tree/master/codabench/example_GPU) is an example bundle to test if GPUs are available or not. It serves as testing compute workers and does **not** contain any problem to solve. diff --git a/documentation/docs/Organizers/Running_a_benchmark/Compute-Worker-Management---Setup.md b/documentation/docs/Organizers/Running_a_benchmark/Compute-Worker-Management---Setup.md index fdf9e9e70..5735af914 100644 --- a/documentation/docs/Organizers/Running_a_benchmark/Compute-Worker-Management---Setup.md +++ b/documentation/docs/Organizers/Running_a_benchmark/Compute-Worker-Management---Setup.md @@ -27,11 +27,11 @@ b) Install manually, following the steps at: https://docs.docker.com/install/ ## Pull Compute Worker Image On the compute worker machine, run the following command in a shell: ```bash -docker pull codalab/competitions-v2-compute-worker +docker pull codalab/codabench-compute-worker ``` That will pull the latest image for the v2 worker. For specific versions, see the docker hub page at: -https://hub.docker.com/r/codalab/competitions-v2-compute-worker/tags +https://hub.docker.com/r/codalab/codabench-compute-worker/tags ## Start CPU worker @@ -108,21 +108,6 @@ You can then launch the worker by running this command in the terminal where the docker compose up -d ``` -### Deprecated method (one liner) -Alternately, you can use the docker run below: -```bash -docker run \ - -v /codabench:/codabench \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -d \ - --env-file .env \ - --name compute_worker \ - --restart unless-stopped \ - --log-opt max-size=50m \ - --log-opt max-file=3 \ - codalab/competitions-v2-compute-worker:latest -``` - ## Start GPU worker Make a `.env` file, as explained in CPU worker instructions. 
@@ -163,23 +148,6 @@ You can then launch the worker by running this command in the terminal where the docker compose up -d ``` - -### NVIDIA-docker Wrapper (deprecated method) -[Nvidia installation instructions](https://github.com/NVIDIA/nvidia-docker#quickstart) -```bash -nvidia-docker run \ - -v /codabench:/codabench \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /var/lib/nvidia-docker/nvidia-docker.sock:/var/lib/nvidia-docker/nvidia-docker.sock \ - -d \ - --env-file .env \ - --name compute_worker \ - --restart unless-stopped \ - --log-opt max-size=50m \ - --log-opt max-file=3 \ - codalab/competitions-v2-compute-worker:gpu -``` - Note that a competition docker image including CUDA and other GPU libraries, such as `codalab/codalab-legacy:gpu`, is then required. ## Check logs @@ -213,7 +181,7 @@ It is recommended to store the docker container hostname to identify the worker. ```sh $ docker ps CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -1a2b3d4e5f67 codalab/competitions-v2-compute-worker:latest "/bin/sh -c 'celery …" 3 days ago Up 3 days compute_worker +1a2b3d4e5f67 codalab/codabench-compute-worker:latest "/bin/sh -c 'celery …" 3 days ago Up 3 days compute_worker ``` For each submission made to your queue, you can know what worker computed the ingestion and the scoring jobs in the [server status page](Server-status-page.md). 
@@ -273,4 +241,4 @@ Update the worker: docker compose down docker compose pull docker compose up -d -``` \ No newline at end of file +``` diff --git a/documentation/mkdocs.yml b/documentation/mkdocs.yml index b452cf2fb..341169c8f 100644 --- a/documentation/mkdocs.yml +++ b/documentation/mkdocs.yml @@ -166,6 +166,7 @@ nav: - Minio Image Upgrade (version < 1.21.0): Developers_and_Administrators/Upgrading_Codabench/Minio-image.md - Docker-Py (version < 1.22.0): Developers_and_Administrators/Upgrading_Codabench/Docker-py.md - Django 4 Upgrades (version < 1.23.0): Developers_and_Administrators/Upgrading_Codabench/Django-4.md + - Postgres 18 upgrade (version < 1.24.0): Developers_and_Administrators/Upgrading_Codabench/Postgres-18.md - Newsletters Archive: - 2024: Newsletters_Archive/CodaLab-in-2024.md - 2025: Newsletters_Archive/CodaLab-in-2025.md diff --git a/package.json b/package.json index dc727bd94..11d82838a 100644 --- a/package.json +++ b/package.json @@ -2,11 +2,11 @@ "name": "competitions-v2", "version": "0.0.1", "dependencies": { - "jquery": "^3.2.1", - "npm-watch": "^0.2.0", - "riot": "^3.6.3", - "stylus": "^0.54.5", - "uglify-js": "^3.0.28" + "jquery": "^4.0.0", + "npm-watch": "^0.13.0", + "riot": "^3.13.2", + "stylus": "^0.64.0", + "uglify-js": "^3.19.3" }, "devDependencies": {}, "watch": { diff --git a/packaging/container/Containerfile b/packaging/container/Containerfile index 448a6ed17..faa65f28f 100644 --- a/packaging/container/Containerfile +++ b/packaging/container/Containerfile @@ -1,20 +1,19 @@ FROM almalinux:10-minimal + RUN microdnf install -y tar gzip ENV PYTHONUNBUFFERED=1 # Install UV and add paths to PATH for uv and the future .venv created by uv -RUN curl -LsSf https://astral.sh/uv/install.sh | sh +RUN curl -LsSf https://astral.sh/uv/install.sh | sh && microdnf remove -y tar gzip && microdnf clean all ENV PATH=$PATH:/root/.local/bin ENV PATH=$PATH:/.venv/bin - -COPY pyproject.toml ./ -COPY uv.lock ./ +# Copy pyproject and uv.lock +COPY 
pyproject.toml uv.lock ./ # Install dependencies RUN uv sync --all-extras --frozen - WORKDIR /app ENTRYPOINT ["/bin/bash", "-c"] diff --git a/packaging/container/Containerfile.compute_worker b/packaging/container/Containerfile.compute_worker index f5808687f..7749fceb6 100644 --- a/packaging/container/Containerfile.compute_worker +++ b/packaging/container/Containerfile.compute_worker @@ -1,17 +1,14 @@ FROM almalinux:10-minimal + RUN microdnf install -y tar gzip # This makes output not buffer and return immediately, nice for seeing results in stdout ENV PYTHONUNBUFFERED=1 -COPY compute_worker/pyproject.toml ./ -COPY compute_worker/uv.lock ./ -COPY compute_worker/celery_config.py ./ -COPY compute_worker/compute_worker.py ./ - +COPY compute_worker/pyproject.toml compute_worker/uv.lock compute_worker/celery_config.py compute_worker/compute_worker.py ./ # Install UV and add paths to PATH for uv and the future .venv created by uv -RUN curl -LsSf https://astral.sh/uv/install.sh | sh +RUN curl -LsSf https://astral.sh/uv/install.sh | sh && microdnf remove -y tar gzip && microdnf clean all ENV PATH=$PATH:/root/.local/bin ENV PATH=$PATH:/.venv/bin @@ -19,5 +16,6 @@ ENV PATH=$PATH:/.venv/bin # Install dependencies RUN uv sync --frozen COPY src/settings/logs_loguru.py /.venv/bin + ENTRYPOINT ["/bin/bash", "-c"] CMD ["celery -A compute_worker worker -l info -Q compute-worker -n compute-worker@$HOSTNAME --concurrency=1"] diff --git a/packaging/container/Containerfile.rabbitmq b/packaging/container/Containerfile.rabbitmq index 29314380a..37f44d67e 100644 --- a/packaging/container/Containerfile.rabbitmq +++ b/packaging/container/Containerfile.rabbitmq @@ -1,4 +1,4 @@ -FROM rabbitmq:4.2.0-management +FROM rabbitmq:4.2.4-management ARG WORKER_CONNECTION_TIMEOUT RUN echo "consumer_timeout = $WORKER_CONNECTION_TIMEOUT" >> /etc/rabbitmq/conf.d/10-defaults.conf diff --git a/pyproject.toml b/pyproject.toml index 4202d6d14..ba4285da6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,71 
+3,70 @@ name = "codabench" version = "0.1.0" description = "" authors = [{ name = "Codalab" }] -requires-python = ">=3.10,<3.11" +requires-python = "==3.13.11" readme = "README.md" classifiers = [ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", ] dependencies = [ - "django>=4.2.0,<5", + "django==5.2.12", "django-oauth-toolkit==1.6.3", - "django-cors-middleware==1.5.0", - "social-auth-core>=4.1.0,<5", - "social-auth-app-django>=5.0.0,<6", - "six==1.16.0", - "django-extensions>=3.2,<4", - "channels==4.2.0", + "social-auth-core==4.8.5", + "social-auth-app-django==5.6.0", + "django-extensions==4.1.0", + "channels==4.3.2", "channels-redis==4.0.0", - "pillow==10.3.0", - "celery==4.4.7", - "gunicorn==22.0.0", - "urllib3>=1.25.4,<1.27", - "uvicorn>=0.22.0,<0.23", - "pyyaml==5.3.1", - "watchdog==2.1.1", - "argh==0.26.2", - "python-dateutil==2.7.3", - "bpython>=0.21.0,<0.22", - "websockets>=10.4.0,<11", - "aiofiles==0.4.0", - "oyaml==0.7", - "factory-boy==2.11.1", - "bleach>=5.0.0", - "django-debug-toolbar==3.2", - "django-querycount==0.7.0", + "pillow==12.1.1", + "celery==5.6.2", + "gunicorn==23.0", + "urllib3==2.6.3", + "uvicorn==0.38", + "pyyaml==6.0.3", + "watchdog==6.0.0", + "argh==0.31.3", + "python-dateutil==2.9.0", + "bpython==0.26", + "websockets==16.0.0", + "aiofiles==25.1.0", + "oyaml==1.0", + "factory-boy==3.3.3", "blessings==1.7", "django-su>=1.0.0,<2", - "django-ajax-selects==2.0.0", + "django-ajax-selects==3.0.3", "dj-database-url==0.4.2", "psycopg2-binary>=2.9.9,<3", - "django-redis==4.12.1", + "django-redis==6.0.0", "django-storages[azure]>=1.14.6,<2", "azure-storage-blob>=12,<13", "azure-storage-common==2.1.0", - "boto3==1.26.76", - "whitenoise==5.2.0", - "djangorestframework>=3.13.0", + "boto3==1.42.50", + "whitenoise==6.11.0", + "djangorestframework==3.16.1", "djangorestframework-csv==3.0.1", - "drf-extensions==0.4.0", - "markdown==2.6.11", - "pygments==2.2.0", - "drf-writable-nested==0.6.2", - "django-filter==2.4.0", - 
"flex==6.12.0", + "drf-extensions==0.8.0", + "markdown==3.10.2", + "pygments==2.19.2", + "drf-writable-nested==0.7.2", + "flex==6.14.1", "pyrabbit2==1.0.7", - "django-enforce-host==1.0.1", - "twisted==24.7.0", - "ipdb==0.13", - "jinja2==3.1.4", - "requests==2.32.2", - "drf-extra-fields>=3.5.0", - "botocore==1.29.76", - "s3transfer==0.6.0", + "django-enforce-host==1.1.0", + "twisted==25.5.0", + "ipdb==0.13.13", + "jinja2==3.1.6", + "requests==2.33.0", + "drf-extra-fields==3.7.0", + "botocore==1.42.50", + "s3transfer==0.16.0", "drf-spectacular>=0.28.0,<0.29", - "coreapi>=2.3.3,<3", "loguru>=0.7.3,<0.8", "tzdata>=2025.3", + "setuptools==82.0.0", + "pytz>=2025.2", + "django-filter==25.1", + "django-cors-headers==4.9.0", + "nh3==0.3.3", + "configobj==5.0.9", ] [tool.uv] @@ -79,9 +78,11 @@ build-backend = "uv_build" [dependency-groups] dev = [ - "flake8>=3.8.4", - "pytest==7.4.4", - "pytest-django==4.11.1", + "django-querycount==0.7.0", + "django-debug-toolbar==6.2.0", + "flake8==7.3.0", + "pytest==9.0.2", + "pytest-django==4.12.0", ] [tool.pytest.ini_options] DJANGO_SETTINGS_MODULE = "settings.develop" # Just "settings" since pytest will be running from src/ diff --git a/setup.cfg b/setup.cfg index efdf38296..16f5d925e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -14,10 +14,3 @@ isort-show-traceback = True # E501 -- line too long # F405 -- name may be undefined, or defined from star imports # E402 -- module level import not at top of file - -[tool:pytest] -addopts = --ds=settings.test --reuse-db -python_paths = src/ -testpaths = - src/apps - src/tests diff --git a/src/apps/api/fields.py b/src/apps/api/fields.py index 3ca9680e6..65775d267 100644 --- a/src/apps/api/fields.py +++ b/src/apps/api/fields.py @@ -3,7 +3,6 @@ import binascii import json -import six from django.core.files.base import ContentFile from drf_extra_fields.fields import Base64ImageField from rest_framework.exceptions import ValidationError @@ -36,7 +35,7 @@ def to_internal_value(self, named_json_data): 
file_name = data["file_name"] base64_data = data["data"] - if isinstance(base64_data, six.string_types): + if isinstance(base64_data, str): # Strip base64 header. if ';base64,' in base64_data: header, base64_data = base64_data.split(';base64,') diff --git a/src/apps/api/pagination.py b/src/apps/api/pagination.py index 5f3cbdc41..03a3be364 100644 --- a/src/apps/api/pagination.py +++ b/src/apps/api/pagination.py @@ -22,3 +22,60 @@ def get_paginated_response(self, data): 'page_size': self.page_size, 'results': data }) + + +class DynamicChoicePagination(PageNumberPagination): + """ + Dynamic pagination : + - default : 50 objects. + - predetermined values : 50, 100, 500, all + - if page_size=all => fetch all objects, capped by max_page_size + """ + page_size = 50 + page_size_query_param = 'page_size' + max_page_size = 1000 + _allowed_sizes = (50, 100, 500, 'all') + + def get_page_size(self, request): + raw = request.query_params.get(self.page_size_query_param) + if raw is None: + return self.page_size + + raw_lower = str(raw).lower() + if raw_lower == 'all': + return self.max_page_size + + try: + val = int(raw) + except (TypeError, ValueError): + return self.page_size + + if val in (50, 100, 500): + return min(val, self.max_page_size) + return self.page_size + + def paginate_queryset(self, queryset, request, view=None): + raw = request.query_params.get(self.page_size_query_param) + self.requested_page_size = str(raw).lower() if raw is not None else str(self.page_size) + + page_size = self.get_page_size(request) + if isinstance(page_size, int) and page_size > 0: + self.page_size = min(page_size, self.max_page_size) + else: + self.page_size = self.page_size + + return super().paginate_queryset(queryset, request, view) + + def get_paginated_response(self, data): + page_size_value = getattr(self, 'requested_page_size', None) + if page_size_value is None: + page_size_value = self.page_size + + return Response({ + 'next': self.get_next_link(), + 'previous': 
self.get_previous_link(), + 'count': self.page.paginator.count, + 'page_size': page_size_value, + 'results': data, + 'allowed_page_sizes': [50, 100, 500, 'all'], + }) diff --git a/src/apps/api/serializers/competitions.py b/src/apps/api/serializers/competitions.py index d4557a1c6..a0fb14fda 100644 --- a/src/apps/api/serializers/competitions.py +++ b/src/apps/api/serializers/competitions.py @@ -458,6 +458,7 @@ class CompetitionSerializerSimple(serializers.ModelSerializer): created_by = serializers.CharField(source='created_by.username', read_only=True) owner_display_name = serializers.SerializerMethodField() participants_count = serializers.IntegerField(read_only=True) + first_phase_start = serializers.DateTimeField(read_only=True) class Meta: model = Competition @@ -467,6 +468,7 @@ class Meta: 'created_by', 'owner_display_name', 'created_when', + 'first_phase_start', 'published', 'participants_count', 'logo', @@ -477,8 +479,7 @@ class Meta: 'contact_email', 'report', 'is_featured', - 'submissions_count', - 'participants_count' + 'submissions_count' ) def get_created_by(self, obj): diff --git a/src/apps/api/serializers/datasets.py b/src/apps/api/serializers/datasets.py index abdde3788..795aa72ec 100644 --- a/src/apps/api/serializers/datasets.py +++ b/src/apps/api/serializers/datasets.py @@ -46,12 +46,19 @@ def validate_is_public(self, is_public): return is_public def validate(self, attrs): + # Check for duplicate name if 'name' in attrs: existing_lookup = Data.objects.filter(name=attrs['name'], created_by=self.context['request'].user) if self.instance: existing_lookup = existing_lookup.exclude(pk=self.instance.pk) if existing_lookup.exists(): raise ValidationError("You already have a dataset by this name, please delete that dataset or rename this one") + + # Validate file type + request_sassy_file_name = attrs.get('request_sassy_file_name') + if request_sassy_file_name and not request_sassy_file_name.endswith('.zip'): + raise ValidationError("Only zip files are 
allowed!") + return attrs def create(self, validated_data): diff --git a/src/apps/api/serializers/submissions.py b/src/apps/api/serializers/submissions.py index 6def18976..9c91737ca 100644 --- a/src/apps/api/serializers/submissions.py +++ b/src/apps/api/serializers/submissions.py @@ -8,7 +8,6 @@ from api.mixins import DefaultUserCreateMixin from api.serializers import leaderboards -# from api.serializers.profiles import SimpleOrganizationSerializer from api.serializers.tasks import TaskSerializer from api.serializers.submission_leaderboard import SubmissionScoreSerializer from competitions.models import Submission, SubmissionDetails, CompetitionParticipant, Phase diff --git a/src/apps/api/tests/test_datasets.py b/src/apps/api/tests/test_datasets.py index b116184d7..8f676d90a 100644 --- a/src/apps/api/tests/test_datasets.py +++ b/src/apps/api/tests/test_datasets.py @@ -30,8 +30,8 @@ def test_dataset_api_checks_duplicate_names_for_same_user(self): resp = self.client.post(reverse("data-list"), { 'name': 'Test!', 'type': Data.COMPETITION_BUNDLE, - 'request_sassy_file_name': faker.file_name(), - 'file_name': faker.file_name(), + 'request_sassy_file_name': faker.file_name(extension='.zip'), + 'file_name': faker.file_name(extension='.zip'), 'file_size': 1000, }) @@ -42,7 +42,7 @@ def test_dataset_api_checks_duplicate_names_for_same_user(self): resp = self.client.put(reverse("data-detail", args=(self.existing_dataset.pk,)), { 'name': 'Test!', 'type': Data.COMPETITION_BUNDLE, - 'request_sassy_file_name': faker.file_name(), + 'request_sassy_file_name': faker.file_name(extension='.zip'), 'file_size': 1000, }) assert resp.status_code == 200 @@ -77,8 +77,8 @@ def test_dataset_api_check_quota(self): resp = self.client.post(reverse("data-list"), { 'name': 'new-file-test', 'type': Data.COMPETITION_BUNDLE, - 'request_sassy_file_name': faker.file_name(), - 'file_name': faker.file_name(), + 'request_sassy_file_name': faker.file_name(extension='.zip'), + 'file_name': 
faker.file_name(extension='.zip'), 'file_size': file_size, }) @@ -90,12 +90,28 @@ def test_dataset_api_check_quota(self): resp = self.client.post(reverse("data-list"), { 'name': 'new-file-test', 'type': Data.COMPETITION_BUNDLE, - 'request_sassy_file_name': faker.file_name(), + 'request_sassy_file_name': faker.file_name(extension='.zip'), 'file_name': faker.file_name(), 'file_size': file_size, }) assert resp.status_code == 201 + def test_dataset_api_rejects_non_zip_files(self): + self.client.login(username='creator', password='creator') + + # Attempt to upload a non-zip file + resp = self.client.post(reverse("data-list"), { + 'name': 'non-zip-test', + 'type': Data.COMPETITION_BUNDLE, + 'request_sassy_file_name': faker.file_name(extension='.py'), + 'file_name': faker.file_name(extension='.py'), + 'file_size': 1000, + }) + + assert resp.status_code == 400 + assert "non_field_errors" in resp.data + assert resp.data["non_field_errors"][0] == "Only zip files are allowed!" + class DatasetDetailTests(TestCase): def setUp(self): diff --git a/src/apps/api/views/analytics.py b/src/apps/api/views/analytics.py index 552b7cf30..ae9ef667d 100644 --- a/src/apps/api/views/analytics.py +++ b/src/apps/api/views/analytics.py @@ -16,7 +16,6 @@ import os import datetime -import coreapi import pytz import logging logger = logging.getLogger(__name__) @@ -26,42 +25,6 @@ delete_orphan_files_task = None -class SimpleFilterBackend(BaseFilterBackend): - def get_schema_fields(self, view): - fields = [ - coreapi.Field( - name='start_date', - location='query', - required=True, - type='string', - description='Beginning of query interval (inclusive) (YYYY-MM-DD format string)' - ), - coreapi.Field( - name='end_date', - location='query', - required=True, - type='string', - description='End of query interval (exclusive) (YYYY-MM-DD format string)' - ), - coreapi.Field( - name='time_unit', - location='query', - required=True, - type='string', - description='Unit of time (choose 1 of month, week, or 
day)' - ), - coreapi.Field( - name='format', - location='query', - required=False, - type='string', - description='If csv data is desired set format=csv, otherwise do not set.' - ), - ] - - return fields - - def merge_dicts(d1, d2): d = {**d1, **d2} return d @@ -111,7 +74,7 @@ class AnalyticsView(APIView): Return the total number of users joined, competitions created, published competitions created, and submissions made within a given time interval. Also returns the number of comps, users, and subs created within the time range for each time unit. """ - filter_backends = (SimpleFilterBackend,) + filter_backends = (BaseFilterBackend,) renderer_classes = (JSONRenderer, AnalyticsRenderer,) def get(self, request): @@ -399,7 +362,6 @@ def check_orphans_deletion_status(request): if not request.user.is_superuser: raise PermissionDenied(detail="Admin only") - global delete_orphan_files_task state = None if delete_orphan_files_task: state = delete_orphan_files_task.state diff --git a/src/apps/api/views/competitions.py b/src/apps/api/views/competitions.py index e3b9804f7..7cacb433b 100644 --- a/src/apps/api/views/competitions.py +++ b/src/apps/api/views/competitions.py @@ -869,7 +869,7 @@ class CompetitionParticipantViewSet(ModelViewSet): queryset = CompetitionParticipant.objects.all() serializer_class = CompetitionParticipantSerializer filter_backends = (DjangoFilterBackend, SearchFilter) - filter_fields = ('user__username', 'user__email', 'status', 'competition', 'user__is_deleted') + filterset_fields = ('user__username', 'user__email', 'status', 'competition', 'user__is_deleted') search_fields = ('user__username', 'user__email',) def get_queryset(self): diff --git a/src/apps/api/views/datasets.py b/src/apps/api/views/datasets.py index 0c95a8a3c..fa22187b7 100644 --- a/src/apps/api/views/datasets.py +++ b/src/apps/api/views/datasets.py @@ -21,7 +21,7 @@ class DataViewSet(ModelViewSet): queryset = Data.objects.all() filter_backends = (DjangoFilterBackend, SearchFilter) - 
filter_fields = ('type', 'name', 'key', 'was_created_by_competition', 'is_public') + filterset_fields = ('type', 'name', 'key', 'was_created_by_competition', 'is_public') search_fields = ('file_name', 'name', 'description', 'key', 'competition__title',) pagination_class = BasicPagination diff --git a/src/apps/api/views/profiles.py b/src/apps/api/views/profiles.py index 7c7f6dcd8..65e2bb13c 100644 --- a/src/apps/api/views/profiles.py +++ b/src/apps/api/views/profiles.py @@ -166,7 +166,7 @@ def update_member_group(self, request, pk=None): @action(detail=True, methods=['post'], permission_classes=[IsOrganizationEditor]) def invite_users(self, request, pk=None): org = self.get_object() - if type(request.data['users']) != list: + if type(request.data['users']) is not list: raise ValidationError(f'Required data is an Array of User ID\'s not a {type(request.data["users"])} ') # Getting users, but filtering out any that are already in the organization users = User.objects.filter(id__in=request.data['users']).exclude(organizations=pk) diff --git a/src/apps/api/views/queues.py b/src/apps/api/views/queues.py index 23e486e85..ffb0b2550 100644 --- a/src/apps/api/views/queues.py +++ b/src/apps/api/views/queues.py @@ -14,7 +14,7 @@ class QueueViewSet(ModelViewSet): queryset = Queue.objects.all() serializer_class = serializers.QueueListSerializer - filter_fields = ('owner', 'is_public', 'name') + filterset_fields = ('owner', 'is_public', 'name') filter_backends = (DjangoFilterBackend, SearchFilter) search_fields = ('name',) pagination_class = BasicPagination diff --git a/src/apps/api/views/submissions.py b/src/apps/api/views/submissions.py index 7c7ddb538..bfd6f8889 100644 --- a/src/apps/api/views/submissions.py +++ b/src/apps/api/views/submissions.py @@ -16,6 +16,7 @@ from django.core.files.base import ContentFile from profiles.models import Organization, Membership +from api.pagination import DynamicChoicePagination from tasks.models import Task from api.serializers.submissions 
import SubmissionCreationSerializer, SubmissionSerializer, SubmissionFilesSerializer, SubmissionDetailSerializer from competitions.models import Submission, SubmissionDetails, Phase, CompetitionParticipant @@ -29,9 +30,10 @@ class SubmissionViewSet(ModelViewSet): queryset = Submission.objects.all().order_by('-pk') permission_classes = [] filter_backends = (DjangoFilterBackend, SearchFilter) - filter_fields = ('phase__competition', 'phase', 'status', 'is_soft_deleted') + filterset_fields = ('phase__competition', 'phase', 'status', 'is_soft_deleted') search_fields = ('data__data_file', 'description', 'name', 'owner__username') renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES + [renderers.CSVRenderer] + pagination_class = DynamicChoicePagination def check_object_permissions(self, request, obj): if self.action in ['submission_leaderboard_connection']: @@ -289,6 +291,9 @@ def get_renderer_context(self): 'created_when': 'Created When', 'status': 'Status', 'phase_name': 'Phase', + 'task.name': 'Task', + 'scores.0.score': 'Score', + 'on_leaderboard': 'On Leaderboard' } context["header"] = [k for k in context["labels"].keys()] return context @@ -559,6 +564,14 @@ def update_fact_sheet(self, request, pk): Submission.objects.filter(Q(parent=top_level_submission) | Q(id=top_level_submission.id)).update(fact_sheet_answers=request_data) return Response({}) + def paginate_queryset(self, queryset): + ''' + This method is added to disable pagination when downloading the submissions CSV + ''' + if getattr(getattr(self.request, "accepted_renderer", None), "format", None) == "csv": + return None + return super().paginate_queryset(queryset) + @api_view(['POST']) @permission_classes((AllowAny,)) # permissions are checked via the submission secret diff --git a/src/apps/api/views/tasks.py b/src/apps/api/views/tasks.py index 9bed7c2fd..debf03237 100644 --- a/src/apps/api/views/tasks.py +++ b/src/apps/api/views/tasks.py @@ -29,7 +29,7 @@ class TaskViewSet(ModelViewSet): queryset = 
Task.objects.all() serializer_class = serializers.TaskSerializer - filter_fields = ('created_by', 'is_public') + filterset_fields = ('created_by', 'is_public') filter_backends = (DjangoFilterBackend, SearchFilter) search_fields = ('name', 'description',) pagination_class = BasicPagination diff --git a/src/apps/commands/management/commands/generate_data.py b/src/apps/commands/management/commands/generate_data.py index dfd062827..8297b88b3 100644 --- a/src/apps/commands/management/commands/generate_data.py +++ b/src/apps/commands/management/commands/generate_data.py @@ -31,7 +31,7 @@ def add_arguments(self, parser): def handle(self, *args, **kwargs): size = kwargs.get('size') or 3 no_admin = kwargs.get('no_admin') - print(f'Creating data of size {size} {"without an admin account." if no_admin else "with an admin account." }') + print(f'Creating data of size {size} {"without an admin account." if no_admin else "with an admin account."}') users = [] for i in range(size): if i == 0 and not no_admin: diff --git a/src/apps/competitions/admin.py b/src/apps/competitions/admin.py index 769888db8..98cb64147 100644 --- a/src/apps/competitions/admin.py +++ b/src/apps/competitions/admin.py @@ -92,9 +92,58 @@ def SubmissionsExport_as_csv(modeladmin, request, queryset): headers={"Content-Disposition": 'attachment; filename="submissions.csv"'}, ) writer = csv.writer(response) - writer.writerow(["ID", "Owner", "Status", "Task", "Phase", "Queue"]) + writer.writerow( + [ + "ID", + "Owner", + "Status", + "Submitted on", + "Task", + "Phase", + "Competition Title", + "Competition creation date", + "Queue", + "Scores", + "On Leaderboard", + ] + ) for obj in queryset: - writer.writerow([obj.id, obj.owner, obj.status, obj.task, obj.phase, obj.queue]) + scores_list = [] + for scores in obj.scores.all(): + scores_list.append(scores.score) + if obj.task is not None: + if len(scores_list) == 0: + writer.writerow( + [ + obj.id, + obj.owner, + obj.status, + obj.created_when, + obj.task, + 
obj.phase, + obj.phase.competition.title, + obj.phase.competition.created_when, + obj.queue, + "None", + obj.appear_on_leaderboards, + ] + ) + else: + writer.writerow( + [ + obj.id, + obj.owner, + obj.status, + obj.created_when, + obj.task, + obj.phase, + obj.phase.competition.title, + obj.phase.competition.created_when, + obj.queue, + scores_list[0], + obj.appear_on_leaderboards, + ] + ) return response diff --git a/src/apps/competitions/emails.py b/src/apps/competitions/emails.py index 016c2c1c6..33316daa0 100644 --- a/src/apps/competitions/emails.py +++ b/src/apps/competitions/emails.py @@ -10,7 +10,8 @@ def send_participation_requested_emails(participant): return context = { - 'participant': participant + 'participant': participant, + 'user': participant.user } # Notify Organizers codalab_send_mail( @@ -36,7 +37,8 @@ def send_participation_accepted_emails(participant): return context = { - 'participant': participant + 'participant': participant, + 'user': participant.user } codalab_send_mail( context_data=context, @@ -60,7 +62,8 @@ def send_participation_denied_emails(participant): return context = { - 'participant': participant + 'participant': participant, + 'user': participant.user } # Notify Organizers codalab_send_mail( diff --git a/src/apps/competitions/models.py b/src/apps/competitions/models.py index fb7e5f068..8477e1c62 100644 --- a/src/apps/competitions/models.py +++ b/src/apps/competitions/models.py @@ -102,6 +102,13 @@ def bundle_dataset(self): def all_organizers(self): return [self.created_by] + list(self.collaborators.all()) + @property + def first_phase_start(self): + first_phase = self.phases.filter(index=0).first() + if first_phase and first_phase.start: + return first_phase.start + return self.created_when + def user_has_admin_permission(self, user): if isinstance(user, int): try: @@ -153,7 +160,6 @@ def apply_phase_migration(self, current_phase, next_phase, force_migration=False created_by_migration=current_phase, 
participant=submission.participant, phase=next_phase, - task=submission.task, owner=submission.owner, data=submission.data, ) @@ -670,8 +676,8 @@ def cancel(self, status=CANCELLED): # If a custom queue is set, we need to fetch the appropriate celery app if self.phase.competition.queue: celery_app = app_for_vhost(str(self.phase.competition.queue.vhost)) - - celery_app.control.revoke(self.celery_task_id, terminate=True) + # We need to convert the UUID given by celery into a byte like object otherwise it won't work + celery_app.control.revoke(str(self.celery_task_id), terminate=True) self.status = status self.save() return True diff --git a/src/apps/competitions/tasks.py b/src/apps/competitions/tasks.py index 4c5e495b8..4990d04f5 100644 --- a/src/apps/competitions/tasks.py +++ b/src/apps/competitions/tasks.py @@ -15,6 +15,7 @@ from django.core.exceptions import ObjectDoesNotExist from django.core.files.base import ContentFile from django.db.models import Subquery, OuterRef, Count, Case, When, Value, F +from django.db import transaction from django.utils.text import slugify from django.utils.timezone import now from rest_framework.exceptions import ValidationError @@ -141,14 +142,6 @@ def _send_to_compute_worker(submission, is_scoring): submission = Submission.objects.get(id=submission.id) task = submission.task - # priority of scoring tasks is higher, we don't want to wait around for - # many submissions to be scored while we're waiting for results - if is_scoring: - # higher numbers are higher priority - priority = 10 - else: - priority = 0 - if not is_scoring: run_args['prediction_result'] = make_url_sassy( path=submission.prediction_result.name, @@ -204,37 +197,41 @@ def _send_to_compute_worker(submission, is_scoring): if submission.phase.competition.queue: # if the competition is running on a custom queue, not the default queue submission.queue_name = submission.phase.competition.queue.name or '' run_args['execution_time_limit'] = 
submission.phase.execution_time_limit # use the competition time limit - submission.save() - - # Send to special queue? Using `celery_app` var name here since we'd be overriding the imported `app` - # variable above - celery_app = app_or_default() - with celery_app.connection() as new_connection: - new_connection.virtual_host = str(submission.phase.competition.queue.vhost) - task = celery_app.send_task( + submission.save(update_fields=["queue_name"]) + if submission.status == Submission.SUBMITTING: + # Don't want to mark an already-prepared submission as "submitted" again, so + # only do this if we were previously "SUBMITTING" + submission.status = Submission.SUBMITTED + submission.save(update_fields=["status"]) + + def _enqueue_after_commit(): + # priority of scoring tasks is higher, we don't want to wait around for + # many submissions to be scored while we're waiting for results + priority = 10 if is_scoring else 0 + if submission.phase.competition.queue: + celery_app = app_or_default() + with celery_app.connection() as new_connection: + new_connection.virtual_host = str(submission.phase.competition.queue.vhost) + task = celery_app.send_task( + 'compute_worker_run', + args=(run_args,), + queue='compute-worker', + soft_time_limit=time_limit, + connection=new_connection, + priority=priority, + ) + else: + task = app.send_task( 'compute_worker_run', args=(run_args,), queue='compute-worker', soft_time_limit=time_limit, - connection=new_connection, priority=priority, ) - else: - task = app.send_task( - 'compute_worker_run', - args=(run_args,), - queue='compute-worker', - soft_time_limit=time_limit, - priority=priority, - ) - submission.celery_task_id = task.id - - if submission.status == Submission.SUBMITTING: - # Don't want to mark an already-prepared submission as "submitted" again, so - # only do this if we were previously "SUBMITTING" - submission.status = Submission.SUBMITTED + submission.celery_task_id = task.id + 
submission.save(update_fields=["celery_task_id"]) - submission.save() + transaction.on_commit(_enqueue_after_commit) def create_detailed_output_file(detail_name, submission): @@ -377,7 +374,7 @@ def mark_status_as_failed_and_delete_dataset(competition_creation_status, detail raise CompetitionUnpackingException("competition.yaml is missing from zip, check your folder structure " "to make sure it is in the root directory.") with open(yaml_path) as f: - competition_yaml = yaml.load(f.read()) + competition_yaml = yaml.safe_load(f.read()) yaml_version = str(competition_yaml.get('version', '1')) diff --git a/src/apps/competitions/tests/test_submissions.py b/src/apps/competitions/tests/test_submissions.py index a7ae024f2..ee5cdc850 100644 --- a/src/apps/competitions/tests/test_submissions.py +++ b/src/apps/competitions/tests/test_submissions.py @@ -247,7 +247,8 @@ def __init__(self): task = Task() celery_app.return_value = task mock_sassy.return_value = '' - run_submission(submission.pk) + with self.captureOnCommitCallbacks(execute=True): + run_submission(submission.pk) return celery_app def test_making_submission_creates_parent_sub_and_additional_sub_per_task(self): diff --git a/src/apps/forums/tests/test_smoke_tests.py b/src/apps/forums/tests/test_smoke_tests.py index 6ea251a43..a385c1c2e 100644 --- a/src/apps/forums/tests/test_smoke_tests.py +++ b/src/apps/forums/tests/test_smoke_tests.py @@ -1,61 +1,159 @@ +# apps/forums/tests/test_smoke_tests.py from django.test import TestCase from django.contrib.auth import get_user_model from django.urls import reverse -from competitions.models import Competition +from competitions.models import Competition, CompetitionParticipant from ..models import Forum, Thread, Post - User = get_user_model() -class ForumSmokeTests(TestCase): +class ForumTests(TestCase): - def setUp(self): - self.admin_user = User.objects.create_superuser("admin", "admin@example.com", "pass") - self.regular_user = User.objects.create_user("regular", 
email="regular@example.com", password="pass") + @classmethod + def setUpTestData(cls): + cls.admin_user = User.objects.create_superuser( + username="admin", + email="admin@example.com", + password="pass" + ) + cls.regular_user = User.objects.create_user( + username="regular", + email="regular@example.com", + password="pass" + ) + cls.other_user = User.objects.create_user( + username="other", + email="other@example.com", + password="pass" + ) - self.competition = Competition.objects.create( + cls.competition = Competition.objects.create( title="Test Competition", - created_by=self.admin_user, + created_by=cls.admin_user, published=False, + forum_enabled=True ) - self.forum = Forum.objects.create(competition=self.competition) - self.thread = Thread.objects.create(forum=self.forum, started_by=self.regular_user) - self.post = Post.objects.create(thread=self.thread, posted_by=self.regular_user) - def test_forum_thread_list_view_returns_200(self): - resp = self.client.get(reverse("forums:forum_detail", kwargs={'forum_pk': self.forum.pk})) - self.assertEquals(resp.status_code, 200) + cls.forum = Forum.objects.create(competition=cls.competition) - def test_forum_post_new_thread_non_logged_in_returns_302(self): - resp = self.client.get(reverse("forums:forum_new_thread", kwargs={'forum_pk': self.forum.pk})) - self.assertEquals(resp.status_code, 302) + CompetitionParticipant.objects.create( + competition=cls.competition, + user=cls.regular_user, + status=CompetitionParticipant.APPROVED + ) - def test_forum_post_new_thread_view_returns_200(self): - self.client.login(username="regular", password="pass") - resp = self.client.get(reverse("forums:forum_new_thread", kwargs={'forum_pk': self.forum.pk})) - self.assertEquals(resp.status_code, 200) + cls.thread = Thread.objects.create( + forum=cls.forum, + started_by=cls.regular_user + ) + + cls.post = Post.objects.create( + thread=cls.thread, + posted_by=cls.regular_user, + content="Initial post" + ) + + def 
test_forum_detail_view_returns_200(self): + resp = self.client.get(reverse("forums:forum_detail", kwargs={"forum_pk": self.forum.pk})) + self.assertEqual(resp.status_code, 200) + + def test_thread_detail_view_returns_200(self): + resp = self.client.get(reverse( + "forums:forum_thread_detail", + kwargs={"forum_pk": self.forum.pk, "thread_pk": self.thread.pk} + )) + self.assertEqual(resp.status_code, 200) - def test_forum_view_thread_returns_200(self): - resp = self.client.get(reverse("forums:forum_thread_detail", kwargs={'forum_pk': self.forum.pk, 'thread_pk': self.thread.pk})) - self.assertEquals(resp.status_code, 200) + def test_create_thread_requires_login(self): + resp = self.client.get(reverse("forums:forum_new_thread", kwargs={"forum_pk": self.forum.pk})) + self.assertEqual(resp.status_code, 302) - def test_forum_new_post_requires_login_returns_302(self): - resp = self.client.get(reverse("forums:forum_new_post", kwargs={'forum_pk': self.forum.pk, 'thread_pk': self.thread.pk})) - self.assertEquals(resp.status_code, 302) + def test_create_thread_post(self): + self.client.login(username="regular", password="pass") + resp = self.client.post(reverse("forums:forum_new_thread", kwargs={"forum_pk": self.forum.pk}), { + "title": "New thread", + "content": "Hello world", + }) + self.assertEqual(resp.status_code, 302) + self.assertEqual(Thread.objects.count(), 2) + + def test_create_post_requires_login(self): + resp = self.client.get(reverse( + "forums:forum_new_post", + kwargs={"forum_pk": self.forum.pk, "thread_pk": self.thread.pk} + )) + self.assertEqual(resp.status_code, 302) - def test_forum_new_post_returns_200(self): + def test_create_post(self): self.client.login(username="regular", password="pass") - resp = self.client.get(reverse("forums:forum_new_post", kwargs={'forum_pk': self.forum.pk, 'thread_pk': self.thread.pk})) - self.assertEquals(resp.status_code, 200) - - def test_forum_delete_post_returns_200(self): - self.client.login(username='admin', 
password='pass') - resp = self.client.delete(reverse("forums:forum_delete_post", kwargs={'forum_pk': self.forum.pk, 'thread_pk': self.thread.pk, 'post_pk': self.post.pk})) - self.assertEquals(resp.status_code, 302) - - def test_forum_delete_thread_returns_200(self): - self.client.login(username='admin', password='pass') - resp = self.client.delete(reverse("forums:forum_delete_thread", kwargs={'forum_pk': self.forum.pk, 'thread_pk': self.thread.pk})) - self.assertEquals(resp.status_code, 302) + resp = self.client.post(reverse( + "forums:forum_new_post", + kwargs={"forum_pk": self.forum.pk, "thread_pk": self.thread.pk} + ), {"content": "Another message"}) + self.assertEqual(resp.status_code, 302) + self.assertEqual(Post.objects.count(), 2) + + def test_delete_post_by_admin(self): + self.client.login(username="admin", password="pass") + resp = self.client.post(reverse( + "forums:forum_delete_post", + kwargs={ + "forum_pk": self.forum.pk, + "thread_pk": self.thread.pk, + "post_pk": self.post.pk + } + )) + self.assertEqual(resp.status_code, 302) + self.assertEqual(Post.objects.filter(pk=self.post.pk).count(), 0) + + def test_delete_post_forbidden_for_other_user(self): + p = Post.objects.create(thread=self.thread, posted_by=self.regular_user, content="temp-forb") + + self.client.login(username="other", password="pass") + resp = self.client.post(reverse( + "forums:forum_delete_post", + kwargs={ + "forum_pk": self.forum.pk, + "thread_pk": self.thread.pk, + "post_pk": p.pk + } + )) + + exists_after = Post.objects.filter(pk=p.pk).exists() + self.assertIn(resp.status_code, (302, 403)) + if resp.status_code == 403: + self.assertTrue(exists_after, "Post should remain when deletion is forbidden (403).") + + def test_delete_thread_by_admin(self): + t = Thread.objects.create(forum=self.forum, started_by=self.regular_user) + Post.objects.create(thread=t, posted_by=self.regular_user, content="to be deleted") + + self.client.login(username="admin", password="pass") + resp = 
self.client.post(reverse( + "forums:forum_delete_thread", + kwargs={ + "forum_pk": self.forum.pk, + "thread_pk": t.pk + } + )) + self.assertEqual(resp.status_code, 302) + self.assertEqual(Thread.objects.filter(pk=t.pk).count(), 0) + + def test_delete_thread_forbidden_for_other_user(self): + t = Thread.objects.create(forum=self.forum, started_by=self.regular_user) + Post.objects.create(thread=t, posted_by=self.regular_user, content="keep me") + + self.client.login(username="other", password="pass") + resp = self.client.post(reverse( + "forums:forum_delete_thread", + kwargs={ + "forum_pk": self.forum.pk, + "thread_pk": t.pk + } + )) + + self.assertIn(resp.status_code, (302, 403)) + if resp.status_code == 403: + self.assertEqual(Thread.objects.filter(pk=t.pk).count(), 1) diff --git a/src/apps/forums/views.py b/src/apps/forums/views.py index 5896ed8d5..cb571269c 100644 --- a/src/apps/forums/views.py +++ b/src/apps/forums/views.py @@ -115,20 +115,25 @@ class DeletePostView(ForumBaseMixin, LoginRequiredMixin, DeleteView): model = Post pk_url_kwarg = 'post_pk' - def delete(self, request, *args, **kwargs): + def get_success_url(self): + post = self.get_object() + if post.thread: + return post.thread.get_absolute_url() if post.thread.posts.count() > 1 else post.thread.forum.get_absolute_url() + return '/' + + def form_valid(self, form): self.object = self.get_object() - if self.object.posted_by == request.user or \ - request.user in self.object.thread.forum.competition.collaborators.all() or \ - self.object.thread.forum.competition.created_by == request.user: + if self.object.posted_by == self.request.user or \ + self.request.user in self.object.thread.forum.competition.collaborators.all() or \ + self.object.thread.forum.competition.created_by == self.request.user: # If there are more posts in the thread, leave it around, otherwise delete it + success_url = self.get_success_url() if self.object.thread.posts.count() == 1: - success_url = 
self.object.thread.forum.get_absolute_url() self.object.thread.delete() - else: - success_url = self.object.thread.get_absolute_url() self.object.delete() return HttpResponseRedirect(success_url) + else: raise PermissionDenied("Cannot delete a post you don't own in a competition you aren't organizing!") @@ -165,11 +170,15 @@ class DeleteThreadView(ForumBaseMixin, LoginRequiredMixin, DeleteView): model = Thread pk_url_kwarg = 'thread_pk' - def delete(self, request, *args, **kwargs): + def get_success_url(self): + thread = self.get_object() + return thread.forum.get_absolute_url() + + def form_valid(self, form): self.object = self.get_object() - if self.object.forum.competition.created_by == request.user or \ - self.object.started_by == request.user: + if self.object.forum.competition.created_by == self.request.user or \ + self.object.started_by == self.request.user: success_url = self.object.forum.get_absolute_url() self.object.delete() diff --git a/src/apps/profiles/tokens.py b/src/apps/profiles/tokens.py index 2842f2df4..9984f109a 100644 --- a/src/apps/profiles/tokens.py +++ b/src/apps/profiles/tokens.py @@ -1,13 +1,12 @@ from django.contrib.auth.tokens import PasswordResetTokenGenerator -import six class AccountActivationTokenGenerator(PasswordResetTokenGenerator): def _make_hash_value(self, user, timestamp): return ( - six.text_type(user.pk) + - six.text_type(timestamp) + - six.text_type(user.is_active) + str(user.pk) + + str(timestamp) + + str(user.is_active) ) @@ -17,9 +16,9 @@ def _make_hash_value(self, user, timestamp): class AccountDeletionTokenGenerator(PasswordResetTokenGenerator): def _make_hash_value(self, user, timestamp): return ( - six.text_type(user.pk) + - six.text_type(timestamp) + - six.text_type(user.is_deleted) + str(user.pk) + + str(timestamp) + + str(user.is_deleted) ) diff --git a/src/apps/profiles/views.py b/src/apps/profiles/views.py index 39db7f614..81c8dc1de 100644 --- a/src/apps/profiles/views.py +++ b/src/apps/profiles/views.py @@ 
-19,6 +19,7 @@ from api.serializers.profiles import UserSerializer, OrganizationDetailSerializer, OrganizationEditSerializer, \ UserNotificationSerializer +from api.serializers.competitions import CompetitionSerializerSimple from .forms import SignUpForm, LoginForm, ActivationForm from .models import User, DeletedUser, Organization, Membership from oidc_configurations.models import Auth_Organization @@ -67,7 +68,23 @@ class UserDetailView(LoginRequiredMixin, DetailView): def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) - context['serialized_user'] = json.dumps(UserSerializer(self.get_object()).data) + user = self.get_object() + user_data = UserSerializer(user).data + # Fetch competitions organized by this user (as owner or collaborator) + organized_qs = ( + Competition.objects + .filter( + Q(created_by=user) | Q(collaborators=user), + published=True, + ) + .distinct() + .order_by("-created_when") + ) + # Serialize into the same shape your public-list cards expect + user_data["competitions_organized"] = CompetitionSerializerSimple( + organized_qs, many=True, context={"request": self.request} + ).data + context["serialized_user"] = json.dumps(user_data).replace(" Name Type - Uploaded... + Uploaded Publish Edit Delete @@ -61,7 +61,7 @@ Name - Uploaded... + Uploaded diff --git a/src/static/riot/competitions/detail/leaderboards.tag b/src/static/riot/competitions/detail/leaderboards.tag index 11191123e..32050a480 100644 --- a/src/static/riot/competitions/detail/leaderboards.tag +++ b/src/static/riot/competitions/detail/leaderboards.tag @@ -57,7 +57,11 @@ { submission.owner } { submission.organization.name } - { pretty_date(submission.created_when) } + + { pretty_date(submission.created_when) } + {submission.id} @@ -88,6 +92,12 @@ return '' } } + + self.sort_date_value = function (date_string) { + if (!date_string) return 0 + const dt = luxon.DateTime.fromISO(date_string) + return dt.isValid ? 
dt.toMillis() : 0 + } self.bold_class = function(column, submission){ // Return `text-bold` if submission has diff --git a/src/static/riot/competitions/detail/submission_manager.tag b/src/static/riot/competitions/detail/submission_manager.tag index 5ffe959ef..730b2face 100644 --- a/src/static/riot/competitions/detail/submission_manager.tag +++ b/src/static/riot/competitions/detail/submission_manager.tag @@ -187,6 +187,37 @@ +