|
| 1 | +"""Celery configuration and in-memory task queue fallback""" |
| 2 | + |
| 3 | +import logging |
| 4 | +import threading |
| 5 | +import uuid |
| 6 | +from concurrent.futures import ThreadPoolExecutor |
| 7 | +from datetime import datetime, timedelta, timezone |
| 8 | + |
| 9 | +from pypsa_app.backend.settings import settings |
| 10 | + |
| 11 | +logger = logging.getLogger(__name__) |
| 12 | + |
# Module-level registry of task records keyed by task id.
# Each record holds: state, created_at, and (eventually) result/exception/meta.
# All reads and writes must happen while holding _lock.
_tasks = {}
_lock = threading.Lock()
# Single worker thread: submitted tasks execute one at a time, in order.
_pool = ThreadPoolExecutor(max_workers=1, thread_name_prefix="task")
| 16 | + |
| 17 | + |
class InMemoryAsyncResult:
    """Read-only view of one task's record, mimicking Celery's AsyncResult.

    Exposes ``state``, ``result`` and ``info`` by looking the task up in the
    module-level ``_tasks`` registry under ``_lock``.
    """

    def __init__(self, task_id):
        # Same attribute name Celery uses on AsyncResult.
        self.id = task_id

    def _record(self):
        """Return a shallow snapshot of this task's record (empty if unknown)."""
        with _lock:
            return dict(_tasks.get(self.id, {}))

    @property
    def state(self):
        # Unknown (or purged) tasks report PENDING, like Celery does.
        return self._record().get("state", "PENDING")

    @property
    def result(self):
        # The result is only exposed once the task has succeeded.
        record = self._record()
        if record.get("state") != "SUCCESS":
            return None
        return record.get("result")

    @property
    def info(self):
        # FAILURE -> error text; PROGRESS -> progress metadata; otherwise None.
        record = self._record()
        state = record.get("state")
        if state == "FAILURE":
            return record.get("exception")
        if state == "PROGRESS":
            return record.get("meta", {})
        return None
| 42 | + |
| 43 | + |
class InMemoryTaskQueue:
    """Drop-in stand-in for a Celery app when no broker is available.

    Provides a ``task`` decorator whose decorated functions gain an
    ``apply_async`` method, covering the small slice of the Celery API the
    app uses. Work runs on the module-level single-thread ``_pool`` and state
    is reported through the ``_tasks`` registry (read via InMemoryAsyncResult).
    """

    def task(self, *args, **kwargs):
        """Decorator factory mimicking ``Celery.task``.

        Supported options: ``bind`` (pass a task object as first argument)
        and ``name`` (stored on ``func.name``). Other options are ignored.
        """
        bind = kwargs.get("bind", False)
        task_name = kwargs.get("name")

        def decorator(func):
            def apply_async(args=(), kwargs=None, **options):
                """Queue ``func`` on the worker pool; return an AsyncResult-alike.

                Celery scheduling options (countdown, eta, ...) are accepted
                for API compatibility but ignored — execution is immediate
                FIFO on the single worker thread.
                """
                tid = str(uuid.uuid4())
                now = datetime.now(timezone.utc)

                with _lock:
                    _tasks[tid] = {"state": "PENDING", "created_at": now}
                    # Garbage-collect records older than 24h — but only
                    # terminal ones. A PENDING/PROGRESS entry may still be
                    # queued behind the single worker; deleting it would
                    # orphan the task's later state updates.
                    cutoff = now - timedelta(hours=24)
                    for k in list(_tasks.keys()):
                        entry = _tasks[k]
                        if (
                            entry.get("state") in ("SUCCESS", "FAILURE")
                            and entry.get("created_at", cutoff) < cutoff
                        ):
                            del _tasks[k]

                class Task:
                    # Mimics celery.Task just enough for bound tasks:
                    # ``self.request.id`` and ``self.update_state(...)``.
                    request = type("Request", (), {"id": tid})()

                    @staticmethod
                    def update_state(state=None, meta=None):
                        with _lock:
                            # setdefault: never KeyError even if the record
                            # was removed while the task was running.
                            entry = _tasks.setdefault(tid, {})
                            if state:
                                entry["state"] = state
                            if meta:
                                entry["meta"] = meta

                def run():
                    try:
                        call_kwargs = kwargs or {}
                        if bind:
                            res = func(Task(), *args, **call_kwargs)
                        else:
                            res = func(*args, **call_kwargs)
                        with _lock:
                            # setdefault so a missing record can't raise here
                            # (a raise in this try would be re-raised by the
                            # except path and silently kill the worker job).
                            _tasks.setdefault(tid, {}).update(
                                {"state": "SUCCESS", "result": res}
                            )
                    except Exception as e:
                        with _lock:
                            _tasks.setdefault(tid, {}).update(
                                {"state": "FAILURE", "exception": str(e)}
                            )
                        logger.error(
                            "Task failed",
                            extra={"task_id": tid, "error": str(e)},
                            exc_info=True,
                        )

                _pool.submit(run)
                return InMemoryAsyncResult(tid)

            func.apply_async = apply_async
            # Keep Celery's ``.name`` attribute convention.
            func.name = task_name if task_name is not None else func.__name__
            return func

        return decorator
| 99 | + |
| 100 | + |
# Try to use Celery with Redis, fall back to in-memory task queue
try:
    from celery import Celery

    # Only use real Celery if Redis URL is configured
    if not settings.redis_url:
        # Celery is importable but there is no broker to talk to.
        logger.warning(
            "Redis URL not configured - using in-memory task queue",
            extra={"backend": "in-memory", "background_tasks_enabled": True},
        )
        task_app = InMemoryTaskQueue()
    else:
        # Redis serves as both broker and result backend.
        task_app = Celery(
            "pypsa_app",
            broker=settings.redis_url,
            backend=settings.redis_url,
            include=["pypsa_app.backend.tasks"],
        )

        # Conservative worker settings: JSON payloads only, one task
        # prefetched at a time, workers recycled every 10 tasks, a 1h soft /
        # 2h hard per-task time limit, and late acks so a task killed
        # mid-run is redelivered rather than lost.
        task_app.conf.update(
            accept_content=["json"],
            result_expires=86400,  # keep results 24h (matches the in-memory purge window)
            worker_prefetch_multiplier=1,
            worker_max_tasks_per_child=10,
            task_soft_time_limit=3600,
            task_time_limit=7200,
            task_acks_late=True,
        )

        logger.info(
            "Initialized Celery with Redis backend",
            extra={"redis_url": settings.redis_url, "backend": "celery"},
        )

except ImportError:
    # Celery isn't installed at all - same in-memory fallback as above.
    logger.warning(
        "Celery not installed - using in-memory task queue",
        extra={"backend": "in-memory", "background_tasks_enabled": True},
    )
    task_app = InMemoryTaskQueue()
0 commit comments