Skip to content

Commit 3ac8732

Browse files
authored
Merge pull request #34 from rendiffdev/fix/critical-bugs-storage-module
Fix critical bugs and add missing storage module
2 parents 99fbc4e + 6f45e03 commit 3ac8732

File tree

10 files changed

+820
-6
lines changed

10 files changed

+820
-6
lines changed

api/models/api_key.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -47,12 +47,12 @@ class APIKey(Base):
4747
created_by = Column(String(255), nullable=True)
4848

4949
@classmethod
50-
def generate_key(cls) -> tuple[str, str]:
50+
def generate_key(cls) -> tuple[str, str, str]:
5151
"""
5252
Generate a new API key.
53-
53+
5454
Returns:
55-
tuple: (raw_key, key_hash) where raw_key should be shown to user only once
55+
tuple: (raw_key, key_hash, key_prefix) where raw_key should be shown to user only once
5656
"""
5757
# Generate 32 random bytes (256 bits)
5858
raw_key = secrets.token_urlsafe(32)

api/models/job.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -81,8 +81,12 @@ class Job(Base):
8181
# Progress tracking
8282
progress = Column(Float, default=0.0)
8383
stage = Column(String, default="queued")
84+
current_stage = Column(String, default="queued") # Alias for compatibility
85+
status_message = Column(String, nullable=True)
8486
fps = Column(Float, nullable=True)
8587
eta_seconds = Column(Integer, nullable=True)
88+
updated_at = Column(DateTime, nullable=True)
89+
processing_stats = Column(JSON, nullable=True)
8690

8791
# Quality metrics
8892
vmaf_score = Column(Float, nullable=True)

api/services/job_service.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,8 +43,8 @@ async def get_job_logs(
4343
# Job creation
4444
logs.append(f"[{job.created_at.isoformat()}] Job created: {job_id}")
4545
logs.append(f"[{job.created_at.isoformat()}] Status: QUEUED")
46-
logs.append(f"[{job.created_at.isoformat()}] Input URL: {job.input_url}")
47-
logs.append(f"[{job.created_at.isoformat()}] Operations: {len(job.operations)} operations requested")
46+
logs.append(f"[{job.created_at.isoformat()}] Input: {job.input_path}")
47+
logs.append(f"[{job.created_at.isoformat()}] Operations: {len(job.operations) if job.operations else 0} operations requested")
4848

4949
# Job parameters
5050
if job.options:
@@ -85,7 +85,7 @@ async def get_job_logs(
8585
if job.completed_at:
8686
if job.status == JobStatus.COMPLETED:
8787
logs.append(f"[{job.completed_at.isoformat()}] Status: COMPLETED")
88-
logs.append(f"[{job.completed_at.isoformat()}] Output URL: {job.output_url}")
88+
logs.append(f"[{job.completed_at.isoformat()}] Output: {job.output_path}")
8989
logs.append(f"[{job.completed_at.isoformat()}] Processing completed successfully")
9090

9191
# Calculate processing time

storage/__init__.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
"""
2+
Storage module for managing multiple storage backends.
3+
4+
Supports local filesystem, S3-compatible storage, and other backends.
5+
"""
6+
from storage.base import StorageBackend
7+
from storage.factory import create_storage_backend
8+
9+
__all__ = ["StorageBackend", "create_storage_backend"]

storage/azure.py

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
"""
2+
Azure Blob Storage backend.
3+
4+
Placeholder implementation - full implementation requires azure-storage-blob.
5+
"""
6+
from typing import Any, AsyncIterator, Dict, List, Optional, Union
7+
8+
from storage.base import StorageBackend
9+
10+
11+
class AzureStorageBackend(StorageBackend):
    """Azure Blob Storage backend.

    Placeholder implementation: configuration validation and status
    reporting work, but every data operation raises NotImplementedError
    until the full azure-storage-blob-backed implementation lands.
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize Azure storage backend.

        Args:
            config: Configuration with:
                - container: Azure container name
                - connection_string: Azure connection string
                - account_name: Storage account name (alternative to connection_string)
                - account_key: Storage account key (alternative to connection_string)

        Raises:
            ValueError: If 'container' is missing from the configuration.
        """
        super().__init__(config)
        self.container = config.get("container")

        if not self.container:
            raise ValueError("Azure backend requires 'container' in configuration")

        # Probe for the optional SDK once at construction time; data
        # operations consult self._available via _require_sdk().
        try:
            from azure.storage.blob.aio import BlobServiceClient  # noqa: F401
            self._available = True
        except ImportError:
            self._available = False

    def _require_sdk(self) -> None:
        """Raise a consistent, actionable ImportError when the SDK is missing.

        Centralizes the availability check so every operation reports the
        same message, including the install hint (previously only exists()
        told the user how to install the dependency).
        """
        if not self._available:
            raise ImportError(
                "Azure storage requires azure-storage-blob. "
                "Install with: pip install azure-storage-blob"
            )

    async def exists(self, path: str) -> bool:
        """Check if blob exists."""
        self._require_sdk()
        raise NotImplementedError("Azure storage backend not fully implemented")

    async def read(self, path: str) -> AsyncIterator[bytes]:
        """Read blob as async iterator."""
        self._require_sdk()
        raise NotImplementedError("Azure storage backend not fully implemented")

    async def write(self, path: str, data: Union[bytes, AsyncIterator[bytes]]) -> int:
        """Write data to blob."""
        self._require_sdk()
        raise NotImplementedError("Azure storage backend not fully implemented")

    async def delete(self, path: str) -> bool:
        """Delete a blob."""
        self._require_sdk()
        raise NotImplementedError("Azure storage backend not fully implemented")

    async def list(self, path: str = "", recursive: bool = False) -> List[str]:
        """List blobs in container."""
        self._require_sdk()
        raise NotImplementedError("Azure storage backend not fully implemented")

    async def ensure_dir(self, path: str) -> None:
        """No-op: Azure blob storage needs no directory creation."""
        pass

    async def get_status(self) -> Dict[str, Any]:
        """Get backend status.

        Returns:
            Dict with backend name, type, container, SDK availability, and
            an 'implemented' flag that stays False for this placeholder.
        """
        return {
            "name": self.name,
            "type": "azure",
            "container": self.container,
            "available": self._available,
            "implemented": False,
        }

storage/base.py

Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
"""
2+
Abstract base class for storage backends.
3+
"""
4+
from abc import ABC, abstractmethod
5+
from typing import Any, AsyncIterator, Dict, List, Optional, Union
6+
from pathlib import Path
7+
8+
9+
class StorageBackend(ABC):
    """Abstract base class for storage backends.

    Concrete backends implement the abstract file operations; this base
    class layers generic metadata helpers, status reporting, and a cleanup
    hook on top of them.
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Initialize storage backend.

        Args:
            config: Backend configuration dictionary. The optional "name"
                key identifies the backend (defaults to "unknown").
        """
        self.config = config
        self.name = self.config.get("name", "unknown")

    @abstractmethod
    async def exists(self, path: str) -> bool:
        """
        Check if a file exists.

        Args:
            path: File path relative to backend root

        Returns:
            True if file exists, False otherwise
        """

    @abstractmethod
    async def read(self, path: str) -> AsyncIterator[bytes]:
        """
        Read file contents as an async iterator of chunks.

        Args:
            path: File path relative to backend root

        Yields:
            File content chunks as bytes
        """

    @abstractmethod
    async def write(self, path: str, data: Union[bytes, AsyncIterator[bytes]]) -> int:
        """
        Write data to a file.

        Args:
            path: File path relative to backend root
            data: File content as bytes or async iterator of chunks

        Returns:
            Number of bytes written
        """

    @abstractmethod
    async def delete(self, path: str) -> bool:
        """
        Delete a file.

        Args:
            path: File path relative to backend root

        Returns:
            True if deleted, False if not found
        """

    @abstractmethod
    async def list(self, path: str = "", recursive: bool = False) -> List[str]:
        """
        List files in a directory.

        Args:
            path: Directory path relative to backend root
            recursive: Whether to list recursively

        Returns:
            List of file paths
        """

    @abstractmethod
    async def ensure_dir(self, path: str) -> None:
        """
        Ensure a directory exists, creating it if necessary.

        Args:
            path: Directory path relative to backend root
        """

    async def get_file_info(self, path: str) -> Optional[Dict[str, Any]]:
        """
        Get file metadata.

        Args:
            path: File path relative to backend root

        Returns:
            Dictionary with file info, or None when the file is absent.
        """
        found = await self.exists(path)
        if not found:
            return None
        info: Dict[str, Any] = {"path": path, "exists": True}
        return info

    async def get_size(self, path: str) -> int:
        """
        Get file size in bytes.

        Args:
            path: File path relative to backend root

        Returns:
            File size in bytes (0 when the file is missing or the backend
            does not report a "size" entry in its file info).
        """
        info = await self.get_file_info(path)
        if info is None:
            return 0
        return info.get("size", 0)

    async def get_status(self) -> Dict[str, Any]:
        """
        Get backend status.

        Returns:
            Dictionary with backend status information
        """
        status = {
            "name": self.name,
            "type": type(self).__name__,
            "available": True,
        }
        return status

    async def cleanup(self) -> None:
        """Clean up backend resources; subclasses override as needed."""

    def __repr__(self) -> str:
        return f"<{type(self).__name__} name={self.name}>"

storage/factory.py

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
"""
2+
Factory for creating storage backends.
3+
"""
4+
from typing import Any, Dict
5+
6+
from storage.base import StorageBackend
7+
8+
9+
def create_storage_backend(config: Dict[str, Any]) -> StorageBackend:
    """
    Create a storage backend from configuration.

    Args:
        config: Backend configuration dictionary with at least:
            - type: Backend type (filesystem, s3, azure, gcs)
            - name: Backend name for identification

    Returns:
        Configured StorageBackend instance

    Raises:
        ValueError: If backend type is unknown or config is invalid
    """
    backend_type = config.get("type", "").lower()

    if not backend_type:
        raise ValueError("Backend configuration must include 'type'")

    # Map every accepted alias to its canonical backend family. Network
    # mounts (nfs/smb/cifs) behave like local paths, so they reuse the
    # local backend.
    aliases = {
        "filesystem": "local", "local": "local", "file": "local",
        "s3": "s3", "aws": "s3", "minio": "s3",
        "azure": "azure", "blob": "azure", "azure_blob": "azure",
        "gcs": "gcs", "google": "gcs", "google_cloud": "gcs",
        "nfs": "local", "smb": "local", "cifs": "local", "network": "local",
    }
    canonical = aliases.get(backend_type)

    if canonical is None:
        raise ValueError(f"Unknown storage backend type: {backend_type}")

    # Import lazily so only the selected backend's dependencies are loaded.
    if canonical == "local":
        from storage.local import LocalStorageBackend
        return LocalStorageBackend(config)
    if canonical == "s3":
        from storage.s3 import S3StorageBackend
        return S3StorageBackend(config)
    if canonical == "azure":
        from storage.azure import AzureStorageBackend
        return AzureStorageBackend(config)
    from storage.gcs import GCSStorageBackend
    return GCSStorageBackend(config)

0 commit comments

Comments
 (0)