Skip to content

Commit 865796a

Browse files
committed
Tests: WebDAV resource ops, drive create/update, validate/stats
- InternxtDAVResource: delete (real/pending/no-uuid/error), move_recursive (different folder/same folder/move+rename/pending/error), copy_move (root/subfolder/error). - InternxtDAVCollection: move_recursive (different parent/rename/uuid-resolve/error), copy_recursive (not implemented), date accessors. - drive_service.create_folder: default-to-root-uuid, explicit parent, timestamps included/omitted, parent cache update when present, no cache mutation when absent, raises when no root id. - drive_service.update_file: full encrypt+upload+replace cycle, parent cache invalidation, error wrapping. - validate_upload_sources: missing/dir-without-recursive/dir-with-recursive/oversized/mixed. - get_upload_statistics: single-file/recursive/non-recursive/empty/nonexistent. Coverage: 76% -> 83%; webdav_provider: 70% -> 84%; drive.py: 78% -> 87%. Tests: 439 -> 480.
1 parent eca6e3a commit 865796a

5 files changed

Lines changed: 604 additions & 5 deletions

File tree

CHANGELOG.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -117,11 +117,11 @@ All Bandit medium+ findings (was 14, now 0) are resolved or annotated:
117117
| `config/config.py` | 85% |
118118
| `services/auth.py` | 100% |
119119
| `services/crypto.py` | 85% |
120-
| `services/drive.py` | 78% |
120+
| `services/drive.py` | 87% |
121121
| `services/network_utils.py` | 90% |
122-
| `services/webdav_provider.py` | 70% |
122+
| `services/webdav_provider.py` | 84% |
123123
| `services/webdav_server.py` | 58% |
124124
| `utils/api.py` | 74% |
125-
| **Total** | **76%** |
125+
| **Total** | **83%** |
126126

127-
(Total tests: **439** passing in ~3 seconds.)
127+
(Total tests: **480** passing in ~5 seconds.)

readme.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -270,7 +270,7 @@ internxt-python/
270270
│ └── webdav_server.py # WebDAV server management
271271
├── utils/
272272
│ └── api.py # HTTP API client
273-
├── tests/ # Pytest suite (~440 tests, 76% coverage)
273+
├── tests/ # Pytest suite (~480 tests, 83% coverage)
274274
├── pyproject.toml # Pytest, coverage, ruff config
275275
├── requirements-dev.txt # Dev/test dependencies
276276
└── .github/workflows/ci.yml # Lint + type-check + test on Py 3.10/3.11/3.12

tests/test_drive_create_update.py

Lines changed: 202 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,202 @@
1+
"""Tests for drive_service.create_folder (with cache-update + timestamps)
2+
and drive_service.update_file (the WebDAV PUT-on-existing path).
3+
"""
4+
from unittest.mock import patch
5+
6+
import pytest
7+
8+
from services.drive import drive_service
9+
10+
11+
@pytest.fixture(autouse=True)
def _reset():
    """Start and finish every test with a pristine drive_service state.

    The folder-content cache and the reserved-memory counter are
    module-level singletons, so state left over from one test could
    otherwise leak into the next.
    """
    def _wipe():
        drive_service.folder_content_cache.clear()
        drive_service._mem_reserved = 0

    _wipe()
    yield
    _wipe()
18+
19+
20+
@pytest.fixture
def fake_creds():
    """Minimal credential blob shaped like auth.get_auth_details() output."""
    mnemonic = ' '.join(['abandon'] * 11 + ['about'])
    user = {
        'rootFolderId': 'root-uuid',
        'bucket': '00' * 12,
        'mnemonic': mnemonic,
        'bridgeUser': 'u@example.com',
        'userId': 'u-42',
    }
    return {'user': user}
32+
33+
34+
# ---------- create_folder ----------
35+
36+
def test_create_folder_uses_root_when_no_parent_given(fake_creds):
    """If parent_folder_uuid is None, must default to user's rootFolderId."""
    seen = {}

    def record_create(payload):
        seen['payload'] = payload
        return {'uuid': 'new-uuid', 'plainName': payload['plainName']}

    auth_patch = patch.object(drive_service.auth, 'get_auth_details',
                              return_value=fake_creds)
    api_patch = patch.object(drive_service.api, 'create_folder',
                             side_effect=record_create)
    with auth_patch, api_patch:
        result = drive_service.create_folder('NewDir')

    payload = seen['payload']
    assert payload['parentFolderUuid'] == 'root-uuid'
    assert payload['plainName'] == 'NewDir'
    assert result['uuid'] == 'new-uuid'
52+
53+
54+
def test_create_folder_with_explicit_parent(fake_creds):
    """An explicit parent_folder_uuid is forwarded verbatim to the API."""
    calls = []

    def record_create(payload):
        calls.append(payload)
        return {'uuid': 'new-uuid'}

    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'create_folder',
                      side_effect=record_create):
        drive_service.create_folder('Sub', parent_folder_uuid='custom-parent')

    assert calls[0]['parentFolderUuid'] == 'custom-parent'
65+
66+
67+
def test_create_folder_includes_timestamps_when_provided(fake_creds):
    """creationTime/modificationTime kwargs must land in the API payload."""
    calls = []

    def record_create(payload):
        calls.append(payload)
        return {'uuid': 'new-uuid'}

    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'create_folder',
                      side_effect=record_create):
        drive_service.create_folder(
            'Dated', parent_folder_uuid='p',
            creation_time='2025-01-01T00:00:00Z',
            modification_time='2025-06-01T00:00:00Z',
        )

    (payload,) = calls
    assert payload['creationTime'] == '2025-01-01T00:00:00Z'
    assert payload['modificationTime'] == '2025-06-01T00:00:00Z'
83+
84+
85+
def test_create_folder_omits_timestamps_when_none(fake_creds):
    """Without explicit timestamps the payload carries no time keys at all."""
    calls = []

    def record_create(payload):
        calls.append(payload)
        return {'uuid': 'new-uuid'}

    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'create_folder',
                      side_effect=record_create):
        drive_service.create_folder('Plain', parent_folder_uuid='p')

    (payload,) = calls
    assert 'creationTime' not in payload
    assert 'modificationTime' not in payload
97+
98+
99+
def test_create_folder_updates_parent_cache_when_present(fake_creds):
    """If parent is already cached, the new folder appears in subsequent
    listings without a re-fetch."""
    far_future = 9999999999.0  # expiry far enough out to stay fresh
    drive_service.folder_content_cache['parent-uuid'] = (
        far_future, {'folders': [], 'files': []},
    )

    created = {'uuid': 'new-uuid', 'plainName': 'NewDir'}
    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'create_folder',
                      return_value=created):
        drive_service.create_folder('NewDir', parent_folder_uuid='parent-uuid')

    _, listing = drive_service.folder_content_cache['parent-uuid']
    cached_uuids = [entry.get('uuid') for entry in listing['folders']]
    assert 'new-uuid' in cached_uuids
115+
116+
117+
def test_create_folder_skips_cache_update_when_parent_not_cached(fake_creds):
    """If parent isn't cached, no cache mutation — the next
    get_folder_content call will fetch fresh."""
    created = {'uuid': 'new-uuid', 'plainName': 'NewDir'}
    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'create_folder',
                      return_value=created):
        drive_service.create_folder('NewDir',
                                    parent_folder_uuid='unknown-parent')

    # No cache entry may have been conjured up for the unknown parent.
    assert 'unknown-parent' not in drive_service.folder_content_cache
128+
129+
130+
def test_create_folder_raises_when_no_root_id():
    """No rootFolderId in creds AND no explicit parent -> ValueError."""
    creds_without_root = {'user': {}}
    with patch.object(drive_service.auth, 'get_auth_details',
                      return_value=creds_without_root):
        with pytest.raises(ValueError, match="No root folder"):
            drive_service.create_folder('X')
137+
138+
139+
# ---------- update_file (WebDAV PUT-on-existing) ----------
140+
141+
def test_update_file_full_cycle(tmp_path, fake_creds):
    """update_file walks the whole pipeline: read local file -> encrypt ->
    start upload -> upload chunk -> finish upload -> replace_file metadata.
    Verify each step is exercised."""
    body = b"updated content"
    local = tmp_path / "doc.txt"
    local.write_bytes(body)

    upload_slot = {'index': 0, 'size': 100,
                   'url': 'https://upload', 'uuid': 'net-uuid'}

    with patch.object(drive_service.api, 'get_file_metadata',
                      return_value={'plainName': 'doc'}), \
         patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'start_upload',
                      return_value={'uploads': [upload_slot]}), \
         patch.object(drive_service.api, 'upload_chunk') as mock_chunk, \
         patch.object(drive_service.api, 'finish_upload',
                      return_value={'id': 'new-net-id'}), \
         patch.object(drive_service.api, 'replace_file',
                      return_value={'success': True}) as mock_replace, \
         patch.object(drive_service, '_clear_parent_cache_for_item'):
        result = drive_service.update_file('file-uuid', str(local))

    assert result['success'] is True
    mock_chunk.assert_called_once()
    # replace_file receives the fresh network file id plus the local size.
    positional, _ = mock_replace.call_args
    assert positional[0] == 'file-uuid'
    assert positional[1]['fileId'] == 'new-net-id'
    assert positional[1]['size'] == len(body)
171+
172+
173+
def test_update_file_clears_parent_cache(tmp_path, fake_creds):
    """After the update, the parent cache must be invalidated so listings
    refresh on the next read."""
    local = tmp_path / "doc.txt"
    local.write_bytes(b"x")

    upload_slot = {'index': 0, 'size': 1, 'url': 'u', 'uuid': 'n'}

    with patch.object(drive_service.api, 'get_file_metadata',
                      return_value={'plainName': 'doc'}), \
         patch.object(drive_service.auth, 'get_auth_details',
                      return_value=fake_creds), \
         patch.object(drive_service.api, 'start_upload',
                      return_value={'uploads': [upload_slot]}), \
         patch.object(drive_service.api, 'upload_chunk'), \
         patch.object(drive_service.api, 'finish_upload',
                      return_value={'id': 'nid'}), \
         patch.object(drive_service.api, 'replace_file',
                      return_value={}), \
         patch.object(drive_service,
                      '_clear_parent_cache_for_item') as mock_clear:
        drive_service.update_file('file-uuid', str(local))

    mock_clear.assert_called_once_with('file-uuid', 'file')
196+
197+
198+
def test_update_file_wraps_errors():
    """Low-level failures surface as a wrapped 'Failed to update file' error."""
    with patch.object(drive_service.api, 'get_file_metadata',
                      side_effect=ConnectionError("net")), \
         pytest.raises(Exception, match="Failed to update file"):
        drive_service.update_file('file-uuid', '/tmp/nope')

tests/test_drive_validate_stats.py

Lines changed: 134 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,134 @@
1+
"""Tests for drive_service validation/statistics helpers:
2+
validate_upload_sources, get_upload_statistics.
3+
"""
4+
from pathlib import Path
5+
6+
7+
from services.drive import drive_service
8+
9+
10+
# ---------- validate_upload_sources ----------
11+
12+
def test_validate_returns_valid_paths_for_real_files(tmp_path):
    """Two existing regular files validate cleanly with no errors."""
    sources = []
    for name, body in (("a.txt", b"hello"), ("b.txt", b"world")):
        target = tmp_path / name
        target.write_bytes(body)
        sources.append(str(target))

    valid, errors = drive_service.validate_upload_sources(sources)

    assert len(valid) == 2
    assert errors == []
20+
21+
22+
def test_validate_reports_missing_source(tmp_path):
    """A path that does not exist produces a 'not found' error entry."""
    missing = tmp_path / "ghost.txt"
    valid, errors = drive_service.validate_upload_sources([str(missing)])
    assert valid == []
    assert any("not found" in msg.lower() for msg in errors)
26+
27+
28+
def test_validate_rejects_directory_without_recursive(tmp_path):
    """Directories are rejected unless recursive upload was requested."""
    folder = tmp_path / "subdir"
    folder.mkdir()

    valid, errors = drive_service.validate_upload_sources(
        [str(folder)], recursive=False)

    assert valid == []
    assert any("recursive" in msg.lower() for msg in errors)
34+
35+
36+
def test_validate_accepts_directory_with_recursive(tmp_path):
    """With recursive=True a directory is a perfectly valid source."""
    folder = tmp_path / "subdir"
    folder.mkdir()

    valid, errors = drive_service.validate_upload_sources(
        [str(folder)], recursive=True)

    assert len(valid) == 1
    assert errors == []
42+
43+
44+
def test_validate_skips_oversized_files(tmp_path, monkeypatch):
    """Files over the 20 GB limit are reported as errors, not in valid list."""
    big = tmp_path / "big.bin"
    big.write_bytes(b"x")  # tiny on disk; the size is faked below

    original_stat = Path.stat
    inflated_size = drive_service.TWENTY_GIGABYTES + 1

    class _InflatedStat:
        """Mirror all st_* fields of a real stat result, then lie about size."""
        def __init__(self, real):
            for field in dir(real):
                if field.startswith('st_'):
                    setattr(self, field, getattr(real, field))
            self.st_size = inflated_size

    def fake_stat(self, *args, **kwargs):
        real = original_stat(self, *args, **kwargs)
        # Only the target file gets the inflated size; everything else
        # (tmp dirs, etc.) keeps its genuine stat result.
        return _InflatedStat(real) if self == big else real

    monkeypatch.setattr(Path, 'stat', fake_stat)

    valid, errors = drive_service.validate_upload_sources([str(big)])
    assert valid == []
    assert any("too large" in msg.lower() for msg in errors)
68+
69+
70+
def test_validate_mixes_valid_and_invalid(tmp_path):
    """One good file plus one missing file -> one valid entry, one error."""
    good = tmp_path / "good.txt"
    good.write_bytes(b"x")
    missing = tmp_path / "missing.txt"

    valid, errors = drive_service.validate_upload_sources(
        [str(good), str(missing)])

    assert len(valid) == 1
    assert len(errors) == 1
77+
78+
79+
# ---------- get_upload_statistics ----------
80+
81+
def test_stats_for_single_file(tmp_path):
    """A lone file counts as one file, zero dirs, with its exact byte size."""
    doc = tmp_path / "doc.txt"
    body = b"hello world"
    doc.write_bytes(body)

    stats = drive_service.get_upload_statistics(doc)

    assert stats['total_files'] == 1
    assert stats['total_size'] == len(body)
    assert stats['total_dirs'] == 0
    assert stats['file_list'] == [doc]
89+
90+
91+
def test_stats_for_directory_recursive(tmp_path):
    """Recursive walk counts files at every depth + intermediate dirs."""
    layout = {
        "a.txt": b"a" * 100,
        "sub/b.txt": b"b" * 200,
        "sub/deeper/c.txt": b"c" * 300,
    }
    for rel, body in layout.items():
        target = tmp_path / rel
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(body)

    stats = drive_service.get_upload_statistics(tmp_path, recursive=True)

    assert stats['total_files'] == 3
    assert stats['total_size'] == 100 + 200 + 300
    assert stats['total_dirs'] == 2  # 'sub' and 'deeper'
105+
106+
107+
def test_stats_for_directory_non_recursive_only_top_level(tmp_path):
    """Without recursive, only direct children are counted."""
    (tmp_path / "top.txt").write_bytes(b"t" * 50)
    nested = tmp_path / "sub"
    nested.mkdir()
    (nested / "deep.txt").write_bytes(b"d" * 100)

    stats = drive_service.get_upload_statistics(tmp_path, recursive=False)

    assert stats['total_files'] == 1  # just top.txt
    assert stats['total_dirs'] == 1
    assert stats['total_size'] == 50
118+
119+
120+
def test_stats_for_empty_directory(tmp_path):
    """An empty directory yields all-zero stats and an empty file list."""
    hollow = tmp_path / "empty"
    hollow.mkdir()

    stats = drive_service.get_upload_statistics(hollow, recursive=True)

    for key in ('total_files', 'total_size', 'total_dirs'):
        assert stats[key] == 0
    assert stats['file_list'] == []
128+
129+
130+
def test_stats_for_nonexistent_path_is_empty(tmp_path):
    """Non-file, non-dir input yields empty stats (no crash)."""
    stats = drive_service.get_upload_statistics(tmp_path / "missing")

    assert stats['total_files'] == 0
    assert stats['total_size'] == 0

0 commit comments

Comments
 (0)