Skip to content

Commit 9e7af44

Browse files
Merge pull request #8 from SolidLabResearch/codex/mvp-audit-fixes
[codex] align mvp dashboard with backend for main update
2 parents 22da443 + 2a29763 commit 9e7af44

105 files changed

Lines changed: 20595 additions & 232 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.gitignore

Lines changed: 33 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,52 +1,40 @@
1-
# Rust build artifacts
2-
target/
1+
# Rust
2+
/target/
33
Cargo.lock
4-
5-
# Debug symbols
4+
**/*.rs.bk
65
*.pdb
76

8-
# Backup files
9-
*~
10-
*.swp
11-
*.swo
12-
*.swn
7+
# Test data and logs
8+
test_data/
9+
server.log
10+
docker/mosquitto/log/
11+
docker/mosquitto/data/
12+
data/
13+
14+
# Python
15+
*.pyc
16+
__pycache__/
17+
*.py[cod]
18+
*$py.class
19+
20+
# Dashboard build artifacts
21+
janus-dashboard/dist/
22+
janus-dashboard/node_modules/
23+
janus-dashboard/.vscode/
24+
25+
# macOS
1326
.DS_Store
27+
.AppleDouble
28+
.LSOverride
1429

15-
# IDE and editor directories
16-
.idea/
30+
# Editor directories
1731
.vscode/
18-
*.iml
19-
.zed/
20-
21-
# Environment files
22-
.env
23-
.env.local
24-
.env.*.local
25-
26-
# Test coverage
27-
*.profraw
28-
*.profdata
29-
coverage/
30-
tarpaulin-report.html
31-
32-
# Documentation build
33-
target/doc/
34-
35-
# Temporary files
36-
tmp/
37-
temp/
38-
39-
# OS specific
40-
.DS_Store
41-
Thumbs.db
42-
43-
# RDF Store data
44-
fuseki-config/databases/
45-
46-
# Docker volumes
47-
*.db
48-
*.db-shm
49-
*.db-wal
32+
.idea/
33+
*.swp
34+
*.swo
35+
*~
5036

51-
# Data for the Benchmarking
52-
/data/
37+
# Temporary debug/test files
38+
debug_*.py
39+
tests/reproduction_test.rs
40+
tests/user_query_repro.rs

.zed/settings.json

Lines changed: 0 additions & 10 deletions
This file was deleted.

Cargo.toml

Lines changed: 33 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,13 +14,18 @@ readme = "README.md"
1414
regex = "1.0"
1515
serde = { version = "1.0", features = ["derive"] }
1616
bincode = "1.0"
17-
rsp-rs = "0.2.1"
17+
rsp-rs = "0.3.5"
1818
oxigraph = "0.5"
19-
rumqttc = "0.25.1"
19+
rumqttc = { version = "0.25.1", default-features = false }
2020
serde_json = "1.0.145"
21-
22-
[target.'cfg(not(windows))'.dependencies]
23-
rdkafka = "0.38.0"
21+
tokio = { version = "1.48.0", features = ["full"] }
22+
ctrlc = "3.5.1"
23+
clap = { version = "4.5", features = ["derive"] }
24+
axum = { version = "0.7", features = ["ws"] }
25+
tower-http = { version = "0.5", features = ["cors", "trace"] }
26+
tokio-tungstenite = "0.21"
27+
reqwest = { version = "0.11", features = ["json"] }
28+
futures-util = "0.3"
2429

2530
[lib]
2631
name = "janus"
@@ -30,6 +35,29 @@ path = "src/lib.rs"
3035
name = "janus"
3136
path = "src/main.rs"
3237

38+
[[bin]]
39+
name = "http_server"
40+
path = "src/bin/http_server.rs"
41+
42+
[dev-dependencies]
43+
criterion = { version = "0.5", features = ["html_reports"] }
44+
45+
[[bench]]
46+
name = "storage_write"
47+
harness = false
48+
49+
[[bench]]
50+
name = "historical_fixed"
51+
harness = false
52+
53+
[[bench]]
54+
name = "historical_sliding"
55+
harness = false
56+
57+
[[bench]]
58+
name = "live_injection"
59+
harness = false
60+
3361
[profile.release]
3462
opt-level = 3
3563
lto = true

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ check: fmt-check lint ## Run all checks (formatting and linting)
6161

6262
ci-check: ## Run full CI/CD checks locally before pushing
6363
@echo "$(BLUE)Running CI/CD checks...$(NC)"
64-
@./ci-check.sh
64+
@./scripts/ci-check.sh
6565

6666
clean: ## Clean build artifacts
6767
@echo "$(BLUE)Cleaning build artifacts...$(NC)"

README.md

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,8 +51,7 @@ Before pushing to the repository, run the CI/CD checks locally:
5151
make ci-check
5252

5353
# Or use the script directly
54-
./ci-check.sh
55-
```
54+
./scripts/ci-check.sh
```
5655
5756
This will run:
5857
- **rustfmt** - Code formatting check

START_HERE.md

Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
# Janus HTTP API - START HERE
2+
3+
## Quick Start (30 seconds)
4+
5+
```bash
6+
# 1. Setup (one time)
7+
./scripts/test_setup.sh
8+
# 2. Start MQTT
9+
docker-compose up -d mosquitto
10+
11+
# 3. Start Server
12+
cargo run --bin http_server
13+
14+
# 4. Open Dashboard
15+
open examples/demo_dashboard.html
16+
```
17+
18+
Then click: **Start Replay** → **Start Query**
19+
20+
## What This Does
21+
22+
1. **Start Replay**: Loads RDF data from `data/sensors.nq`, publishes to MQTT, stores locally
23+
2. **Start Query**: Executes a JanusQL query, streams results via WebSocket to dashboard
24+
25+
## Documentation
26+
27+
- **QUICK_REFERENCE.md** - One-page cheat sheet
28+
- **RUNTIME_FIX_SUMMARY.md** - How the runtime issue was fixed
29+
- **COMPLETE_SOLUTION.md** - Full implementation details
30+
- **SETUP_GUIDE.md** - Detailed setup instructions
31+
- **README_HTTP_API.md** - Complete API documentation
32+
- **FINAL_TEST.md** - Verification steps
33+
34+
## Key Points
35+
36+
**No more runtime panics** - Fixed by spawning StreamBus in separate thread
37+
**Correct JanusQL syntax** - All examples updated to match parser
38+
**MQTT integration** - Full broker setup with Docker Compose
39+
**Two-button demo** - Interactive dashboard for easy testing
40+
**Production-ready** - Stable, tested, documented
41+
42+
⚠️ **Known limitation**: Replay metrics show status but not event counts (acceptable trade-off)
43+
44+
## Troubleshooting
45+
46+
```bash
47+
# Server won't start (port in use)
48+
lsof -ti:8080 | xargs kill -9
49+
50+
# MQTT not running
51+
docker-compose up -d mosquitto
52+
53+
# Check if working
54+
curl http://localhost:8080/health
55+
```
56+
57+
## Success Indicators
58+
59+
When everything works correctly:
60+
1. Server starts with clean output (no panics)
61+
2. Dashboard shows "Connected to Janus HTTP API server"
62+
3. Replay button → Status changes to "Running"
63+
4. Query button → WebSocket connects, results appear
64+
5. Results tagged as "historical" or "live"
65+
66+
## Need Help?
67+
68+
1. Read **QUICK_REFERENCE.md** for common commands
69+
2. Check **FINAL_TEST.md** for verification steps
70+
3. See **RUNTIME_FIX_SUMMARY.md** if you see panics
71+
4. Review **SETUP_GUIDE.md** for detailed instructions
72+
73+
---
74+
75+
**Everything is ready. Just run the Quick Start commands above!** 🚀

benches/historical_fixed.rs

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
2+
use janus::{
3+
execution::historical_executor::HistoricalExecutor,
4+
parsing::janusql_parser::{WindowDefinition, WindowType},
5+
querying::oxigraph_adapter::OxigraphAdapter,
6+
storage::{segmented_storage::StreamingSegmentedStorage, util::StreamingConfig},
7+
};
8+
use std::sync::{
9+
atomic::{AtomicU64, Ordering},
10+
Arc,
11+
};
12+
use std::time::{SystemTime, UNIX_EPOCH};
13+
14+
static COUNTER: AtomicU64 = AtomicU64::new(0);
15+
16+
fn unique_config() -> StreamingConfig {
17+
let id = COUNTER.fetch_add(1, Ordering::Relaxed);
18+
let ts = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_nanos();
19+
StreamingConfig {
20+
segment_base_path: format!("/tmp/janus_bench_fixed_{}_{}", ts, id),
21+
max_batch_events: 1_000_000,
22+
max_batch_age_seconds: 3600,
23+
max_batch_bytes: 1_000_000_000,
24+
sparse_interval: 64,
25+
entries_per_index_block: 256,
26+
}
27+
}
28+
29+
/// Write N events at timestamps [1000, 1000+N) into a fresh storage.
30+
/// These land in the in-memory batch buffer — no flush needed before querying.
31+
fn setup(n: usize) -> (Arc<StreamingSegmentedStorage>, WindowDefinition) {
32+
let storage = StreamingSegmentedStorage::new(unique_config()).unwrap();
33+
for i in 0..n as u64 {
34+
storage
35+
.write_rdf(
36+
1_000 + i,
37+
&format!("http://example.org/sensor{}", i % 5),
38+
"http://saref.etsi.org/core/hasValue",
39+
&format!("{}", 20 + (i % 10)),
40+
"http://example.org/graph",
41+
)
42+
.unwrap();
43+
}
44+
let window = WindowDefinition {
45+
window_name: "w".to_string(),
46+
stream_name: "http://example.org/stream".to_string(),
47+
width: n as u64,
48+
slide: n as u64,
49+
offset: None,
50+
start: Some(1_000),
51+
end: Some(1_000 + n as u64 - 1),
52+
window_type: WindowType::HistoricalFixed,
53+
};
54+
(Arc::new(storage), window)
55+
}
56+
57+
const SPARQL: &str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o }";
58+
59+
fn historical_fixed(c: &mut Criterion) {
60+
let mut group = c.benchmark_group("historical/fixed_window");
61+
62+
for &n in &[100usize, 1_000, 10_000] {
63+
group.bench_with_input(BenchmarkId::new("events", n), &n, |b, &n| {
64+
b.iter_batched(
65+
|| setup(n),
66+
|(storage, window)| {
67+
let executor = HistoricalExecutor::new(storage, OxigraphAdapter::new());
68+
black_box(executor.execute_fixed_window(&window, SPARQL).unwrap())
69+
},
70+
criterion::BatchSize::SmallInput,
71+
);
72+
});
73+
}
74+
75+
group.finish();
76+
}
77+
78+
criterion_group!(benches, historical_fixed);
79+
criterion_main!(benches);

0 commit comments

Comments
 (0)