Skip to content

Commit d4d5fac

Browse files
committed
refactor: enforce single GuestCounter, harden encapsulation
- Add counter_taken AtomicBool to ExclusiveSharedMemory; guest_counter() returns an error on a second call instead of silently creating a duplicate with divergent cached state.
- Initialize value to 0 instead of reading from the offset — scratch memory is always zero at that point.
- Revert HostSharedMemory fields to private and add a #[cfg(test)] simulate_build() method so tests don't leak shm internals.
- Add only_one_counter_allowed test.

Signed-off-by: danbugs <danilochiarlone@gmail.com>
1 parent 4d829b2 commit d4d5fac

2 files changed

Lines changed: 68 additions & 30 deletions

File tree

src/hyperlight_host/src/mem/shared_mem.rs

Lines changed: 37 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ use std::io::Error;
2020
use std::mem::{align_of, size_of};
2121
#[cfg(target_os = "linux")]
2222
use std::ptr::null_mut;
23+
#[cfg(feature = "nanvix-unstable")]
24+
use std::sync::atomic::AtomicBool;
2325
use std::sync::{Arc, Mutex, RwLock};
2426

2527
use hyperlight_common::mem::PAGE_SIZE_USIZE;
@@ -142,6 +144,11 @@ pub struct ExclusiveSharedMemory {
142144
/// before `build()` (e.g. `GuestCounter`) can clone this `Arc` and
143145
/// will see `Some` once `build()` completes.
144146
pub(crate) deferred_hshm: Arc<Mutex<Option<HostSharedMemory>>>,
147+
/// Set to `true` once a [`GuestCounter`] has been handed out via
148+
/// [`UninitializedSandbox::guest_counter()`]. Prevents creating
149+
/// multiple counters that would have divergent cached values.
150+
#[cfg(feature = "nanvix-unstable")]
151+
pub(crate) counter_taken: AtomicBool,
145152
}
146153
unsafe impl Send for ExclusiveSharedMemory {}
147154

@@ -326,8 +333,8 @@ unsafe impl Send for GuestSharedMemory {}
326333
/// becoming completely divorced from the view of the VM.
327334
#[derive(Clone, Debug)]
328335
pub struct HostSharedMemory {
329-
pub(crate) region: Arc<HostMapping>,
330-
pub(crate) lock: Arc<RwLock<()>>,
336+
region: Arc<HostMapping>,
337+
lock: Arc<RwLock<()>>,
331338
}
332339
unsafe impl Send for HostSharedMemory {}
333340

@@ -430,6 +437,8 @@ impl ExclusiveSharedMemory {
430437
size: total_size,
431438
}),
432439
deferred_hshm: Arc::new(Mutex::new(None)),
440+
#[cfg(feature = "nanvix-unstable")]
441+
counter_taken: AtomicBool::new(false),
433442
})
434443
}
435444

@@ -549,6 +558,8 @@ impl ExclusiveSharedMemory {
549558
handle,
550559
}),
551560
deferred_hshm: Arc::new(Mutex::new(None)),
561+
#[cfg(feature = "nanvix-unstable")]
562+
counter_taken: AtomicBool::new(false),
552563
})
553564
}
554565

@@ -661,7 +672,13 @@ impl ExclusiveSharedMemory {
661672
};
662673
// Publish the HostSharedMemory so any pre-existing GuestCounter
663674
// can begin issuing volatile writes via the proper protocol.
664-
*self.deferred_hshm.lock().unwrap() = Some(hshm.clone());
675+
// The mutex is only locked here and during GuestCounter
676+
// operations; since build() consumes self, there is no
677+
// concurrent access that could poison it.
678+
#[allow(clippy::unwrap_used)]
679+
{
680+
*self.deferred_hshm.lock().unwrap() = Some(hshm.clone());
681+
}
665682
(
666683
hshm,
667684
GuestSharedMemory {
@@ -671,6 +688,19 @@ impl ExclusiveSharedMemory {
671688
)
672689
}
673690

691+
/// Populate the deferred `HostSharedMemory` slot without consuming
692+
/// `self`. Used in tests where `evolve()` / full `build()` is not
693+
/// available.
694+
#[cfg(all(test, feature = "nanvix-unstable"))]
695+
pub(crate) fn simulate_build(&self) {
696+
let lock = Arc::new(RwLock::new(()));
697+
let hshm = HostSharedMemory {
698+
region: self.region.clone(),
699+
lock,
700+
};
701+
*self.deferred_hshm.lock().unwrap() = Some(hshm);
702+
}
703+
674704
/// Gets the file handle of the shared memory region for this Sandbox
675705
#[cfg(target_os = "windows")]
676706
pub fn get_mmap_file_handle(&self) -> HANDLE {
@@ -859,6 +889,8 @@ impl SharedMemory for GuestSharedMemory {
859889
let mut excl = ExclusiveSharedMemory {
860890
region: self.region.clone(),
861891
deferred_hshm: Arc::new(Mutex::new(None)),
892+
#[cfg(feature = "nanvix-unstable")]
893+
counter_taken: AtomicBool::new(false),
862894
};
863895
let ret = f(&mut excl);
864896
drop(excl);
@@ -1225,6 +1257,8 @@ impl SharedMemory for HostSharedMemory {
12251257
let mut excl = ExclusiveSharedMemory {
12261258
region: self.region.clone(),
12271259
deferred_hshm: Arc::new(Mutex::new(None)),
1260+
#[cfg(feature = "nanvix-unstable")]
1261+
counter_taken: AtomicBool::new(false),
12281262
};
12291263
let ret = f(&mut excl);
12301264
drop(excl);

src/hyperlight_host/src/sandbox/uninitialized.rs

Lines changed: 31 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -76,8 +76,8 @@ pub(crate) struct SandboxRuntimeConfig {
7676
/// avoids adding lock overhead to `ExclusiveSharedMemory` during the
7777
/// exclusive setup phase.
7878
///
79-
/// The constructor takes `&mut self` on `UninitializedSandbox`, which
80-
/// prevents creating multiple instances simultaneously.
79+
/// Only one `GuestCounter` may be created per sandbox; a second call to
80+
/// [`UninitializedSandbox::guest_counter()`] returns an error.
8181
#[cfg(feature = "nanvix-unstable")]
8282
pub struct GuestCounter {
8383
inner: Mutex<GuestCounterInner>,
@@ -272,13 +272,25 @@ impl UninitializedSandbox {
272272
/// [`HostSharedMemory`] once [`build()`](ExclusiveSharedMemory::build)
273273
/// is called during [`evolve()`](Self::evolve).
274274
///
275-
/// Takes `&mut self` to prevent creating multiple instances
276-
/// simultaneously (multiple instances would have divergent cached
277-
/// values).
275+
/// This method can only be called once; a second call returns an error
276+
/// because multiple counters would have divergent cached values.
278277
#[cfg(feature = "nanvix-unstable")]
279278
pub fn guest_counter(&mut self) -> Result<GuestCounter> {
279+
use std::sync::atomic::Ordering;
280+
280281
use hyperlight_common::layout::SCRATCH_TOP_GUEST_COUNTER_OFFSET;
281282

283+
if self
284+
.mgr
285+
.scratch_mem
286+
.counter_taken
287+
.swap(true, Ordering::Relaxed)
288+
{
289+
return Err(new_error!(
290+
"GuestCounter has already been created for this sandbox"
291+
));
292+
}
293+
282294
let scratch_size = self.mgr.scratch_mem.mem_size();
283295
if (SCRATCH_TOP_GUEST_COUNTER_OFFSET as usize) > scratch_size {
284296
return Err(new_error!(
@@ -290,13 +302,12 @@ impl UninitializedSandbox {
290302

291303
let offset = scratch_size - SCRATCH_TOP_GUEST_COUNTER_OFFSET as usize;
292304
let deferred_hshm = self.mgr.scratch_mem.deferred_hshm.clone();
293-
let value = self.mgr.scratch_mem.read_u64(offset)?;
294305

295306
Ok(GuestCounter {
296307
inner: Mutex::new(GuestCounterInner {
297308
deferred_hshm,
298309
offset,
299-
value,
310+
value: 0,
300311
}),
301312
})
302313
}
@@ -1449,12 +1460,9 @@ mod tests {
14491460

14501461
#[cfg(feature = "nanvix-unstable")]
14511462
mod guest_counter_tests {
1452-
use std::sync::{Arc, RwLock};
1453-
14541463
use hyperlight_testing::simple_guest_as_string;
14551464

14561465
use crate::UninitializedSandbox;
1457-
use crate::mem::shared_mem::HostSharedMemory;
14581466
use crate::sandbox::uninitialized::GuestBinary;
14591467

14601468
fn make_sandbox() -> UninitializedSandbox {
@@ -1465,24 +1473,21 @@ mod tests {
14651473
.expect("Failed to create sandbox")
14661474
}
14671475

1468-
/// Simulate `build()` for the scratch region by storing a
1469-
/// `HostSharedMemory` into the deferred slot.
1470-
fn simulate_build(sandbox: &UninitializedSandbox) {
1471-
let lock = Arc::new(RwLock::new(()));
1472-
let hshm = HostSharedMemory {
1473-
region: sandbox.mgr.scratch_mem.region.clone(),
1474-
lock,
1475-
};
1476-
*sandbox.mgr.scratch_mem.deferred_hshm.lock().unwrap() = Some(hshm);
1477-
}
1478-
14791476
#[test]
14801477
fn create_guest_counter() {
14811478
let mut sandbox = make_sandbox();
14821479
let counter = sandbox.guest_counter();
14831480
assert!(counter.is_ok());
14841481
}
14851482

1483+
#[test]
1484+
fn only_one_counter_allowed() {
1485+
let mut sandbox = make_sandbox();
1486+
let _c1 = sandbox.guest_counter().unwrap();
1487+
let c2 = sandbox.guest_counter();
1488+
assert!(c2.is_err());
1489+
}
1490+
14861491
#[test]
14871492
fn fails_before_build() {
14881493
let mut sandbox = make_sandbox();
@@ -1495,22 +1500,21 @@ mod tests {
14951500
fn increment_decrement() {
14961501
let mut sandbox = make_sandbox();
14971502
let counter = sandbox.guest_counter().unwrap();
1498-
simulate_build(&sandbox);
1503+
sandbox.mgr.scratch_mem.simulate_build();
14991504

1500-
let initial = counter.value().unwrap();
15011505
counter.increment().unwrap();
1502-
assert_eq!(counter.value().unwrap(), initial + 1);
1506+
assert_eq!(counter.value().unwrap(), 1);
15031507
counter.increment().unwrap();
1504-
assert_eq!(counter.value().unwrap(), initial + 2);
1508+
assert_eq!(counter.value().unwrap(), 2);
15051509
counter.decrement().unwrap();
1506-
assert_eq!(counter.value().unwrap(), initial + 1);
1510+
assert_eq!(counter.value().unwrap(), 1);
15071511
}
15081512

15091513
#[test]
15101514
fn underflow_returns_error() {
15111515
let mut sandbox = make_sandbox();
15121516
let counter = sandbox.guest_counter().unwrap();
1513-
simulate_build(&sandbox);
1517+
sandbox.mgr.scratch_mem.simulate_build();
15141518

15151519
assert_eq!(counter.value().unwrap(), 0);
15161520
let result = counter.decrement();

0 commit comments

Comments (0)