Skip to content

Commit 076894c

Browse files
committed
Add TLBI (TLB Invalidate) instruction wrappers
Add safe wrappers for AArch64 TLBI system instructions at EL1, following the existing pattern established by barrier.rs. Covered instructions: - VMALLE1 / VMALLE1IS — invalidate all EL1&0 TLB entries - VAE1 / VAE1IS — invalidate by VA (with ASID) - VALE1 / VALE1IS — invalidate by VA, last level only - VAAE1 / VAAE1IS — invalidate by VA, all ASIDs - VAALE1 / VAALE1IS — invalidate by VA, all ASIDs, last level - ASIDE1 / ASIDE1IS — invalidate by ASID Each wrapper includes documentation with operand encoding details per Arm ARM §C5.5 / §D8.10. Signed-off-by: ZhiHong Niu <z.h.niu@outlook.com> Signed-off-by: Niu Zhihong <zhihong@nzhnb.com>
1 parent 5e4403c commit 076894c

File tree

2 files changed

+312
-0
lines changed

2 files changed

+312
-0
lines changed

src/asm.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
1111
pub mod barrier;
1212
pub mod random;
13+
pub mod tlbi;
1314

1415
/// The classic no-op
1516
#[inline(always)]

src/asm/tlbi.rs

Lines changed: 311 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,311 @@
1+
// SPDX-License-Identifier: Apache-2.0 OR MIT
2+
//
3+
// Copyright (c) 2018-2026 by the author(s)
4+
//
5+
// Author(s):
6+
// - ZhiHong Niu <zhihong@nzhnb.com>
7+
8+
//! TLB Invalidate (TLBI) instructions.
9+
//!
10+
//! Provides Rust wrappers around AArch64 TLBI system instructions for
11+
//! invalidating TLB entries at EL1.
12+
//!
13+
//! # Operand Encoding
14+
//!
15+
//! For address-based TLBI operations (`vae1`, `vale1`, `vaae1`, `vaale1`),
16+
//! the `u64` operand is encoded as specified in the Arm ARM:
17+
//!
18+
//! - Bits \[63:48\]: ASID (for non-"AA" variants; the upper 8 bits are RES0 when only 8-bit ASIDs are implemented)
19+
//! - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
20+
//!
21+
//! It is the caller's responsibility to construct the operand correctly.
22+
//!
23+
//! # Synchronization
24+
//!
25+
//! TLBI instructions must be surrounded by appropriate barrier instructions
26+
//! (DSB before and DSB+ISB after) to ensure the invalidation takes effect.
27+
//! See [`barrier`](super::barrier).
28+
//!
29+
//! # References
30+
//!
31+
//! - [Arm ARM §C5.5 - A64 system instructions for TLB maintenance](https://developer.arm.com/documentation/ddi0487/latest)
32+
//! - [Arm ARM §D8.10 - TLB maintenance instructions](https://developer.arm.com/documentation/ddi0487/latest)
33+
//!
34+
//! # Example
35+
//!
36+
//! ```no_run
37+
//! use aarch64_cpu::asm::{barrier, tlbi};
38+
//!
39+
//! // Invalidate all EL1&0 regime TLB entries.
40+
//! // A DSB before ensures prior stores to page tables are visible;
41+
//! // a DSB+ISB after ensures the invalidation completes before
42+
//! // subsequent memory accesses.
43+
//! barrier::dsb(barrier::SY);
44+
//! tlbi::vmalle1();
45+
//! barrier::dsb(barrier::SY);
46+
//! barrier::isb(barrier::SY);
47+
//!
48+
//! // Invalidate by VA: encode operand as VA[55:12] | (ASID << 48).
49+
//! let vaddr: u64 = 0x8000_0000;
50+
//! let asid: u64 = 1;
51+
//! let operand = (vaddr >> 12) | (asid << 48);
52+
//! barrier::dsb(barrier::SY);
53+
//! tlbi::vae1(operand);
54+
//! barrier::dsb(barrier::SY);
55+
//! barrier::isb(barrier::SY);
56+
//! ```
57+
58+
/// Invalidate all EL1&0 regime TLB entries in the current VMID.
///
/// Executes `TLBI VMALLE1`. Takes no operand.
///
/// Must be executed at EL1 or higher; surround with the DSB/ISB barrier
/// sequence described in the module docs for the invalidation to take effect.
#[inline(always)]
pub fn vmalle1() {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction with no memory operands; it does
        // not touch the stack and does not write NZCV (`preserves_flags`).
        () => unsafe { core::arch::asm!("TLBI VMALLE1", options(nostack, preserves_flags)) },

        #[cfg(not(target_arch = "aarch64"))]
        () => unimplemented!(),
    }
}
71+
72+
/// Invalidate all EL1&0 regime TLB entries in the current VMID
/// on all PEs in the same Inner Shareable domain.
///
/// Executes `TLBI VMALLE1IS`. Takes no operand.
///
/// Must be executed at EL1 or higher; surround with the DSB/ISB barrier
/// sequence described in the module docs for the invalidation to take effect.
#[inline(always)]
pub fn vmalle1is() {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction with no memory operands; it does
        // not touch the stack and does not write NZCV (`preserves_flags`).
        () => unsafe { core::arch::asm!("TLBI VMALLE1IS", options(nostack, preserves_flags)) },

        #[cfg(not(target_arch = "aarch64"))]
        () => unimplemented!(),
    }
}
86+
87+
/// Invalidate TLB entries by VA, EL1&0, current VMID.
///
/// Executes `TLBI VAE1, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vae1(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAE1, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
108+
109+
/// Invalidate TLB entries by VA, EL1&0, current VMID,
/// Inner Shareable.
///
/// Executes `TLBI VAE1IS, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vae1is(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAE1IS, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
131+
132+
/// Invalidate TLB entries by VA, last level, EL1&0, current VMID.
///
/// Executes `TLBI VALE1, <Xt>`.
///
/// Only invalidates entries from the last level of the translation
/// table walk (leaf entries), which can be more efficient than `vae1`
/// when intermediate entries are known to be unaffected.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vale1(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VALE1, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
157+
158+
/// Invalidate TLB entries by VA, last level, EL1&0, current VMID,
/// Inner Shareable.
///
/// Executes `TLBI VALE1IS, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vale1is(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VALE1IS, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
180+
181+
/// Invalidate TLB entries by VA, all ASIDs, EL1&0, current VMID.
///
/// Executes `TLBI VAAE1, <Xt>`.
///
/// Invalidates entries matching the VA regardless of ASID.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vaae1(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAAE1, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
203+
204+
/// Invalidate TLB entries by VA, all ASIDs, EL1&0, current VMID,
/// Inner Shareable.
///
/// Executes `TLBI VAAE1IS, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vaae1is(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAAE1IS, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
225+
226+
/// Invalidate TLB entries by VA, all ASIDs, last level, EL1&0,
/// current VMID.
///
/// Executes `TLBI VAALE1, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vaale1(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAALE1, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
247+
248+
/// Invalidate TLB entries by VA, all ASIDs, last level, EL1&0,
/// current VMID, Inner Shareable.
///
/// Executes `TLBI VAALE1IS, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[43:0\]: VA\[55:12\] (the virtual address shifted right by 12)
#[inline(always)]
pub fn vaale1is(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI VAALE1IS, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
269+
270+
/// Invalidate TLB entries by ASID, EL1&0, current VMID.
///
/// Executes `TLBI ASIDE1, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - All other bits: RES0
#[inline(always)]
pub fn aside1(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI ASIDE1, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}
290+
291+
/// Invalidate TLB entries by ASID, EL1&0, current VMID,
/// Inner Shareable.
///
/// Executes `TLBI ASIDE1IS, <Xt>`.
///
/// Must be executed at EL1 or higher.
///
/// # Operand Encoding
///
/// - Bits \[63:48\]: ASID (upper 8 bits RES0 when only 8-bit ASIDs are implemented)
/// - All other bits: RES0
#[inline(always)]
pub fn aside1is(val: u64) {
    match () {
        #[cfg(target_arch = "aarch64")]
        // SAFETY: TLB maintenance instruction; the operand is passed by value
        // in a register, no Rust-visible memory is accessed, the stack is not
        // used, and NZCV is unchanged (`preserves_flags`).
        () => unsafe {
            core::arch::asm!("TLBI ASIDE1IS, {v}", v = in(reg) val, options(nostack, preserves_flags))
        },

        #[cfg(not(target_arch = "aarch64"))]
        () => {
            let _ = val;
            unimplemented!()
        }
    }
}

0 commit comments

Comments
 (0)