//! # **P**ortable **S**tack **M**anipulation
//! This crate provides portable functions to control the stack pointer and inspect the properties
//! of the stack. This crate does not attempt to provide safe abstractions for any operation; the
//! only goals are correctness, portability and efficiency (in that exact order). As a consequence,
//! most functions you will find in this crate are unsafe.
//!
//! Note that stack allocation is left up to the user. Unless you’re writing a safe abstraction
//! over stack manipulation, this is unlikely to be the crate you want. Instead, consider one of
//! the safe abstractions over this crate, such as `stacker`. Another good place to look is this
//! crate’s reverse dependency list on crates.io.
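//!
//! # Examples
//!
//! A minimal sketch of querying the stack properties this crate can report, gated on target
//! support via the macros provided by this crate:
//!
//! ```
//! # use psm::psm_stack_information;
//! psm_stack_information! {
//!     yes {
//!         println!("stack pointer: {:p}", psm::stack_pointer());
//!         println!("the stack grows: {:?}", psm::StackDirection::new());
//!     }
//!     no {
//!         println!("this target cannot report stack information");
//!     }
//! }
//! ```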

#![allow(unused_macros)]
#![no_std]

macro_rules! extern_item {
    (unsafe $($toks: tt)+) => {
        unsafe extern "C" $($toks)+
    };
    ($($toks: tt)+) => {
        extern "C" $($toks)+
    };
}

// Surprising: turns out subsequent macro_rules! override previous definitions, instead of
// erroring? Convenient for us in this case, though.
#[cfg(target_arch = "x86_64")]
macro_rules! extern_item {
    (unsafe $($toks: tt)+) => {
        unsafe extern "sysv64" $($toks)+
    };
    ($($toks: tt)+) => {
        extern "sysv64" $($toks)+
    };
}

#[cfg(target_arch = "x86")]
macro_rules! extern_item {
    (unsafe $($toks: tt)+) => {
        unsafe extern "fastcall" $($toks)+
    };
    ($($toks: tt)+) => {
        extern "fastcall" $($toks)+
    };
}

#[cfg(target_arch = "arm")]
macro_rules! extern_item {
    (unsafe $($toks: tt)+) => {
        unsafe extern "aapcs" $($toks)+
    };
    ($($toks: tt)+) => {
        extern "aapcs" $($toks)+
    };
}
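
// For example, on x86_64 the override above makes `extern_item!(unsafe fn(usize) -> !)` expand to
// `unsafe extern "sysv64" fn(usize) -> !`, so every function pointer crossing the assembly
// boundary uses a single, known calling convention.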

// NB: this could be nicer across multiple blocks but we cannot do it because of
// https://github.com/rust-lang/rust/issues/65847
extern_item! { {
    #![link(name="psm_s")]

    #[cfg(asm)]
    fn rust_psm_stack_direction() -> u8;
    #[cfg(asm)]
    fn rust_psm_stack_pointer() -> *mut u8;

    #[cfg(all(switchable_stack, not(target_os = "windows")))]
    #[link_name="rust_psm_replace_stack"]
    fn _rust_psm_replace_stack(
        data: usize,
        callback: extern_item!(unsafe fn(usize) -> !),
        sp: *mut u8
    ) -> !;
    #[cfg(all(switchable_stack, not(target_os = "windows")))]
    #[link_name="rust_psm_on_stack"]
    fn _rust_psm_on_stack(
        data: usize,
        return_ptr: usize,
        callback: extern_item!(unsafe fn(usize, usize)),
        sp: *mut u8,
    );
    #[cfg(all(switchable_stack, target_os = "windows"))]
    fn rust_psm_replace_stack(
        data: usize,
        callback: extern_item!(unsafe fn(usize) -> !),
        sp: *mut u8,
        stack_base: *mut u8
    ) -> !;
    #[cfg(all(switchable_stack, target_os = "windows"))]
    fn rust_psm_on_stack(
        data: usize,
        return_ptr: usize,
        callback: extern_item!(unsafe fn(usize, usize)),
        sp: *mut u8,
        stack_base: *mut u8
    );
} }

#[cfg(all(switchable_stack, not(target_os = "windows")))]
#[inline(always)]
unsafe fn rust_psm_replace_stack(
    data: usize,
    callback: extern_item!(unsafe fn(usize) -> !),
    sp: *mut u8,
    _: *mut u8,
) -> ! {
    _rust_psm_replace_stack(data, callback, sp)
}

#[cfg(all(switchable_stack, not(target_os = "windows")))]
#[inline(always)]
unsafe fn rust_psm_on_stack(
    data: usize,
    return_ptr: usize,
    callback: extern_item!(unsafe fn(usize, usize)),
    sp: *mut u8,
    _: *mut u8,
) {
    _rust_psm_on_stack(data, return_ptr, callback, sp)
}

/// Run the closure on the provided stack.
///
/// Once the closure completes its execution, the original stack pointer is restored and execution
/// returns to the caller.
///
/// The `base` address must be the low address of the stack memory region, regardless of the
/// stack growth direction. It is not necessary for the whole region `[base; base + size]` to be
/// usable at the time this function is called, however it is required that at least the following
/// hold:
///
/// * Both `base` and `base + size` are aligned up to the target-specific requirements;
/// * Depending on the `StackDirection` value for the platform, the end of the stack memory region
///   which would end up containing the first frame(s) must have a sufficient number of pages
///   allocated to execute code until more pages are committed. The other end should contain a
///   guard page (not readable, writable or executable) to uphold Rust’s soundness guarantees.
///
/// Note that some or all of these considerations may be irrelevant to some applications. For
/// example, Rust’s soundness story relies on all stacks having a guard page; however, if the user
/// is able to guarantee that the memory region used for the stack cannot be exceeded, a guard
/// page may end up being an unnecessary expense.
///
/// The previous stack must not be deallocated, as execution returns to it once the closure
/// completes. If the ability to deallocate the old stack is desired, consider `replace_stack`
/// instead.
///
/// # Guidelines
///
/// Memory regions that are aligned to a single page (usually 4kB) are an extremely portable choice
/// for stacks.
///
/// Allocate at least 4kB of stack. Some architectures (such as SPARC) consume stack memory
/// significantly faster than the more common architectures such as x86 or ARM. Allocating less
/// than 4kB of memory may make it impossible to commit more pages without overflowing the stack
/// later on.
///
/// # Unsafety
///
/// The stack `base` address must be aligned as appropriate for the target.
///
/// The stack `size` must be a multiple of the stack alignment required by the target.
///
/// The `size` must not overflow `isize`.
///
/// `callback` must not unwind or return control flow by any other means than directly returning.
///
/// # Examples
///
/// ```
/// use std::alloc;
/// const STACK_ALIGN: usize = 4096;
/// const STACK_SIZE: usize = 4096;
/// unsafe {
///     let layout = alloc::Layout::from_size_align(STACK_SIZE, STACK_ALIGN).unwrap();
///     let new_stack = alloc::alloc(layout);
///     assert!(!new_stack.is_null(), "allocations must succeed!");
///     let (stack, result) = psm::on_stack(new_stack, STACK_SIZE, || {
///         (psm::stack_pointer(), 4 + 4)
///     });
///     println!("4 + 4 = {} has been calculated on stack {:p}", result, stack);
/// }
/// ```
#[cfg(switchable_stack)]
pub unsafe fn on_stack<R, F: FnOnce() -> R>(base: *mut u8, size: usize, callback: F) -> R {
    use core::mem::MaybeUninit;

    extern_item! {
        unsafe fn with_on_stack<R, F: FnOnce() -> R>(callback_ptr: usize, return_ptr: usize) {
            let return_ptr = (*(return_ptr as *mut MaybeUninit<R>)).as_mut_ptr();
            let callback = (*(callback_ptr as *mut MaybeUninit<F>)).as_ptr();
            // It is safe to move out of `F`: `on_stack` keeps the closure in a `MaybeUninit`
            // (so it is never dropped there) and dropping only occurs in this callback.
            return_ptr.write((callback.read())());
        }
    }
    let sp = match StackDirection::new() {
        StackDirection::Ascending => base,
        StackDirection::Descending => base.offset(size as isize),
    };
    let mut callback: MaybeUninit<F> = MaybeUninit::new(callback);
    let mut return_value: MaybeUninit<R> = MaybeUninit::uninit();
    rust_psm_on_stack(
        &mut callback as *mut MaybeUninit<F> as usize,
        &mut return_value as *mut MaybeUninit<R> as usize,
        with_on_stack::<R, F>,
        sp,
        base,
    );
    return return_value.assume_init();
}

/// Run the provided non-terminating computation on an entirely new stack.
///
/// The `base` address must be the low address of the stack memory region, regardless of the
/// stack growth direction. It is not necessary for the whole region `[base; base + size]` to be
/// usable at the time this function is called, however it is required that at least the following
/// hold:
///
/// * Both `base` and `base + size` are aligned up to the target-specific requirements;
/// * Depending on the `StackDirection` value for the platform, the end of the stack memory region
///   which would end up containing the first frame(s) must have a sufficient number of pages
///   allocated to execute code until more pages are committed. The other end should contain a
///   guard page (not readable, writable or executable) to uphold Rust’s soundness guarantees.
///
/// Note that some or all of these considerations may be irrelevant to some applications. For
/// example, Rust’s soundness story relies on all stacks having a guard page; however, if the user
/// is able to guarantee that the memory region used for the stack cannot be exceeded, a guard
/// page may end up being an unnecessary expense.
///
/// The previous stack is not deallocated automatically, and must not be deallocated for as long
/// as any data on it is still referenced in any way (e.g. by the `callback` closure).
///
/// On platforms where multiple stack pointers are available, the “current” stack pointer is
/// replaced.
///
/// # Guidelines
///
/// Memory regions that are aligned to a single page (usually 4kB) are an extremely portable choice
/// for stacks.
///
/// Allocate at least 4kB of stack. Some architectures (such as SPARC) consume stack memory
/// significantly faster than the more common architectures such as x86 or ARM. Allocating less
/// than 4kB of memory may make it impossible to commit more pages without overflowing the stack
/// later on.
///
/// # Unsafety
///
/// The stack `base` address must be aligned as appropriate for the target.
///
/// The stack `size` must be a multiple of the stack alignment required by the target.
///
/// The `size` must not overflow `isize`.
///
/// `callback` must not return (this is currently not enforced by the type system, because using
/// `!` in this position is unstable), unwind or otherwise return control flow to any of the
/// previous frames.
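///
/// # Examples
///
/// A minimal sketch of usage (marked `no_run`: the callback never returns and instead terminates
/// the process):
///
/// ```no_run
/// use std::alloc;
/// const STACK_ALIGN: usize = 4096;
/// const STACK_SIZE: usize = 4096;
/// unsafe {
///     let layout = alloc::Layout::from_size_align(STACK_SIZE, STACK_ALIGN).unwrap();
///     let new_stack = alloc::alloc(layout);
///     assert!(!new_stack.is_null(), "allocations must succeed!");
///     psm::replace_stack(new_stack, STACK_SIZE, || {
///         println!("running on an entirely new stack");
///         // `callback` must never return, so terminate the process here instead.
///         std::process::exit(0);
///     });
/// }
/// ```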
#[cfg(switchable_stack)]
pub unsafe fn replace_stack<F: FnOnce()>(base: *mut u8, size: usize, callback: F) -> ! {
    extern_item! { unsafe fn with_replaced_stack<F: FnOnce()>(d: usize) -> ! {
        // It is safe to move the closure out: this function is required to never return, so the
        // copy on the old stack is effectively forgotten and will never be dropped there.
        ::core::ptr::read(d as *const F)();
        ::core::hint::unreachable_unchecked();
    } }
    let sp = match StackDirection::new() {
        StackDirection::Ascending => base,
        StackDirection::Descending => base.offset(size as isize),
    };
    rust_psm_replace_stack(
        &callback as *const F as usize,
        with_replaced_stack::<F>,
        sp,
        base,
    );
}

/// The direction in which the stack grows as new stack frames are allocated.
///
/// This is a target-specific property that can be obtained at runtime by calling
/// `StackDirection::new()`.
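///
/// # Examples
///
/// A small sketch of querying the growth direction, gated on target support:
///
/// ```
/// # use psm::psm_stack_information;
/// psm_stack_information! {
///     yes {
///         match psm::StackDirection::new() {
///             psm::StackDirection::Ascending => println!("the stack grows upwards"),
///             psm::StackDirection::Descending => println!("the stack grows downwards"),
///         }
///     }
///     no {
///         println!("the stack direction is not known for this target");
///     }
/// }
/// ```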
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum StackDirection {
    Ascending = 1,
    Descending = 2,
}

impl StackDirection {
    /// Obtain the stack growth direction.
    #[cfg(asm)]
    pub fn new() -> StackDirection {
        const ASC: u8 = StackDirection::Ascending as u8;
        const DSC: u8 = StackDirection::Descending as u8;
        unsafe {
            match rust_psm_stack_direction() {
                ASC => StackDirection::Ascending,
                DSC => StackDirection::Descending,
                _ => ::core::hint::unreachable_unchecked(),
            }
        }
    }
}

/// Returns the current stack pointer.
///
/// Note that the stack pointer returned is from the perspective of the caller. From the
/// perspective of the `stack_pointer` function itself, the pointer returned is its frame pointer.
///
/// While it is a goal to minimize the amount of stack used by this function, implementations for
/// some targets may be unable to avoid allocating a stack frame. As a result, this function is
/// suitable for stack exhaustion detection only in conjunction with sufficient padding.
///
/// Using `stack_pointer` to check for stack exhaustion is tricky to get right. It is impossible
/// to know the callee’s frame size, therefore such a value must be derived some other way. A
/// common approach is to use stack padding (reserve enough stack space for any function that may
/// be called) and to check against the padded threshold (see the sketch below). If the padding is
/// chosen incorrectly, a situation similar to the one described below may occur:
///
/// 1. For the stack exhaustion check, the remaining stack is checked against `stack_pointer` with
///    the padding applied;
/// 2. The callee allocates more stack than was accounted for by the padding, and accesses pages
///    outside the stack, invalidating the execution (e.g. by crashing).
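///
/// # Examples
///
/// A minimal sketch of the padding-based check described above, assuming a descending stack. The
/// `stack_limit` value here is hypothetical; a real program would obtain it from its runtime or
/// the operating system:
///
/// ```
/// # use psm::psm_stack_information;
/// psm_stack_information! {
///     yes {
///         // Hypothetical: the last usable address before the guard page.
///         let stack_limit: usize = 4096;
///         // Reserve enough headroom for any callee we intend to call.
///         const PADDING: usize = 64 * 1024;
///         let remaining = (psm::stack_pointer() as usize).saturating_sub(stack_limit);
///         if remaining < PADDING {
///             println!("running low on stack; avoid deep calls or switch stacks");
///         }
///     }
///     no {
///         println!("the stack pointer cannot be queried on this target");
///     }
/// }
/// ```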
#[cfg(asm)]
pub fn stack_pointer() -> *mut u8 {
    unsafe { rust_psm_stack_pointer() }
}

/// Macro that outputs its tokens only if `psm::on_stack` and `psm::replace_stack` are available.
///
/// # Examples
///
/// ```
/// # use psm::psm_stack_manipulation;
/// psm_stack_manipulation! {
///     yes {
///         /* Functions `on_stack` and `replace_stack` are available here */
///     }
///     no {
///         /* Functions `on_stack` and `replace_stack` are not available here */
///     }
/// }
/// ```
#[cfg(switchable_stack)]
#[macro_export]
macro_rules! psm_stack_manipulation {
    (yes { $($yes: tt)* } no { $($no: tt)* }) => { $($yes)* };
}

/// Macro that outputs its tokens only if `psm::on_stack` and `psm::replace_stack` are available.
///
/// # Examples
///
/// ```
/// # use psm::psm_stack_manipulation;
/// psm_stack_manipulation! {
///     yes {
///         /* Functions `on_stack` and `replace_stack` are available here */
///     }
///     no {
///         /* Functions `on_stack` and `replace_stack` are not available here */
///     }
/// }
/// ```
#[cfg(not(switchable_stack))]
#[macro_export]
macro_rules! psm_stack_manipulation {
    (yes { $($yes: tt)* } no { $($no: tt)* }) => { $($no)* };
}

/// Macro that outputs its tokens only if `psm::stack_pointer` and `psm::StackDirection::new` are
/// available.
///
/// # Examples
///
/// ```
/// # use psm::psm_stack_information;
/// psm_stack_information! {
///     yes {
///         /* `psm::stack_pointer` and `psm::StackDirection::new` are available here */
///     }
///     no {
///         /* `psm::stack_pointer` and `psm::StackDirection::new` are not available here */
///     }
/// }
/// ```
#[cfg(asm)]
#[macro_export]
macro_rules! psm_stack_information {
    (yes { $($yes: tt)* } no { $($no: tt)* }) => { $($yes)* };
}

/// Macro that outputs its tokens only if `psm::stack_pointer` and `psm::StackDirection::new` are
/// available.
///
/// # Examples
///
/// ```
/// # use psm::psm_stack_information;
/// psm_stack_information! {
///     yes {
///         /* `psm::stack_pointer` and `psm::StackDirection::new` are available here */
///     }
///     no {
///         /* `psm::stack_pointer` and `psm::StackDirection::new` are not available here */
///     }
/// }
/// ```
#[cfg(not(asm))]
#[macro_export]
macro_rules! psm_stack_information {
    (yes { $($yes: tt)* } no { $($no: tt)* }) => { $($no)* };
}