use core::{
    mem::{align_of, size_of},
    ptr::drop_in_place,
};
use synchronization::blocking_mutex::{Mutex, raw::CriticalSectionRawMutex};

use crate::context;

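/// Internal lock state: the task that currently holds the mutex (if any)
/// and how many times it has been acquired (for recursive locking).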
#[derive(Debug, Clone, Copy, Default)]
struct MutexState {
    task: Option<usize>,
    lock_count: u32,
}

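/// A mutex usable from C through the `xila_*_mutex` FFI functions below.
///
/// The state is protected by a critical-section blocking mutex, and the
/// `recursive` flag selects between plain and recursive locking semantics.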
pub struct RawMutex {
    mutex: Mutex<CriticalSectionRawMutex, MutexState>,
    recursive: bool,
}

impl RawMutex {
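    /// Creates an unlocked mutex; `recursive` selects recursive locking semantics.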
    pub fn new(recursive: bool) -> Self {
        Self {
            mutex: Mutex::new(MutexState::default()),
            recursive,
        }
    }

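    /// Returns `true` if `pointer` is non-null and properly aligned for `RawMutex`.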
    pub fn is_valid_pointer(pointer: *const RawMutex) -> bool {
        !pointer.is_null() && (pointer as usize).is_multiple_of(align_of::<Self>())
    }

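    /// Converts a raw pointer into a shared reference after basic validation.
    ///
    /// # Safety
    ///
    /// `pointer` must point to an initialized `RawMutex` that outlives the
    /// returned reference; only null and alignment checks are performed here.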
    pub unsafe fn from_pointer<'a>(pointer: *const RawMutex) -> Option<&'a Self> {
        unsafe {
            if !Self::is_valid_pointer(pointer) {
                return None;
            }
            Some(&*pointer)
        }
    }

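    /// Converts a raw pointer into an exclusive reference after basic validation.
    ///
    /// # Safety
    ///
    /// `pointer` must point to an initialized `RawMutex` that outlives the
    /// returned reference, and no other reference to it may exist for that
    /// lifetime; only null and alignment checks are performed here.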
    pub unsafe fn from_mutable_pointer<'a>(pointer: *mut RawMutex) -> Option<&'a mut Self> {
        unsafe {
            if !Self::is_valid_pointer(pointer) {
                return None;
            }
            Some(&mut *pointer)
        }
    }

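    /// Attempts to acquire the mutex for the current task without blocking.
    ///
    /// Returns `true` if the mutex was free, or if it is recursive and already
    /// held by the current task (incrementing the lock count). Returns `false`
    /// if it is held by another task or re-entered while non-recursive.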
    pub fn lock(&self) -> bool {
        let current_task = context::get_instance()
            .get_current_task_identifier()
            .into_inner() as usize;

        unsafe {
            self.mutex.lock_mut(|state| {
                if let Some(owner) = state.task {
                    // Already held: only a recursive re-entry by the owner succeeds.
                    if owner == current_task && self.recursive {
                        state.lock_count += 1;
                        return true;
                    }
                    return false;
                }

                // Unowned: take ownership for the current task.
                state.task = Some(current_task);
                state.lock_count = 1;
                true
            })
        }
    }

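    /// Releases one level of locking held by the current task.
    ///
    /// For a recursive mutex the lock count is decremented and ownership is
    /// released only when the last level is unlocked. Returns `false` if the
    /// current task does not own the mutex.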
    pub fn unlock(&self) -> bool {
        let current_task = context::get_instance()
            .get_current_task_identifier()
            .into_inner() as usize;

        unsafe {
            self.mutex.lock_mut(|state| {
                if let Some(owner) = state.task
                    && owner == current_task
                {
                    if self.recursive && state.lock_count > 1 {
                        // Recursive unlock: drop one level but keep ownership.
                        state.lock_count -= 1;
                    } else {
                        // Last (or only) level: release ownership entirely.
                        state.task = None;
                        state.lock_count = 0;
                    }
                    return true;
                }
                // Not owned by the current task.
                false
            })
        }
    }
}

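/// Size of `RawMutex` in bytes, exported so C callers can reserve storage for it.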
#[unsafe(no_mangle)]
pub static RAW_MUTEX_SIZE: usize = size_of::<RawMutex>();

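/// Initializes a non-recursive mutex in the storage pointed to by `mutex`.
///
/// Returns `false` if the pointer is null or misaligned.
///
/// # Safety
///
/// `mutex` must point to writable storage of at least `RAW_MUTEX_SIZE` bytes
/// that is not accessed concurrently during initialization.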
#[unsafe(no_mangle)]
pub unsafe extern "C" fn xila_initialize_mutex(mutex: *mut RawMutex) -> bool {
    unsafe {
        if mutex.is_null() {
            return false;
        }

        if !(mutex as usize).is_multiple_of(align_of::<RawMutex>()) {
            return false;
        }

        mutex.write(RawMutex::new(false));

        true
    }
}

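/// Initializes a recursive mutex in the storage pointed to by `mutex`.
///
/// Returns `false` if the pointer is null or misaligned.
///
/// # Safety
///
/// `mutex` must point to writable storage of at least `RAW_MUTEX_SIZE` bytes
/// that is not accessed concurrently during initialization.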
#[unsafe(no_mangle)]
pub unsafe extern "C" fn xila_initialize_recursive_mutex(mutex: *mut RawMutex) -> bool {
    unsafe {
        if mutex.is_null() {
            return false;
        }

        if !(mutex as usize).is_multiple_of(align_of::<RawMutex>()) {
            return false;
        }

        mutex.write(RawMutex::new(true));

        true
    }
}

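/// Locks the mutex for the current task, returning `false` on an invalid
/// pointer or if the lock could not be acquired.
///
/// # Safety
///
/// `mutex` must point to a mutex previously initialized with one of the
/// `xila_initialize_*_mutex` functions.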
#[unsafe(no_mangle)]
pub unsafe extern "C" fn xila_lock_mutex(mutex: *mut RawMutex) -> bool {
    unsafe {
        let mutex = match RawMutex::from_mutable_pointer(mutex) {
            Some(mutex) => mutex,
            None => return false,
        };

        mutex.lock()
    }
}

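/// Unlocks the mutex, returning `false` on an invalid pointer or if the
/// current task does not own it.
///
/// # Safety
///
/// `mutex` must point to a mutex previously initialized with one of the
/// `xila_initialize_*_mutex` functions.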
#[unsafe(no_mangle)]
pub unsafe extern "C" fn xila_unlock_mutex(mutex: *mut RawMutex) -> bool {
    unsafe {
        let mutex = match RawMutex::from_mutable_pointer(mutex) {
            Some(mutex) => mutex,
            None => return false,
        };

        mutex.unlock()
    }
}

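/// Drops the mutex in place, returning `false` on an invalid pointer.
///
/// # Safety
///
/// `mutex` must point to an initialized mutex; the storage must not be used
/// as a mutex again without re-initialization.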
#[unsafe(no_mangle)]
pub unsafe extern "C" fn xila_destroy_mutex(mutex: *mut RawMutex) -> bool {
    unsafe {
        let mutex = match RawMutex::from_mutable_pointer(mutex) {
            Some(mutex) => mutex,
            None => return false,
        };

        // Run the destructor in place without freeing the backing storage.
        drop_in_place(mutex);

        true
    }
}