1use crate::{HeapRegs, RegisterStorage, RegisterStorageEnum};
6use starnix_uapi::errors::Errno;
7use starnix_uapi::{__NR_restart_syscall, error, user_regs_struct};
8
// Size of the x86_64 `syscall` instruction (0x0F 0x05) in bytes; used by
// `rewind_syscall_instruction` to step the instruction pointer back so the
// syscall is re-executed on resume.
const SYSCALL_INSTRUCTION_SIZE_BYTES: u64 = 2;
11
/// The registers of a task at the point it entered the kernel, plus the
/// bookkeeping needed to restart syscalls. Dereferences to the raw
/// `zx::sys::zx_restricted_state_t` via the `Deref`/`DerefMut` impls, so
/// individual registers are accessed as fields (e.g. `self.rax`).
#[derive(Default, Clone, Eq, PartialEq)]
pub struct RegisterState<T: RegisterStorage> {
    // Backing storage for the hardware register values.
    pub real_registers: T,

    // Saved copy of `rax` / the syscall number: written by
    // `save_registers_for_restart` and `sync_stack_ptr`, read back by
    // `restore_original_return_register` and `syscall_register`.
    pub orig_rax: u64,
}
26
27impl<T: RegisterStorage> RegisterState<T> {
28 pub fn save_registers_for_restart(&mut self, syscall_number: u64) {
30 self.rax = -(starnix_uapi::ENOSYS as i64) as u64;
35
36 self.orig_rax = syscall_number;
38 }
39
40 pub fn prepare_for_custom_restart(&mut self) {
42 self.rax = __NR_restart_syscall as u64;
43 }
44
45 pub fn restore_original_return_register(&mut self) {
47 self.rax = self.orig_rax;
48 }
49
50 pub fn instruction_pointer_register(&self) -> u64 {
53 self.ip
54 }
55
56 pub fn set_instruction_pointer_register(&mut self, new_ip: u64) {
59 self.ip = new_ip;
60 }
61
62 pub fn rewind_syscall_instruction(&mut self) {
64 self.ip -= SYSCALL_INSTRUCTION_SIZE_BYTES;
65 }
66
67 pub fn return_register(&self) -> u64 {
70 self.rax
71 }
72
73 pub fn set_return_register(&mut self, return_value: u64) {
76 self.rax = return_value;
77 }
78
79 pub fn stack_pointer_register(&self) -> u64 {
81 self.rsp
82 }
83
84 pub fn set_stack_pointer_register(&mut self, sp: u64) {
86 self.rsp = sp;
87 }
88
89 pub fn set_thread_pointer_register(&mut self, tp: u64) {
91 self.fs_base = tp;
92 }
93
94 pub fn set_arg0_register(&mut self, rdi: u64) {
96 self.rdi = rdi;
97 }
98
99 pub fn set_arg1_register(&mut self, rsi: u64) {
101 self.rsi = rsi;
102 }
103
104 pub fn set_arg2_register(&mut self, rdx: u64) {
106 self.rdx = rdx;
107 }
108
109 pub fn syscall_register(&self) -> u64 {
111 self.orig_rax
112 }
113
114 pub fn reset_flags(&mut self) {
116 self.flags = 0;
117 }
118
119 pub fn apply_user_register(
121 &mut self,
122 offset: usize,
123 f: &mut dyn FnMut(&mut u64),
124 ) -> Result<(), Errno> {
125 if offset == memoffset::offset_of!(user_regs_struct, r15) {
126 f(&mut self.r15);
127 } else if offset == memoffset::offset_of!(user_regs_struct, r14) {
128 f(&mut self.r14);
129 } else if offset == memoffset::offset_of!(user_regs_struct, r13) {
130 f(&mut self.r13);
131 } else if offset == memoffset::offset_of!(user_regs_struct, r12) {
132 f(&mut self.r12);
133 } else if offset == memoffset::offset_of!(user_regs_struct, rbp) {
134 f(&mut self.rbp);
135 } else if offset == memoffset::offset_of!(user_regs_struct, rbx) {
136 f(&mut self.rbx);
137 } else if offset == memoffset::offset_of!(user_regs_struct, r11) {
138 f(&mut self.r11);
139 } else if offset == memoffset::offset_of!(user_regs_struct, r10) {
140 f(&mut self.r10);
141 } else if offset == memoffset::offset_of!(user_regs_struct, r9) {
142 f(&mut self.r9);
143 } else if offset == memoffset::offset_of!(user_regs_struct, r8) {
144 f(&mut self.r8);
145 } else if offset == memoffset::offset_of!(user_regs_struct, rax) {
146 f(&mut self.rax);
147 } else if offset == memoffset::offset_of!(user_regs_struct, rcx) {
148 f(&mut self.rcx);
149 } else if offset == memoffset::offset_of!(user_regs_struct, rdx) {
150 f(&mut self.rdx);
151 } else if offset == memoffset::offset_of!(user_regs_struct, rsi) {
152 f(&mut self.rsi);
153 } else if offset == memoffset::offset_of!(user_regs_struct, rdi) {
154 f(&mut self.rdi);
155 } else if offset == memoffset::offset_of!(user_regs_struct, orig_rax) {
156 f(&mut self.orig_rax);
157 } else if offset == memoffset::offset_of!(user_regs_struct, rip) {
158 f(&mut self.ip);
159 } else if offset == memoffset::offset_of!(user_regs_struct, cs) {
160 let mut val = 0;
161 f(&mut val);
162 } else if offset == memoffset::offset_of!(user_regs_struct, eflags) {
163 f(&mut self.flags);
164 } else if offset == memoffset::offset_of!(user_regs_struct, rsp) {
165 f(&mut self.rsp);
166 } else if offset == memoffset::offset_of!(user_regs_struct, ss) {
167 let mut val = 0;
168 f(&mut val);
169 } else if offset == memoffset::offset_of!(user_regs_struct, fs_base) {
170 f(&mut self.fs_base);
171 } else if offset == memoffset::offset_of!(user_regs_struct, gs_base) {
172 f(&mut self.gs_base);
173 } else if offset == memoffset::offset_of!(user_regs_struct, ds) {
174 let mut val = 0;
175 f(&mut val);
176 } else if offset == memoffset::offset_of!(user_regs_struct, es) {
177 let mut val = 0;
178 f(&mut val);
179 } else if offset == memoffset::offset_of!(user_regs_struct, fs) {
180 let mut val = 0;
181 f(&mut val);
182 } else if offset == memoffset::offset_of!(user_regs_struct, gs) {
183 let mut val = 0;
184 f(&mut val);
185 } else {
186 return error!(EINVAL);
187 };
188 Ok(())
189 }
190
191 pub fn load(&mut self, regs: zx::sys::zx_restricted_state_t) {
192 *self.real_registers = regs;
193 self.sync_stack_ptr();
194 }
195
196 pub fn sync_stack_ptr(&mut self) {
197 self.orig_rax = self.rax;
198 }
199}
200
201impl<T: RegisterStorage> std::fmt::Debug for RegisterState<T> {
202 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
203 f.debug_struct("RegisterState")
204 .field("real_registers", &self.real_registers)
205 .field("orig_rax", &format_args!("{:#x}", &self.orig_rax))
206 .finish()
207 }
208}
209
210impl<T: RegisterStorage> std::ops::Deref for RegisterState<T> {
211 type Target = zx::sys::zx_restricted_state_t;
212
213 fn deref(&self) -> &Self::Target {
214 &*self.real_registers
215 }
216}
217
218impl<T: RegisterStorage> std::ops::DerefMut for RegisterState<T> {
219 fn deref_mut(&mut self) -> &mut Self::Target {
220 &mut *self.real_registers
221 }
222}
223
224impl From<RegisterState<HeapRegs>> for RegisterState<RegisterStorageEnum> {
225 fn from(regs: RegisterState<HeapRegs>) -> Self {
226 Self { real_registers: regs.real_registers.into(), orig_rax: regs.orig_rax }
227 }
228}
229
230impl From<RegisterState<RegisterStorageEnum>> for RegisterState<HeapRegs> {
231 fn from(regs: RegisterState<RegisterStorageEnum>) -> Self {
232 Self { real_registers: regs.real_registers.into(), orig_rax: regs.orig_rax }
233 }
234}