wasmer_compiler_cranelift/
eh.rs1use cranelift_codegen::{
8 ExceptionContextLoc, FinalizedMachCallSite, FinalizedMachExceptionHandler,
9};
10use cranelift_entity::EntityRef;
11use itertools::Itertools;
12use std::collections::hash_map::Entry;
13use std::collections::{HashMap, HashSet};
14use std::convert::TryFrom;
15use std::io::{Cursor, Write};
16
17use wasmer_compiler::types::{
18 relocation::{Relocation, RelocationKind, RelocationTarget},
19 section::{CustomSection, CustomSectionProtection, SectionBody, SectionIndex},
20};
21
22#[derive(Debug, Clone)]
24pub struct TagRelocation {
25 pub offset: u32,
27 pub tag: u32,
29}
30
31#[derive(Debug, Clone)]
34pub struct FunctionLsdaData {
35 pub bytes: Vec<u8>,
36 pub relocations: Vec<TagRelocation>,
37}
38
39pub fn build_function_lsda<'a>(
42 call_sites: impl Iterator<Item = FinalizedMachCallSite<'a>>,
43 function_length: usize,
44 pointer_bytes: u8,
45) -> Option<FunctionLsdaData> {
46 let mut sites = Vec::new();
47
48 for site in call_sites {
49 let mut catches = Vec::new();
50 let mut landing_pad = None;
51
52 for handler in site.exception_handlers {
55 match handler {
56 FinalizedMachExceptionHandler::Tag(tag, offset) => {
57 landing_pad = Some(landing_pad.unwrap_or(*offset));
58 catches.push(ExceptionType::Tag {
59 tag: u32::try_from(tag.index()).expect("tag index fits in u32"),
60 });
61 }
62 FinalizedMachExceptionHandler::Default(offset) => {
63 landing_pad = Some(landing_pad.unwrap_or(*offset));
64 catches.push(ExceptionType::CatchAll);
65 }
66 FinalizedMachExceptionHandler::Context(context) => {
67 match context {
71 ExceptionContextLoc::SPOffset(_) | ExceptionContextLoc::GPR(_) => {}
72 }
73 }
74 }
75 }
76
77 if catches.is_empty() {
78 continue;
79 }
80
81 let landing_pad = landing_pad.expect("landing pad offset set when catches exist");
82 let cs_start = site.ret_addr.saturating_sub(1);
83
84 sites.push(CallSiteDesc {
85 start: cs_start,
86 len: 1,
87 landing_pad,
88 actions: catches,
89 });
90 }
91
92 if sites.is_empty() {
93 return None;
94 }
95
96 let mut current_pos = 0u32;
99 let mut filled_sites = Vec::new();
100
101 for site in sites {
102 if site.start > current_pos {
103 filled_sites.push(CallSiteDesc {
105 start: current_pos,
106 len: site.start - current_pos,
107 landing_pad: 0,
108 actions: Vec::new(),
109 });
110 }
111 current_pos = site.start + site.len;
112 filled_sites.push(site);
113 }
114
115 if current_pos < function_length as u32 {
117 filled_sites.push(CallSiteDesc {
118 start: current_pos,
119 len: function_length as u32 - current_pos,
120 landing_pad: 0,
121 actions: Vec::new(),
122 });
123 }
124
125 let sites = filled_sites;
126
127 let mut type_entries = TypeTable::new();
128 let mut callsite_actions = Vec::with_capacity(sites.len());
129
130 for site in &sites {
131 #[cfg(debug_assertions)]
132 {
133 let catch_all_positions = site
136 .actions
137 .iter()
138 .positions(|a| matches!(a, ExceptionType::CatchAll))
139 .collect_vec();
140 assert!(catch_all_positions.iter().at_most_one().is_ok());
141 if let Some(&i) = catch_all_positions.first() {
142 assert!(i == site.actions.len() - 1);
143 }
144 }
145
146 let action_indices = site
147 .actions
148 .iter()
149 .rev()
152 .map(|action| type_entries.get_or_insert(*action) as i32)
153 .collect_vec();
154 callsite_actions.push(action_indices);
155 }
156
157 let action_table = encode_action_table(&callsite_actions);
158 let call_site_table = encode_call_site_table(&sites, &action_table);
159 let (type_table_bytes, type_table_relocs) = type_entries.encode(pointer_bytes);
160
161 let call_site_table_len = call_site_table.len() as u64;
162 let mut writer = Cursor::new(Vec::new());
163 writer
164 .write_all(&gimli::DW_EH_PE_omit.0.to_le_bytes())
165 .unwrap(); if type_entries.is_empty() {
168 writer
169 .write_all(&gimli::DW_EH_PE_omit.0.to_le_bytes())
170 .unwrap();
171 } else {
172 writer
173 .write_all(&gimli::DW_EH_PE_absptr.0.to_le_bytes())
174 .unwrap();
175 }
176
177 if !type_entries.is_empty() {
178 let ttype_table_end = 1 + uleb128_len(call_site_table_len)
180 + call_site_table.len()
181 + action_table.bytes.len()
182 + type_table_bytes.len();
183 leb128::write::unsigned(&mut writer, ttype_table_end as u64).unwrap();
184 }
185
186 writer
187 .write_all(&gimli::DW_EH_PE_udata4.0.to_le_bytes())
188 .unwrap();
189 leb128::write::unsigned(&mut writer, call_site_table_len).unwrap();
190 writer.write_all(&call_site_table).unwrap();
191 writer.write_all(&action_table.bytes).unwrap();
192
193 let type_table_offset = writer.position() as u32;
194 writer.write_all(&type_table_bytes).unwrap();
195
196 let mut relocations = Vec::new();
197 for reloc in type_table_relocs {
198 relocations.push(TagRelocation {
199 offset: type_table_offset + reloc.offset,
200 tag: reloc.tag,
201 });
202 }
203
204 Some(FunctionLsdaData {
205 bytes: writer.into_inner(),
206 relocations,
207 })
208}
209
210pub fn build_tag_section(
212 lsda_data: &[Option<FunctionLsdaData>],
213) -> Option<(CustomSection, HashMap<u32, u32>)> {
214 let mut unique_tags = HashSet::new();
215 for data in lsda_data.iter().flatten() {
216 for reloc in &data.relocations {
217 unique_tags.insert(reloc.tag);
218 }
219 }
220
221 if unique_tags.is_empty() {
222 return None;
223 }
224
225 let mut tags: Vec<u32> = unique_tags.into_iter().collect();
226 tags.sort_unstable();
227
228 let mut bytes = Vec::with_capacity(tags.len() * std::mem::size_of::<u32>());
229 let mut offsets = HashMap::new();
230 for tag in tags {
231 let offset = bytes.len() as u32;
232 bytes.extend_from_slice(&tag.to_ne_bytes());
233 offsets.insert(tag, offset);
234 }
235
236 let section = CustomSection {
237 protection: CustomSectionProtection::Read,
238 alignment: None,
239 bytes: SectionBody::new_with_vec(bytes),
240 relocations: Vec::new(),
241 };
242
243 Some((section, offsets))
244}
245
246pub fn build_lsda_section(
258 lsda_data: Vec<Option<FunctionLsdaData>>,
259 pointer_bytes: u8,
260 tag_offsets: &HashMap<u32, u32>,
261 tag_section_index: Option<SectionIndex>,
262) -> (Option<CustomSection>, Vec<Option<u32>>) {
263 let mut bytes = Vec::new();
264 let mut relocations = Vec::new();
265 let mut offsets_per_function = Vec::with_capacity(lsda_data.len());
266
267 let pointer_kind = match pointer_bytes {
268 4 => RelocationKind::Abs4,
269 8 => RelocationKind::Abs8,
270 other => panic!("unsupported pointer size {other} for LSDA generation"),
271 };
272
273 for data in lsda_data.into_iter() {
274 if let Some(data) = data {
275 let base = bytes.len() as u32;
276 bytes.extend_from_slice(&data.bytes);
277
278 for reloc in &data.relocations {
279 let target_offset = tag_offsets
280 .get(&reloc.tag)
281 .copied()
282 .expect("missing tag offset for relocation");
283 relocations.push(Relocation {
284 kind: pointer_kind,
285 reloc_target: RelocationTarget::CustomSection(
286 tag_section_index
287 .expect("tag section index must exist when relocations are present"),
288 ),
289 offset: base + reloc.offset,
290 addend: target_offset as i64,
291 });
292 }
293
294 offsets_per_function.push(Some(base));
295 } else {
296 offsets_per_function.push(None);
297 }
298 }
299
300 if bytes.is_empty() {
301 (None, offsets_per_function)
302 } else {
303 (
304 Some(CustomSection {
305 protection: CustomSectionProtection::Read,
306 alignment: None,
307 bytes: SectionBody::new_with_vec(bytes),
308 relocations,
309 }),
310 offsets_per_function,
311 )
312 }
313}
314
315#[derive(Debug)]
316struct CallSiteDesc {
317 start: u32,
318 len: u32,
319 landing_pad: u32,
320 actions: Vec<ExceptionType>,
321}
322
323#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]
324enum ExceptionType {
325 Tag { tag: u32 },
326 CatchAll,
327}
328
329#[derive(Debug)]
330struct TypeTable {
331 entries: indexmap::IndexSet<ExceptionType>,
332}
333
334impl TypeTable {
335 fn new() -> Self {
336 Self {
337 entries: indexmap::IndexSet::new(),
338 }
339 }
340
341 fn is_empty(&self) -> bool {
342 self.entries.is_empty()
343 }
344
345 fn get_or_insert(&mut self, exception: ExceptionType) -> usize {
346 self.entries.insert(exception);
347
348 self.entries
350 .get_index_of(&exception)
351 .expect("must be already inserted")
352 + 1
353 }
354
355 fn encode(&self, pointer_bytes: u8) -> (Vec<u8>, Vec<TagRelocation>) {
356 let mut bytes = Vec::with_capacity(self.entries.len() * pointer_bytes as usize);
357 let mut relocations = Vec::new();
358
359 for entry in self.entries.iter().rev() {
361 let offset = bytes.len() as u32;
362 match entry {
363 ExceptionType::Tag { tag } => {
364 bytes.extend(std::iter::repeat_n(0, pointer_bytes as usize));
365 relocations.push(TagRelocation { offset, tag: *tag });
366 }
367 ExceptionType::CatchAll => {
368 bytes.extend(std::iter::repeat_n(0, pointer_bytes as usize));
369 }
370 }
371 }
372
373 (bytes, relocations)
374 }
375}
376
377struct ActionTable {
378 bytes: Vec<u8>,
379 first_action_offsets: Vec<Option<u32>>,
380}
381
382fn encode_action_table(callsite_actions: &[Vec<i32>]) -> ActionTable {
383 let mut writer = Cursor::new(Vec::new());
384 let mut first_action_offsets = Vec::new();
385
386 let mut cache = HashMap::new();
387
388 for actions in callsite_actions {
389 if actions.is_empty() {
390 first_action_offsets.push(None);
391 } else {
392 match cache.entry(actions.clone()) {
393 Entry::Occupied(entry) => {
394 first_action_offsets.push(Some(*entry.get()));
395 }
396 Entry::Vacant(entry) => {
397 let mut last_action_start = 0;
398 for (i, &ttype_index) in actions.iter().enumerate() {
399 let next_action_start = writer.position();
400 leb128::write::signed(&mut writer, ttype_index as i64)
401 .expect("leb128 write failed");
402
403 if i != 0 {
404 let displacement = last_action_start - writer.position() as i64;
406 leb128::write::signed(&mut writer, displacement)
407 .expect("leb128 write failed");
408 } else {
409 leb128::write::signed(&mut writer, 0).expect("leb128 write failed");
410 }
411 last_action_start = next_action_start as i64;
412 }
413 let last_action_start = last_action_start as u32;
414 entry.insert(last_action_start);
415 first_action_offsets.push(Some(last_action_start));
416 }
417 }
418 }
419 }
420
421 ActionTable {
422 bytes: writer.into_inner(),
423 first_action_offsets,
424 }
425}
426
427fn encode_call_site_table(callsites: &[CallSiteDesc], action_table: &ActionTable) -> Vec<u8> {
428 let mut writer = Cursor::new(Vec::new());
429 for (idx, site) in callsites.iter().enumerate() {
430 write_encoded_offset(site.start, &mut writer);
431 write_encoded_offset(site.len, &mut writer);
432 write_encoded_offset(site.landing_pad, &mut writer);
433
434 let action = match action_table.first_action_offsets[idx] {
435 Some(offset) => offset as u64 + 1,
436 None => 0,
437 };
438 leb128::write::unsigned(&mut writer, action).expect("leb128 write failed");
439 }
440 writer.into_inner()
441}
442
443fn write_encoded_offset(val: u32, out: &mut impl Write) {
444 out.write_all(&val.to_le_bytes())
446 .expect("write to buffer failed")
447}
448
449fn uleb128_len(value: u64) -> usize {
450 let mut cursor = Cursor::new([0u8; 10]);
451 leb128::write::unsigned(&mut cursor, value).unwrap()
452}