1use crate::bytes::serde::section_header::SectionHeader;
3use crate::tri;
4use indexmap::IndexMap;
5use winnow::{
6 Parser,
7 binary::{self, Endianness},
8 error::{ContextError, StrContext, StrContextValue::*},
9 token::take_while,
10};
11
/// Local fixup table: `src` offset -> `dst` offset (both `u32`).
pub type LocalFixups = IndexMap<u32, u32>;
/// Global fixup table: `src` offset -> (`index`, `dst` offset).
/// NOTE(review): the `GLobal` capitalization is a typo, but this alias is
/// public API — renaming it would break downstream callers, so it is kept.
pub type GLobalFixups = IndexMap<u32, (u32, u32)>;
/// Virtual fixup table: `src` offset -> (`index`, `dst` offset).
pub type VirtualFixups = IndexMap<u32, (u32, u32)>;
16
/// The three fixup tables read from one binary section.
///
/// Tuple/field meanings follow the parser context labels in `read_local_fixups`
/// and `read_fixups` below (`src`, `index`, `dst`); what the offsets point at
/// is format-specific — confirm against the packfile format documentation.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct Fixups {
    /// `src` -> `dst` offset pairs.
    pub local_fixups: LocalFixups,

    /// `src` -> (`index`, `dst`) triples.
    pub global_fixups: GLobalFixups,

    /// `src` -> (`index`, `dst`) triples.
    pub virtual_fixups: VirtualFixups,
}
54
/// Byte size of one local fixup entry: `src: u32` + `dst: u32`.
pub const LOCAL_FIXUP_ONE_SIZE: u32 = 8;
/// Byte size of one global fixup entry: `src` + `index` + `dst`, each `u32`.
pub const GLOBAL_FIXUP_ONE_SIZE: u32 = 12;
/// Byte size of one virtual fixup entry: `src` + `index` + `dst`, each `u32`.
pub const VIRTUAL_FIXUP_ONE_SIZE: u32 = 12;

/// Sentinel (`0xffff_ffff`) used as alignment padding; a `src` equal to this
/// value terminates a fixup table (see the `verify` calls below).
pub const FIXUP_VALUE_FOR_ALIGN: u32 = u32::MAX;
64
65impl Fixups {
66 pub const fn new(
67 local_fixups: LocalFixups,
68 global_fixups: GLobalFixups,
69 virtual_fixups: VirtualFixups,
70 ) -> Self {
71 Self {
72 local_fixups,
73 global_fixups,
74 virtual_fixups,
75 }
76 }
77
78 pub fn from_section_header<'a>(
83 header: &SectionHeader,
84 endian: Endianness,
85 ) -> impl Parser<&'a [u8], Self, ContextError> {
86 let SectionHeader {
87 local_fixups_offset,
88 global_fixups_offset,
89 virtual_fixups_offset,
90 exports_offset,
91 ..
92 } = *header;
93
94 let local_range = global_fixups_offset - local_fixups_offset;
95 let global_range = virtual_fixups_offset - global_fixups_offset;
96 let virtual_range = exports_offset - virtual_fixups_offset;
97 let _needs_bytes_len = local_range + global_range + virtual_range;
98
99 #[cfg(feature = "tracing")]
100 tracing::trace!(local_range, global_range, virtual_range);
101
102 move |bytes: &mut &'a [u8]| {
103 #[cfg(feature = "tracing")]
104 if _needs_bytes_len as usize > bytes.len() {
105 tracing::error!("need {_needs_bytes_len}. but got {}", bytes.len());
106 }
107
108 let local_max_len = local_range / LOCAL_FIXUP_ONE_SIZE;
109 let global_max_len = global_range / GLOBAL_FIXUP_ONE_SIZE;
110 let virtual_max_len = virtual_range / VIRTUAL_FIXUP_ONE_SIZE;
111 Ok(Self {
112 local_fixups: tri!(read_local_fixups(bytes, endian, local_max_len)),
113 global_fixups: tri!(read_fixups(bytes, endian, global_max_len)),
114 virtual_fixups: tri!(read_fixups(bytes, endian, virtual_max_len)),
115 })
116 }
117 }
118}
119
120fn read_local_fixups(
125 bytes: &mut &[u8],
126 endian: Endianness,
127 len: u32,
128) -> winnow::ModalResult<LocalFixups> {
129 let mut local_map = LocalFixups::new();
130 for _ in 0..len {
131 if let Ok(local_src) = binary::u32::<&[u8], ContextError>(endian)
132 .verify(|&src| src != FIXUP_VALUE_FOR_ALIGN)
133 .context(StrContext::Expected(Description("local_fixup.src(u32)")))
134 .parse_next(bytes)
135 {
136 let local_dst = tri!(
137 binary::u32(endian)
138 .context(StrContext::Expected(Description("local_fixup.dst(u32)")))
139 .parse_next(bytes)
140 );
141
142 #[cfg(feature = "tracing")]
143 tracing::trace!(local_src, local_dst);
144
145 local_map.insert(local_src, local_dst);
146 } else {
147 break;
148 };
149 }
150 tri!(take_while(0.., 0xff).parse_next(bytes)); Ok(local_map)
153}
154
155fn read_fixups(
160 bytes: &mut &[u8],
161 endian: Endianness,
162 len: u32,
163) -> winnow::ModalResult<VirtualFixups> {
164 let mut fixups = VirtualFixups::new();
165 for _ in 0..len {
166 if let Ok(src) = binary::u32::<&[u8], ContextError>(endian)
167 .verify(|src| *src != FIXUP_VALUE_FOR_ALIGN)
168 .parse_next(bytes)
169 {
170 #[cfg(feature = "tracing")]
171 tracing::trace!(src);
172
173 let index = tri!(
174 binary::u32(endian)
175 .context(StrContext::Expected(Description("fixup.index(u32)")))
176 .parse_next(bytes)
177 );
178 #[cfg(feature = "tracing")]
179 tracing::trace!(index);
180
181 let dst = tri!(
182 binary::u32(endian)
183 .context(StrContext::Expected(Description("fixup.dst(u32)")))
184 .parse_next(bytes)
185 );
186 #[cfg(feature = "tracing")]
187 tracing::trace!(dst);
188
189 fixups.insert(src, (index, dst));
190 } else {
191 break;
192 }
193 }
194 tri!(take_while(0.., 0xff).parse_next(bytes)); Ok(fixups)
197}
198
#[cfg(test)]
mod tests {
    use super::*;

    /// Parses a captured fixup blob through all three readers in file order.
    ///
    /// Fixture layout (little-endian u32s): local (src, dst) pairs, then a
    /// 0xff padding run, then global (src, index, dst) triples, padding,
    /// then virtual triples and trailing padding.
    #[test]
    fn should_parse_fixups() {
        #[rustfmt::skip]
        const FIXUPS: &[u8] = &[
            0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x18, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0xb0, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x08, 0x01, 0x00, 0x00, 0xd0, 0x00, 0x00, 0x00, 0x30, 0x01, 0x00, 0x00, 0xd8, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x50, 0x01, 0x00, 0x00, 0xe8, 0x00, 0x00, 0x00, 0x60, 0x01, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4b, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x79, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, ];

        let mut bytes = FIXUPS;

        // The `len` arguments are generous upper bounds on the entry count;
        // each reader stops at the 0xff sentinel, so the exact value does
        // not need to match the data.
        let local = read_local_fixups(&mut bytes, Endianness::Little, 5 * 16).unwrap();
        let global = read_fixups(&mut bytes, Endianness::Little, 2 * 16).unwrap();
        let virtual_fixups = read_fixups(&mut bytes, Endianness::Little, 3 * 16).unwrap();

        // `IndexMap` equality disregards insertion order, so the expected
        // pairs here need not be listed in file order.
        assert_eq!(
            local,
            [
                (216, 320),
                (232, 352),
                (0, 16),
                (24, 64),
                (256, 264),
                (208, 304),
                (176, 256),
                (16, 40),
                (224, 336)
            ]
            .into()
        );

        assert_eq!(global, [(32, (2, 80)), (112, (2, 128))].into());
        assert_eq!(
            virtual_fixups,
            [(0, (0, 75,)), (80, (0, 101,)), (128, (0, 121))].into()
        );
    }
}