// physis/sqpack/data.rs

1// SPDX-FileCopyrightText: 2023 Joshua Goins <josh@redstrate.com>
2// SPDX-License-Identifier: GPL-3.0-or-later
3
4use std::io::Write;
5use std::io::{Cursor, Read, Seek, SeekFrom};
6
7use crate::ByteBuffer;
8use binrw::BinRead;
9use binrw::BinWrite;
10use binrw::{BinReaderExt, binrw};
11
12use crate::common_file_operations::read_bool_from;
13use crate::model::ModelFileHeader;
14use crate::sqpack::read_data_block;
15
#[binrw]
#[brw(repr = i32)]
#[derive(Debug, PartialEq, Eq)]
/// The file type of the data entry.
///
/// Stored on disk as an `i32` discriminant starting at 1 (see `repr = i32`).
pub enum FileType {
    /// Empty entry, usually invalid.
    Empty = 1,
    /// Encompasses every file that is not a model or a texture, which are stored in a special fashion.
    Standard,
    /// Model (MDL) files.
    Model,
    /// Texture (TEX) files.
    Texture,
}
30
/// Extra file info for a [`FileType::Standard`] entry.
#[derive(BinRead)]
struct StandardFileBlock {
    /// Number of data blocks making up the file; the block table follows the
    /// file info header. The 8 bytes before this field are skipped (unused here).
    #[br(pad_before = 8)]
    num_blocks: u32,
}
36
/// One entry of a texture's per-LOD (mip level) block table.
#[derive(BinRead, Debug)]
#[allow(dead_code)]
struct TextureLodBlock {
    /// Offset of this LOD's compressed data, relative to the end of the file
    /// info header (see its use in `read_texture_file`).
    compressed_offset: u32,
    /// Compressed byte size; a value of 0 on the first LOD skips the header
    /// copy in `read_texture_file`.
    compressed_size: u32,
    decompressed_size: u32,

    // NOTE(review): not used by this module — presumably an index into the
    // global block table; confirm against the sqpack format docs.
    block_offset: u32,
    /// Number of consecutive data blocks for this LOD.
    block_count: u32,
}
47
/// Marker trait for numeric types that binrw can read/write with no arguments
/// and that can be accumulated with `+=`. Lets [`ModelMemorySizes`] be generic
/// over the integer width (`u16`/`u32`).
pub trait AnyNumberType<'a>:
    BinRead<Args<'a> = ()> + BinWrite<Args<'a> = ()> + std::ops::AddAssign + Copy + Default + 'static
{
}
52
// Blanket impl: any type satisfying the bounds automatically implements
// `AnyNumberType`, so no per-type impls are needed.
impl<'a, T> AnyNumberType<'a> for T where
    T: BinRead<Args<'a> = ()>
        + BinWrite<Args<'a> = ()>
        + std::ops::AddAssign
        + Copy
        + Default
        + 'static
{
}
62
/// Per-section size bookkeeping for a model (MDL) entry, generic over the
/// integer width (`u32` for sizes/offsets, `u16` for counts — see
/// [`ModelFileBlock`]). Field order matches the on-disk layout.
#[derive(BinRead, BinWrite)]
pub struct ModelMemorySizes<T: for<'a> AnyNumberType<'a>> {
    pub stack_size: T,
    pub runtime_size: T,

    // One entry per LOD; the format always stores three levels.
    pub vertex_buffer_size: [T; 3],
    pub edge_geometry_vertex_buffer_size: [T; 3],
    pub index_buffer_size: [T; 3],
}
72
73impl<T: for<'a> AnyNumberType<'a>> ModelMemorySizes<T> {
74    pub fn total(&self) -> T {
75        let mut total: T = T::default();
76
77        total += self.stack_size;
78        total += self.runtime_size;
79
80        for i in 0..3 {
81            total += self.vertex_buffer_size[i];
82            total += self.edge_geometry_vertex_buffer_size[i];
83            total += self.index_buffer_size[i];
84        }
85
86        total
87    }
88}
89
/// Extra file info for a [`FileType::Model`] entry.
#[derive(BinRead)]
#[allow(dead_code)]
pub struct ModelFileBlock {
    pub num_blocks: u32,
    pub num_used_blocks: u32,
    /// Model format version, copied into the rebuilt `ModelFileHeader`.
    pub version: u32,

    /// Decompressed sizes per section.
    pub uncompressed_size: ModelMemorySizes<u32>,
    /// Compressed sizes per section.
    pub compressed_size: ModelMemorySizes<u32>,
    /// Section offsets, relative to the end of the file info header.
    pub offset: ModelMemorySizes<u32>,
    // NOTE(review): unused here — presumably starting block indices per section.
    pub index: ModelMemorySizes<u16>,
    /// Number of data blocks per section; `num.total()` is the whole block count.
    pub num: ModelMemorySizes<u16>,

    pub vertex_declaration_num: u16,
    pub material_num: u16,
    pub num_lods: u8,

    #[br(map = read_bool_from::<u8>)]
    pub index_buffer_streaming_enabled: bool,
    // One trailing padding byte follows this flag.
    #[brw(pad_after = 1)]
    #[br(map = read_bool_from::<u8>)]
    pub edge_geometry_enabled: bool,
}
113
/// Extra file info for a [`FileType::Texture`] entry: the per-LOD block table.
#[derive(BinRead, Debug)]
struct TextureBlock {
    /// The 8 bytes before this count are skipped (unused here).
    #[br(pad_before = 8)]
    num_blocks: u32,

    /// One entry per LOD; exactly `num_blocks` of them.
    #[br(count = num_blocks)]
    lods: Vec<TextureLodBlock>,
}
122
/// A SqPack file info header. It can optionally contain extra information, such as texture or
/// model data depending on the file type.
#[derive(BinRead)]
#[br(little)]
struct FileInfo {
    /// Size of this header; the data region starts at `entry offset + size`.
    size: u32,
    file_type: FileType,
    /// Decompressed size of the whole file; used to pre-allocate output buffers.
    file_size: u32,

    // At most one of the following is present, selected by `file_type`
    // (none for `Empty`).
    #[br(if (file_type == FileType::Standard))]
    standard_info: Option<StandardFileBlock>,

    #[br(if (file_type == FileType::Model))]
    model_info: Option<ModelFileBlock>,

    #[br(if (file_type == FileType::Texture))]
    texture_info: Option<TextureBlock>,
}
141
/// One entry of a standard file's block table.
#[binrw]
#[br(little)]
pub struct Block {
    /// Offset of the block, relative to the start of the entry's data region;
    /// 4 trailing bytes are skipped.
    #[br(pad_after = 4)]
    offset: i32,
}
148
/// How a data block's payload is stored on disk.
///
/// There is no explicit tag byte: the first raw `i32` (`x`, imported from
/// [`BlockHeader`]) is either the compressed length, or the sentinel value
/// 32000 meaning the payload is stored uncompressed (see `BlockHeader`'s
/// write-side `calc`). The `i32` consumed by the `map` closure below is
/// rolled back by `BlockHeader`'s `restore_position`, so it contributes
/// nothing to the parse.
#[binrw]
#[derive(Debug)]
#[br(import { x : i32, y : i32 })]
#[br(map = | _ : i32 | if x < 32000 { CompressionMode::Compressed{ compressed_length : x, decompressed_length : y} } else { CompressionMode::Uncompressed { file_size : y } } )]
pub enum CompressionMode {
    // we manually map here, because for this case the enum value is also a raw value we want to extract :-)
    Compressed {
        compressed_length: i32,
        decompressed_length: i32,
    },
    Uncompressed {
        file_size: i32,
    },
}
163
/// Header preceding each compressed data block.
#[binrw]
#[derive(Debug)]
#[brw(little)]
pub struct BlockHeader {
    // NOTE(review): presumably the byte size of this header itself — confirm
    // against read_data_block's usage. 4 padding bytes follow.
    #[brw(pad_after = 4)]
    pub size: u32,

    // First raw i32: compressed length, or the 32000 sentinel when the
    // payload is uncompressed. Read as a temp, recomputed on write.
    #[br(temp)]
    #[bw(calc = match compression { CompressionMode::Compressed{ compressed_length, .. } => { *compressed_length } CompressionMode::Uncompressed{ .. } => { 32000 }})]
    x: i32,

    // Second raw i32: decompressed length (or the file size when uncompressed).
    #[br(temp)]
    #[bw(calc = match compression { CompressionMode::Compressed{ decompressed_length, .. } => { *decompressed_length } CompressionMode::Uncompressed{ file_size } => { *file_size }})]
    y: i32,

    /// Decoded from `x`/`y`; `restore_position` rewinds whatever the mapped
    /// read consumed, leaving the stream positioned right after `y`.
    #[br(args { x, y })]
    #[brw(restore_position)]
    pub compression: CompressionMode,
}
183
/// A handle to an open SqPack data (dat) file.
pub struct SqPackData {
    // Underlying file; seeked freely by the read_* methods.
    file: std::fs::File,
}
187
// from https://users.rust-lang.org/t/how-best-to-convert-u8-to-u16/57551/4
/// Reinterprets a mutable `u16` slice as its raw bytes, so a `Vec<u16>` can be
/// filled directly with `read_exact`.
///
/// NOTE(review): the resulting `u16` values are in host byte order; the sqpack
/// data being read is little-endian, so this assumes a little-endian host —
/// confirm if big-endian targets matter.
fn to_u8_slice(slice: &mut [u16]) -> &mut [u8] {
    let byte_len = std::mem::size_of_val(slice);
    // SAFETY: `u8` has alignment 1, the pointer/length pair covers exactly the
    // memory owned by `slice`, and the returned slice borrows `slice` mutably,
    // so no aliasing or out-of-bounds access is possible.
    unsafe { std::slice::from_raw_parts_mut(slice.as_mut_ptr().cast::<u8>(), byte_len) }
}
193
194impl SqPackData {
195    /// Creates a new reference to an existing dat file.
196    pub fn from_existing(path: &str) -> Option<Self> {
197        Some(Self {
198            file: std::fs::File::open(path).ok()?,
199        })
200    }
201
202    /// Reads from a certain offset inside of the dat file. This offset will be fixed automatically
203    /// by the function.
204    ///
205    /// If the block of data is successfully parsed, it returns the file data - otherwise is None.
206    pub fn read_from_offset(&mut self, offset: u64) -> Option<ByteBuffer> {
207        self.file
208            .seek(SeekFrom::Start(offset))
209            .expect("Unable to find offset in file.");
210
211        let file_info = FileInfo::read(&mut self.file).ok()?;
212
213        match file_info.file_type {
214            FileType::Empty => None,
215            FileType::Standard => self.read_standard_file(offset, &file_info),
216            FileType::Model => self.read_model_file(offset, &file_info),
217            FileType::Texture => self.read_texture_file(offset, &file_info),
218        }
219    }
220
221    /// Reads a standard file block.
222    fn read_standard_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<ByteBuffer> {
223        let standard_file_info = file_info.standard_info.as_ref()?;
224
225        let mut blocks: Vec<Block> = Vec::with_capacity(standard_file_info.num_blocks as usize);
226
227        for _ in 0..standard_file_info.num_blocks {
228            blocks.push(Block::read(&mut self.file).ok()?);
229        }
230
231        let mut data: Vec<u8> = Vec::with_capacity(file_info.file_size as usize);
232
233        let starting_position = offset + (file_info.size as u64);
234
235        for i in 0..standard_file_info.num_blocks {
236            data.append(
237                &mut read_data_block(
238                    &mut self.file,
239                    starting_position + (blocks[i as usize].offset as u64),
240                )
241                .expect("Failed to read data block."),
242            );
243        }
244
245        Some(data)
246    }
247
248    /// Reads a model file block.
249    fn read_model_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<ByteBuffer> {
250        let model_file_info = file_info.model_info.as_ref()?;
251
252        let mut buffer = Cursor::new(Vec::new());
253
254        let base_offset = offset + (file_info.size as u64);
255
256        let total_blocks = model_file_info.num.total();
257
258        let mut compressed_block_sizes: Vec<u16> = vec![0; total_blocks as usize];
259        let slice: &mut [u8] = to_u8_slice(&mut compressed_block_sizes);
260
261        self.file.read_exact(slice).ok()?;
262
263        let mut current_block = 0;
264
265        let mut vertex_data_offsets: [u32; 3] = [0; 3];
266        let mut vertex_data_sizes: [u32; 3] = [0; 3];
267
268        let mut index_data_offsets: [u32; 3] = [0; 3];
269        let mut index_data_sizes: [u32; 3] = [0; 3];
270
271        // start writing at 0x44
272        buffer.seek(SeekFrom::Start(0x44)).ok()?;
273
274        self.file
275            .seek(SeekFrom::Start(
276                base_offset + (model_file_info.offset.stack_size as u64),
277            ))
278            .ok()?;
279
280        // read from stack blocks
281        let mut read_model_blocks = |offset: u64, size: usize| -> Option<u64> {
282            self.file.seek(SeekFrom::Start(base_offset + offset)).ok()?;
283            let stack_start = buffer.position();
284            for _ in 0..size {
285                let last_pos = &self.file.stream_position().ok()?;
286
287                let data =
288                    read_data_block(&self.file, *last_pos).expect("Unable to read block data.");
289                // write to buffer
290                buffer.write_all(data.as_slice()).ok()?;
291
292                self.file
293                    .seek(SeekFrom::Start(
294                        last_pos + (compressed_block_sizes[current_block] as u64),
295                    ))
296                    .ok()?;
297                current_block += 1;
298            }
299
300            Some(buffer.position() - stack_start)
301        };
302
303        let stack_size = read_model_blocks(
304            model_file_info.offset.stack_size as u64,
305            model_file_info.num.stack_size as usize,
306        )? as u32;
307        let runtime_size = read_model_blocks(
308            model_file_info.offset.runtime_size as u64,
309            model_file_info.num.runtime_size as usize,
310        )? as u32;
311
312        let mut process_model_data =
313            |i: usize,
314             size: u32,
315             offset: u32,
316             offsets: &mut [u32; 3],
317             data_sizes: &mut [u32; 3]| {
318                if size != 0 {
319                    let current_vertex_offset = buffer.position() as u32;
320                    if i == 0 || current_vertex_offset != offsets[i - 1] {
321                        offsets[i] = current_vertex_offset;
322                    } else {
323                        offsets[i] = 0;
324                    }
325
326                    self.file
327                        .seek(SeekFrom::Start(base_offset + (offset as u64)))
328                        .ok();
329
330                    for _ in 0..size {
331                        let last_pos = self.file.stream_position().unwrap();
332
333                        let data = read_data_block(&self.file, last_pos)
334                            .expect("Unable to read raw model block!");
335
336                        buffer
337                            .write_all(data.as_slice())
338                            .expect("Unable to write to memory buffer!");
339
340                        data_sizes[i] += data.len() as u32;
341                        self.file
342                            .seek(SeekFrom::Start(
343                                last_pos + (compressed_block_sizes[current_block] as u64),
344                            ))
345                            .expect("Unable to seek properly.");
346                        current_block += 1;
347                    }
348                }
349            };
350
351        // process all 3 lods
352        for i in 0..3 {
353            // process vertices
354            process_model_data(
355                i,
356                model_file_info.num.vertex_buffer_size[i] as u32,
357                model_file_info.offset.vertex_buffer_size[i],
358                &mut vertex_data_offsets,
359                &mut vertex_data_sizes,
360            );
361
362            // TODO: process edges
363
364            // process indices
365            process_model_data(
366                i,
367                model_file_info.num.index_buffer_size[i] as u32,
368                model_file_info.offset.index_buffer_size[i],
369                &mut index_data_offsets,
370                &mut index_data_sizes,
371            );
372        }
373
374        let header = ModelFileHeader {
375            version: model_file_info.version,
376            stack_size,
377            runtime_size,
378            vertex_declaration_count: model_file_info.vertex_declaration_num,
379            material_count: model_file_info.material_num,
380            vertex_offsets: vertex_data_offsets,
381            index_offsets: index_data_offsets,
382            vertex_buffer_size: vertex_data_sizes,
383            index_buffer_size: index_data_sizes,
384            lod_count: model_file_info.num_lods,
385            index_buffer_streaming_enabled: model_file_info.index_buffer_streaming_enabled,
386            has_edge_geometry: model_file_info.edge_geometry_enabled,
387        };
388
389        buffer.seek(SeekFrom::Start(0)).ok()?;
390
391        header.write(&mut buffer).ok()?;
392
393        Some(buffer.into_inner())
394    }
395
396    /// Reads a texture file block.
397    fn read_texture_file(&mut self, offset: u64, file_info: &FileInfo) -> Option<ByteBuffer> {
398        let texture_file_info = file_info.texture_info.as_ref()?;
399
400        let mut data: Vec<u8> = Vec::with_capacity(file_info.file_size as usize);
401
402        // write the header if it exists
403        let mipmap_size = texture_file_info.lods[0].compressed_size;
404        if mipmap_size != 0 {
405            let original_pos = self.file.stream_position().ok()?;
406
407            self.file
408                .seek(SeekFrom::Start(offset + file_info.size as u64))
409                .ok()?;
410
411            let mut header = vec![0u8; texture_file_info.lods[0].compressed_offset as usize];
412            self.file.read_exact(&mut header).ok()?;
413
414            data.append(&mut header);
415
416            self.file.seek(SeekFrom::Start(original_pos)).ok()?;
417        }
418
419        for i in 0..texture_file_info.num_blocks {
420            let mut running_block_total = (texture_file_info.lods[i as usize].compressed_offset
421                as u64)
422                + offset
423                + (file_info.size as u64);
424
425            for _ in 0..texture_file_info.lods[i as usize].block_count {
426                let original_pos = self.file.stream_position().ok()?;
427
428                data.append(&mut read_data_block(&self.file, running_block_total)?);
429
430                self.file.seek(SeekFrom::Start(original_pos)).ok()?;
431
432                running_block_total += self.file.read_le::<i16>().ok()? as u64;
433            }
434        }
435
436        Some(data)
437    }
438}
439
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;

    #[test]
    fn test_invalid() {
        let path: PathBuf = [env!("CARGO_MANIFEST_DIR"), "resources/tests", "random"]
            .iter()
            .collect();

        let mut dat = SqPackData::from_existing(path.to_str().unwrap()).unwrap();

        let empty_file_info = FileInfo {
            size: 0,
            file_type: FileType::Empty,
            file_size: 0,
            standard_info: None,
            model_info: None,
            texture_info: None,
        };

        // Invalid data must never panic — every reader just yields None.
        assert!(dat.read_from_offset(0).is_none());
        assert!(dat.read_standard_file(0, &empty_file_info).is_none());
        assert!(dat.read_model_file(0, &empty_file_info).is_none());
        assert!(dat.read_texture_file(0, &empty_file_info).is_none());
    }
}
469}