diff --git a/bundle_pack/Cargo.lock b/bundle_pack/Cargo.lock
deleted file mode 100644
index a2fc779..0000000
--- a/bundle_pack/Cargo.lock
+++ /dev/null
@@ -1,4 +0,0 @@
-[root]
-name = "bundle_pack"
-version = "0.0.1"
-
diff --git a/bundle_pack/Cargo.toml b/bundle_pack/Cargo.toml
deleted file mode 100644
index 227bcb4..0000000
--- a/bundle_pack/Cargo.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-[package]
-
-name = "bundle_pack"
-version = "0.0.1"
-authors = ["dpethes "]
diff --git a/bundle_pack/src/main.rs b/bundle_pack/src/main.rs
deleted file mode 100644
index 0ed7e82..0000000
--- a/bundle_pack/src/main.rs
+++ /dev/null
@@ -1,102 +0,0 @@
-#![allow(unstable)]
-extern crate core;
-extern crate collections;
-
-use std::io::File;
-use std::os;
-use collections::string::String;
-
-
-struct FileEntry {
-    filename: String,
-    length: u32,
-    offset: u32
-}
-
-fn unpack_bundle(fpath: &Path) {
-    // check file magic
-    let mut f = File::open(fpath).unwrap();
-    let mut magic = String::new();
-    for i in 0..4 {
-        let c:u8 = f.read_u8().unwrap();
-        magic.push(c as char);
-    }
-    println!("Magic: {}", magic);
-
-    // file data size (= filesize - 8)
-    let asize = f.read_be_u32().unwrap();
-    println!("Archive size: {}", asize);
-
-    // bundle header
-    let mut bheader = String::new();
-    for i in 0..4 {
-        let c:u8 = f.read_u8().unwrap();
-        bheader.push(c as char);
-    }
-    let bsize = f.read_be_u32().unwrap();
-    println!("Bundle header: {} size: {}", bheader, bsize);
-
-    // skip padding
-    f.seek(65570, std::io::SeekStyle::SeekCur);
-
-    // read file entries
-    let file_entries = (bsize - 65570) / 252;
-    println!("File entries: {}", file_entries);
-
-    let mut files = Vec::new();
-    for i in 0..file_entries {
-        let mut fname = String::new();
-        let input_fname = f.read_exact(200).unwrap();
-        for byte in input_fname.iter() {
-            let c = byte ^ 0xAA;
-            if c == 0 {
-                break;
-            }
-            fname.push(c as char);
-        }
-        let fsize = f.read_le_u32().unwrap();
-        let foffset = f.read_le_u32().unwrap();
-        println!("File name: {} offset: {} size: {}", fname, foffset, fsize);
-        f.read_exact(44).unwrap();
-
-        let file = FileEntry {filename: fname, length: fsize, offset: foffset};
-        files.push(file);
-    }
-
-    // bundle data
-    let mut bdata = String::new();
-    for i in 0..4 {
-        let c:u8 = f.read_u8().unwrap();
-        bdata.push(c as char);
-    }
-    let bdata_size = f.read_be_u32().unwrap();
-    println!("Bundle data: {} size: {}", bdata, bdata_size);
-
-    // unpack files
-    let base_pos = f.tell().unwrap();
-    for file in files.iter() {
-        let mut split = file.filename.split_str("\\");
-        let vec: Vec<&str> = split.collect();
-        let last = vec[vec.len() - 1];
-        println!("File name: {} offset: {} size: {}", last, file.offset, file.length);
-
-        let fpos = base_pos + file.offset as u64;
-        f.seek((fpos as i64), std::io::SeekStyle::SeekSet);
-        let databytes = f.read_exact(file.length as usize).unwrap();
-        let mut fdest = File::create(&Path::new(last));
-        fdest.write(databytes.as_slice());
-    }
-}
-
-fn main() {
-    let args = os::args();
-    if args.len() < 2 {
-        println!("No input specified!"); //first param = path
-        unpack_bundle(&Path::new("BUNDLE.000")); //debug - remove
-    } else {
-        println!("Trying to parse file {0}", args[1]);
-        let ref fpath = args[1];
-        unpack_bundle(&Path::new(fpath));
-    }
-    print!("Done.");
-}
diff --git a/bundle_unpack/bundle_unpacker.py b/bundle_unpack/bundle_unpacker.py
new file mode 100644
index 0000000..c896c3f
--- /dev/null
+++ b/bundle_unpack/bundle_unpacker.py
@@ -0,0 +1,141 @@
+import struct
+import argparse
+import os
+
+debug = True
+padding = 65570
+file_entry_size = 252
+crypt_key = 170 # 0xAA
+
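+# Archive layout, as inferred from the reader below (not an official spec):
+#   "BUND"         4-byte file magic
+#   archive size   u32 big-endian (file size minus the first 8 bytes)
+#   "BNHD"         4-byte bundle header tag
+#   header size    u32 big-endian
+#   padding        65570 bytes, skipped
+#   file entries   (header size - padding) / 252 entries of 252 bytes each:
+#                  200-byte filename XORed with 0xAA, u32 little-endian length,
+#                  u32 little-endian offset, 44 bytes of unknown data
+#   data section   4-byte tag, u32 big-endian total data size, then the raw file
+#                  data; entry offsets are relative to the end of that size field
+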
+def check_expected(value, expected):
+    assert value == expected, f"Invalid format: expected {repr(expected)}, got {repr(value)}"
+
+def do_unpack(input_file_path, output_dir_path):
+    with open(input_file_path, "rb") as f:
+        header = f.read(4)
+        expected = b"BUND"
+        check_expected(header, expected)
+        # big endian
+        archive_size = struct.unpack(">I", f.read(4))[0]
+        print(f"Got archive size {archive_size} bytes")
+        bundle_header = f.read(4)
+        expected = b"BNHD"
+        check_expected(bundle_header, expected)
+        # big endian
+        bundle_header_size = struct.unpack(">I", f.read(4))[0]
+        print(f"Got bundle header size {bundle_header_size} bytes")
+        f.seek(padding, 1)
+        fe = 0
+        file_entries = []
+        remainder = (bundle_header_size - padding) % file_entry_size
+        assert remainder == 0, "Invalid format: bundle header size (minus padding) is not a multiple of the expected file entry size!"
+
+        num_file_entries = (bundle_header_size - padding) // file_entry_size
+        print(f"Calculated {num_file_entries} entries")
+        while fe < num_file_entries:
+            filename_encrypted = f.read(200)
+            filename_decrypted = bytearray()
+            for e in filename_encrypted:
+                d = e ^ crypt_key
+                if d == 0:
+                    break
+                filename_decrypted.append(d)
+            # little endian
+            filedata_length = struct.unpack("<I", f.read(4))[0]
+            # little endian
+            filedata_offset = struct.unpack("<I", f.read(4))[0]
+            # skip the remaining 44 bytes of the entry (purpose unknown)
+            f.seek(44, 1)
+            file_entries.append({"filename": filename_decrypted.decode("ascii"),
+                                 "length": filedata_length,
+                                 "offset": filedata_offset})
+            fe += 1
+        # 4-byte data section tag (not validated here), then the total data size
+        bundle_data = f.read(4)
+        # big endian
+        file_data_size = struct.unpack(">I", f.read(4))[0]
+        print(f"Got total file data size {file_data_size} bytes")
+        base_pos = f.tell()
+        for fe in file_entries:
+            start_pos = base_pos + fe['offset']
+            f.seek(start_pos)
+            file_data = f.read(fe["length"])
+            # packed filenames start with a path separator; strip it before joining
+            full_filename = os.path.join(output_dir_path, fe['filename'][1:])
+            print(f"{full_filename} (filepos {start_pos}, length {fe['length']})")
+            os.makedirs(os.path.dirname(full_filename), exist_ok=True)
+            with open(full_filename, "wb+") as fo:
+                fo.write(file_data)
+                print(f"Wrote {len(file_data)} bytes")
+
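+# Notes on do_pack below: it mirrors the reader by emitting one 252-byte entry per
+# input file (XOR-0xAA name, little-endian length and offset) followed by the
+# concatenated file data. The 65570-byte padding block, the 44 trailing bytes of
+# each entry, and the 4-byte data section tag are not fully understood and are
+# zero-filled here, so packing stays disabled in main() until the format is
+# confirmed.
+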
+def do_pack(input_dir_path, output_file_path):
+    all_file_pairs = []
+    for dirpath, dirnames, files in os.walk(input_dir_path, topdown=False):
+        for file in files:
+            unpacked_full_filename = os.path.join(dirpath, file)
+            rel_full_filename = os.path.relpath(unpacked_full_filename, input_dir_path)
+            packed_filename = os.path.join(os.path.sep, rel_full_filename)
+            all_file_pairs.append((unpacked_full_filename, packed_filename))
+
+    all_file_data = {}
+    offset = 0
+    for unpacked_fn, packed_fn in all_file_pairs:
+        with open(unpacked_fn, "rb") as f:
+            file_data = f.read()
+            file_size = len(file_data)
+            all_file_data[(unpacked_fn, packed_fn)] = {"size": file_size, "offset": offset, "file_data": file_data}
+            offset += file_size
+    N = len(all_file_pairs)
+    bundle_header_size = padding + N * file_entry_size
+    total_archive_size = 4*4 + bundle_header_size + 2*4 + offset
+
+    with open(output_file_path, "wb+") as f:
+        f.write(b"BUND")
+        f.write(struct.pack(">I", total_archive_size - 2*4)) # big endian
+        f.write(b"BNHD")
+        f.write(struct.pack(">I", bundle_header_size)) # big endian
+        f.write(bytearray(padding))
+        file_data = bytearray()
+        for unpacked_fn, packed_fn in all_file_pairs:
+            decrypted_fn = packed_fn.encode("ascii")
+            if len(decrypted_fn) > 199:
+                print(f"Warning, truncating packed filename {packed_fn} to 199 characters (plus null terminator)")
+                decrypted_fn = decrypted_fn[:199]
+            encrypted_fn = bytearray(200)
+            for i, b in enumerate(decrypted_fn):
+                e = b ^ crypt_key
+                encrypted_fn[i] = e
+            encrypted_fn[i+1] = crypt_key # null terminator (0 ^ crypt_key = crypt_key)
+            f.write(encrypted_fn)
+            file_info_dict = all_file_data[(unpacked_fn, packed_fn)]
+            # little endian
+            f.write(struct.pack("<I", file_info_dict["size"]))
+            # little endian
+            f.write(struct.pack("<I", file_info_dict["offset"]))
+            # 44 trailing bytes per entry, purpose unknown -- zero-filled here
+            f.write(bytearray(44))
+            file_data += file_info_dict["file_data"]
+        # TODO: the 4-byte data section tag is not confirmed yet; zero-filled for now
+        f.write(bytearray(4))
+        f.write(struct.pack(">I", len(file_data))) # big endian
+        f.write(file_data)
+
+def main():
+    parser = argparse.ArgumentParser(description="Unpack Rogue Squadron BUNDLE files")
+    parser.add_argument("--input-path", metavar="PATH", type=str)
+    parser.add_argument("--output-path", metavar="PATH", type=str)
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument("--unpack", action="store_true")
+    group.add_argument("--pack", action="store_true")
+    args = parser.parse_args()
+    if args.pack:
+        raise NotImplementedError("Currently unable to produce compatible packed BUNDLE.00x files -- more research is needed. "
+                                  "See do_pack in the source code.")
+        #do_pack(args.input_path, args.output_path)
+    else:
+        do_unpack(args.input_path, args.output_path)
+
+if __name__ == "__main__":
+    main()
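
For quick reference, a minimal usage sketch for the new script; the archive name and
output directory are placeholders, and the programmatic call assumes
bundle_unpacker.py is importable from the working directory:

    # from the command line:
    #   python bundle_unpacker.py --unpack --input-path BUNDLE.000 --output-path extracted
    # or from Python:
    import bundle_unpacker
    bundle_unpacker.do_unpack("BUNDLE.000", "extracted")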