refactor things around

This commit is contained in:
Hamcha 2024-05-05 19:08:52 +02:00
parent 81a66e6dc7
commit 2e642f2e4d
Signed by: hamcha
GPG key ID: 1669C533B8CF6D89
4 changed files with 202 additions and 166 deletions

View file

@ -4,15 +4,6 @@ pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{}); const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{}); const optimize = b.standardOptimizeOption(.{});
const dolLib = b.addStaticLibrary(.{
.name = "elphin",
.root_source_file = b.path("src/dol.zig"),
.target = target,
.optimize = optimize,
});
b.installArtifact(dolLib);
const exe = b.addExecutable(.{ const exe = b.addExecutable(.{
.name = "elphin", .name = "elphin",
.root_source_file = b.path("src/main.zig"), .root_source_file = b.path("src/main.zig"),

View file

@ -0,0 +1,128 @@
const std = @import("std");
const elf = @import("elf.zig");
const oldzig = @import("oldzig.zig");
/// Maximum number of TEXT segments a DOL file can describe.
pub const MaximumTextSegments = 7;
/// Maximum number of DATA segments a DOL file can describe.
pub const MaximumDataSegments = 11;

/// On-disk DOL executable header; serialized big-endian.
/// `extern` layout: field order and sizes must match the DOL format
/// exactly (64 u32 fields = 0x100 bytes total).
pub const DolHeader = extern struct {
    /// File offset of each TEXT segment's payload within the DOL.
    textOffsets: [MaximumTextSegments]u32,
    /// File offset of each DATA segment's payload within the DOL.
    dataOffsets: [MaximumDataSegments]u32,
    /// Target memory address each TEXT segment is loaded at.
    textAddress: [MaximumTextSegments]u32,
    /// Target memory address each DATA segment is loaded at.
    dataAddress: [MaximumDataSegments]u32,
    /// Size in bytes of each TEXT segment.
    textSize: [MaximumTextSegments]u32,
    /// Size in bytes of each DATA segment.
    dataSize: [MaximumDataSegments]u32,
    /// Memory address of the BSS region.
    BSSAddress: u32,
    /// Size in bytes of the BSS region.
    BSSSize: u32,
    /// Program entry point address.
    entrypoint: u32,
    /// Padding up to the 0x100-byte header size; written as zeroes.
    reserved: [7]u32,
};

/// In-memory plan for producing a DOL file: the header to be written
/// plus, for each used segment slot, where that segment's bytes live
/// in the source ELF file.
pub const DolMap = struct {
    header: DolHeader,
    /// Number of TEXT slots in use (<= MaximumTextSegments).
    textCount: u32,
    /// Number of DATA slots in use (<= MaximumDataSegments).
    dataCount: u32,
    /// Byte offset of each TEXT segment in the original ELF file.
    originalFileTextOffset: [MaximumTextSegments]u32,
    /// Byte offset of each DATA segment in the original ELF file.
    originalFileDataOffset: [MaximumDataSegments]u32,
};
const DOLAlignment = 64;
/// Builds a DolMap from the loadable segments extracted from an ELF file.
/// Lays out every TEXT segment, then every DATA segment, sequentially after
/// the header, aligning each segment's file offset to DOLAlignment bytes.
/// BSS info and the entry point are copied through unchanged.
pub fn createDOLMapping(segments: elf.ELFSegments) DolMap {
    // First usable file offset: the header size rounded up to the alignment.
    const firstSegmentOffset = std.mem.alignForward(u32, @sizeOf(DolHeader), DOLAlignment);

    // Start from an all-zero header; only slots below textCount/dataCount
    // get real values, the rest stay zero as the DOL format expects.
    var dolMap = DolMap{
        .header = .{
            .textOffsets = .{0} ** MaximumTextSegments,
            .dataOffsets = .{0} ** MaximumDataSegments,
            .textAddress = .{0} ** MaximumTextSegments,
            .dataAddress = .{0} ** MaximumDataSegments,
            .textSize = .{0} ** MaximumTextSegments,
            .dataSize = .{0} ** MaximumDataSegments,
            .BSSAddress = segments.bssAddress,
            .BSSSize = segments.bssSize,
            .entrypoint = segments.entryPoint,
            .reserved = .{0} ** 7,
        },
        .textCount = segments.textCount,
        .dataCount = segments.dataCount,
        .originalFileTextOffset = undefined,
        .originalFileDataOffset = undefined,
    };

    // Pre-seed slot 0 so a dummy offset is present even when there are no
    // TEXT or DATA segments; the loops below overwrite it when slots exist.
    dolMap.header.textOffsets[0] = firstSegmentOffset;
    dolMap.header.dataOffsets[0] = firstSegmentOffset;

    var currentPosition = firstSegmentOffset;
    for (0..segments.textCount) |i| {
        // Set offset to aligned address
        dolMap.header.textOffsets[i] = currentPosition;
        // Copy over text segment info
        dolMap.originalFileTextOffset[i] = segments.text[i].offset;
        dolMap.header.textAddress[i] = segments.text[i].address;
        dolMap.header.textSize[i] = segments.text[i].size;
        // Get next aligned position
        currentPosition = std.mem.alignForward(u32, currentPosition + dolMap.header.textSize[i], DOLAlignment);
    }
    for (0..segments.dataCount) |i| {
        // Set offset to aligned address
        dolMap.header.dataOffsets[i] = currentPosition;
        // Copy over data segment info
        dolMap.originalFileDataOffset[i] = segments.data[i].offset;
        dolMap.header.dataAddress[i] = segments.data[i].address;
        dolMap.header.dataSize[i] = segments.data[i].size;
        // Get next aligned position
        currentPosition = std.mem.alignForward(u32, currentPosition + dolMap.header.dataSize[i], DOLAlignment);
    }
    return dolMap;
}
/// Serializes `map` as a DOL file: writes the big-endian header, then copies
/// every TEXT and DATA segment's bytes from `input` (the source ELF) to the
/// file offsets recorded in the header. Seeks both files; callers should not
/// rely on either file's position afterwards.
pub fn writeDOL(map: DolMap, input: std.fs.File, output: std.fs.File) !void {
    const src = input.reader();
    const dst = output.writer();

    // The header goes first, byte-swapped to big-endian as the format requires.
    try oldzig.writeStructEndian(dst, map.header, std.builtin.Endian.big);

    // Fixed 1 MiB staging buffer used to pump bytes between the two files.
    var pumpBuffer = std.fifo.LinearFifo(u8, .{ .Static = 1024 * 1024 }).init();
    defer pumpBuffer.deinit();

    // Copy TEXT segments.
    var i: usize = 0;
    while (i < map.textCount) : (i += 1) {
        std.log.debug("Copying text segment {d} at 0x{x} -> 0x{x}", .{ i, map.header.textAddress[i], map.header.textOffsets[i] });
        // Position source at the segment's bytes in the ELF...
        try input.seekTo(map.originalFileTextOffset[i]);
        // ...and destination at the segment's slot in the DOL.
        try output.seekTo(map.header.textOffsets[i]);
        // Cap the copy at exactly the segment's size.
        var segmentReader = std.io.limitedReader(src, map.header.textSize[i]);
        try pumpBuffer.pump(&segmentReader, dst);
    }

    // Copy DATA segments, same scheme as above.
    i = 0;
    while (i < map.dataCount) : (i += 1) {
        std.log.debug("Copying data segment {d} at 0x{x} -> 0x{x}", .{ i, map.header.dataAddress[i], map.header.dataOffsets[i] });
        try input.seekTo(map.originalFileDataOffset[i]);
        try output.seekTo(map.header.dataOffsets[i]);
        var segmentReader = std.io.limitedReader(src, map.header.dataSize[i]);
        try pumpBuffer.pump(&segmentReader, dst);
    }
}

View file

@ -1,5 +1,5 @@
const std = @import("std"); const std = @import("std");
const oldzig = @import("oldzig.zig"); const dol = @import("dol.zig");
// Cool website for ELF info: // Cool website for ELF info:
// https://www.man7.org/linux/man-pages/man5/elf.5.html // https://www.man7.org/linux/man-pages/man5/elf.5.html
@ -27,9 +27,6 @@ const ELFVersion_Current = 1;
const ELFType_Executable = 2; const ELFType_Executable = 2;
const ELFMachine_PowerPC = 20; const ELFMachine_PowerPC = 20;
const MaximumTextSegments = 7;
const MaximumDataSegments = 11;
const PSFlags_Executable = 1; const PSFlags_Executable = 1;
const PSFlags_Writable = 2; const PSFlags_Writable = 2;
const PSFlags_Readable = 4; const PSFlags_Readable = 4;
@ -62,51 +59,36 @@ const ELFProgramHeader = extern struct {
p_align: u32, p_align: u32,
}; };
pub const DolHeader = extern struct {
text_off: [7]u32,
data_off: [11]u32,
text_addr: [7]u32,
data_addr: [11]u32,
text_size: [7]u32,
data_size: [11]u32,
bss_addr: u32,
bss_size: u32,
entry: u32,
pad: [7]u32,
};
pub const DolMap = struct {
header: DolHeader,
text_cnt: u32,
data_cnt: u32,
text_elf_off: [7]u32,
data_elf_off: [11]u32,
flags: u32,
};
const DolHasBSS = 1; const DolHasBSS = 1;
pub fn readELF(file: std.fs.File) !DolMap { pub const SegmentInfo = struct {
offset: u32,
size: u32,
address: u32,
};
// Create dol map pub const ELFSegments = struct {
var dolMap = DolMap{ entryPoint: u32,
.header = .{ textCount: u4,
.text_off = .{0} ** 7, text: [dol.MaximumTextSegments]SegmentInfo,
.data_off = .{0} ** 11, dataCount: u4,
.text_addr = .{0} ** 7, data: [dol.MaximumDataSegments]SegmentInfo,
.data_addr = .{0} ** 11, hasBSS: bool,
.text_size = .{0} ** 7, bssAddress: u32,
.data_size = .{0} ** 11, bssSize: u32,
.bss_addr = 0, };
.bss_size = 0,
.entry = 0, pub fn readELF(file: std.fs.File) !ELFSegments {
.pad = .{0} ** 7, // Create segment map
}, var elfMap: ELFSegments = .{
.text_cnt = 0, .entryPoint = undefined,
.data_cnt = 0, .textCount = 0,
.text_elf_off = undefined, .text = undefined,
.data_elf_off = undefined, .dataCount = 0,
.flags = 0, .data = undefined,
.hasBSS = false,
.bssAddress = 0,
.bssSize = 0,
}; };
// Read header // Read header
@ -116,14 +98,14 @@ pub fn readELF(file: std.fs.File) !DolMap {
try checkELFHeader(header); try checkELFHeader(header);
// Get entry point // Get entry point
dolMap.header.entry = header.e_entry; elfMap.entryPoint = header.e_entry;
// Read program headers // Read program headers
const phnum = header.e_phnum; const programCount = header.e_phnum;
const phoff = header.e_phoff; const programOffset = header.e_phoff;
// Sanity checks // Sanity checks
if (phnum == 0 or phoff == 0) { if (programCount == 0 or programOffset == 0) {
return ELFError.MissingProgramHeader; return ELFError.MissingProgramHeader;
} }
if (header.e_phentsize != @sizeOf(ELFProgramHeader)) { if (header.e_phentsize != @sizeOf(ELFProgramHeader)) {
@ -131,38 +113,38 @@ pub fn readELF(file: std.fs.File) !DolMap {
} }
// Read program headers // Read program headers
try file.seekTo(phoff); try file.seekTo(programOffset);
for (0..phnum) |_| { for (0..programCount) |_| {
const programHeader = try reader.readStructEndian(ELFProgramHeader, std.builtin.Endian.big); const programHeader = try reader.readStructEndian(ELFProgramHeader, std.builtin.Endian.big);
// Skip non-loadable segments // Skip non-loadable segments
if (programHeader.p_type != 1) { if (programHeader.p_type != 1) {
std.debug.print("Skipping non-loadable segment at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Skipping non-loadable segment at 0x{x}", .{programHeader.p_vaddr});
continue; continue;
} }
// Skip empty segments // Skip empty segments
if (programHeader.p_memsz == 0) { if (programHeader.p_memsz == 0) {
std.debug.print("Skipping empty segment at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Skipping empty segment at 0x{x}", .{programHeader.p_vaddr});
continue; continue;
} }
// Check if segment is readable // Check if segment is readable
if (programHeader.p_flags & PSFlags_Readable == 0) { if (programHeader.p_flags & PSFlags_Readable == 0) {
std.debug.print("Warning: non-readable segment at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Warning: non-readable segment at 0x{x}", .{programHeader.p_vaddr});
} }
// If the segment is executable, it's a TEXT segment // If the segment is executable, it's a TEXT segment
if (programHeader.p_flags & PSFlags_Executable != 0) { if (programHeader.p_flags & PSFlags_Executable != 0) {
// Do we have too many text segments? // Do we have too many text segments?
if (dolMap.text_cnt >= MaximumTextSegments) { if (elfMap.textCount >= dol.MaximumTextSegments) {
return ELFError.TooManyTextSegments; return ELFError.TooManyTextSegments;
} }
// Check if segment is writable // Check if segment is writable
if (programHeader.p_flags & PSFlags_Writable != 0) { if (programHeader.p_flags & PSFlags_Writable != 0) {
std.debug.print("Warning: segment at 0x{x} is both executable and writable\n", .{programHeader.p_vaddr}); std.log.debug("Warning: segment at 0x{x} is both executable and writable", .{programHeader.p_vaddr});
} }
// Check if segment has valid memory size // Check if segment has valid memory size
@ -173,129 +155,63 @@ pub fn readELF(file: std.fs.File) !DolMap {
// Check if there's leftover space // Check if there's leftover space
if (programHeader.p_filesz < programHeader.p_memsz) { if (programHeader.p_filesz < programHeader.p_memsz) {
// Add as BSS segment of whatever is left between the file and memory sizes // Add as BSS segment of whatever is left between the file and memory sizes
// TODO: why?! addOrExtendBSS(&elfMap, programHeader.p_paddr + programHeader.p_filesz, programHeader.p_memsz - programHeader.p_filesz);
add_bss(&dolMap, programHeader.p_paddr + programHeader.p_filesz, programHeader.p_memsz - programHeader.p_filesz); std.log.debug("Found bss segment (TEXT) at 0x{x}", .{programHeader.p_paddr + programHeader.p_filesz});
std.debug.print("Found bss segment (TEXT) at 0x{x}\n", .{programHeader.p_paddr + programHeader.p_filesz});
} }
std.debug.print("Found text segment at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Found text segment at 0x{x}", .{programHeader.p_vaddr});
dolMap.header.text_addr[dolMap.text_cnt] = programHeader.p_paddr; elfMap.text[elfMap.textCount] = .{
dolMap.header.text_size[dolMap.text_cnt] = programHeader.p_filesz; .address = programHeader.p_paddr,
dolMap.text_elf_off[dolMap.text_cnt] = programHeader.p_offset; .size = programHeader.p_filesz,
.offset = programHeader.p_offset,
};
dolMap.text_cnt += 1; elfMap.textCount += 1;
} else { } else {
// DATA or BSS segment // DATA or BSS segment
// TODO: ???? // TODO: ????
if (programHeader.p_filesz == 0) { if (programHeader.p_filesz == 0) {
add_bss(&dolMap, programHeader.p_paddr, programHeader.p_memsz); addOrExtendBSS(&elfMap, programHeader.p_paddr, programHeader.p_memsz);
std.debug.print("Found bss segment (DATA) at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Found bss segment (DATA) at 0x{x}", .{programHeader.p_vaddr});
continue; continue;
} }
// Do we have too many data segments? // Do we have too many data segments?
if (dolMap.data_cnt >= MaximumDataSegments) { if (elfMap.dataCount >= dol.MaximumDataSegments) {
return ELFError.TooManyDataSegments; return ELFError.TooManyDataSegments;
} }
std.debug.print("Found data segment at 0x{x}\n", .{programHeader.p_vaddr}); std.log.debug("Found data segment at 0x{x}", .{programHeader.p_vaddr});
dolMap.header.data_addr[dolMap.data_cnt] = programHeader.p_paddr; elfMap.data[elfMap.dataCount] = .{
dolMap.header.data_size[dolMap.data_cnt] = programHeader.p_filesz; .address = programHeader.p_paddr,
dolMap.data_elf_off[dolMap.data_cnt] = programHeader.p_offset; .size = programHeader.p_filesz,
.offset = programHeader.p_offset,
};
dolMap.data_cnt += 1; elfMap.dataCount += 1;
} }
} }
return dolMap; return elfMap;
} }
// I don't understand what this does fn addOrExtendBSS(map: *ELFSegments, addr: u32, size: u32) void {
fn add_bss(map: *DolMap, addr: u32, size: u32) void { // If we already have a BSS segment and it lines up, extend it
if (map.flags & DolHasBSS != 0) { if (map.hasBSS) {
const originalAddr = map.header.bss_addr; const originalAddr = map.bssAddress;
const originalSize = map.header.bss_size; const originalSize = map.bssSize;
if ((originalAddr + originalSize) == addr) { if ((originalAddr + originalSize) == addr) {
map.header.bss_size = originalSize + size; map.bssSize = originalSize + size;
}
} else {
map.header.bss_addr = addr;
map.header.bss_size = size;
map.flags |= DolHasBSS;
} }
return;
} }
const DOLAlignment = 64; map.bssAddress = addr;
pub fn alignSegments(map: *DolMap) void { map.bssSize = size;
std.debug.print("Mapping DOL to 64 byte alignment\n", .{}); map.hasBSS = true;
var currentPosition = std.mem.alignForward(u32, @sizeOf(DolHeader), DOLAlignment);
for (0..map.text_cnt) |i| {
std.debug.print(" - Mapping text segment {d} at 0x{x} -> 0x{x}\n", .{ i, map.header.text_addr[i], currentPosition });
map.header.text_off[i] = currentPosition;
currentPosition = std.mem.alignForward(u32, currentPosition + map.header.text_size[i], DOLAlignment);
}
for (0..map.data_cnt) |i| {
std.debug.print(" - Mapping data segment {d} at 0x{x} -> 0x{x}\n", .{ i, map.header.data_addr[i], currentPosition });
map.header.data_off[i] = currentPosition;
currentPosition = std.mem.alignForward(u32, currentPosition + map.header.data_size[i], DOLAlignment);
}
// Add dummy segments if no TEXT or DATA segments are present
if (map.text_cnt == 0) {
map.header.text_off[0] = std.mem.alignForward(u32, @sizeOf(DolHeader), DOLAlignment);
}
if (map.data_cnt == 0) {
map.header.data_off[0] = std.mem.alignForward(u32, @sizeOf(DolHeader), DOLAlignment);
}
}
pub fn writeDOL(map: DolMap, input: std.fs.File, output: std.fs.File) !void {
const reader = input.reader();
const writer = output.writer();
// Write header
try oldzig.writeStructEndian(writer, map.header, std.builtin.Endian.big);
// Create buffer for pump
var fifo = std.fifo.LinearFifo(u8, .{ .Static = 1024 * 1024 }).init();
defer fifo.deinit();
// Copy over text segments
for (0..map.text_cnt) |i| {
// Seek to text segment
try input.seekTo(map.text_elf_off[i]);
// Create limited reader to segment size
var limitedReader = std.io.limitedReader(reader, map.header.text_size[i]);
// Seek to destination
try output.seekTo(map.header.text_off[i]);
// Copy segment
std.debug.print("Copying text segment {d} at 0x{x} -> 0x{x}\n", .{ i, map.header.text_addr[i], map.header.text_off[i] });
try fifo.pump(&limitedReader, writer);
}
// Copy over data segments
for (0..map.data_cnt) |i| {
// Seek to data segment
try input.seekTo(map.data_elf_off[i]);
// Create limited reader to segment size
var limitedReader = std.io.limitedReader(reader, map.header.data_size[i]);
// Seek to destination
try output.seekTo(map.header.data_off[i]);
// Copy segment
std.debug.print("Copying data segment {d} at 0x{x} -> 0x{x}\n", .{ i, map.header.data_addr[i], map.header.data_off[i] });
try fifo.pump(&limitedReader, writer);
}
} }
fn checkELFHeader(header: ELFHeader) !void { fn checkELFHeader(header: ELFHeader) !void {

View file

@ -1,5 +1,6 @@
const std = @import("std"); const std = @import("std");
const elf = @import("elf.zig"); const elf = @import("elf.zig");
const dol = @import("dol.zig");
pub fn main() !void { pub fn main() !void {
// Get allocator // Get allocator
@ -12,7 +13,7 @@ pub fn main() !void {
defer std.process.argsFree(allocator, args); defer std.process.argsFree(allocator, args);
if (args.len < 3) { if (args.len < 3) {
std.debug.print("Usage: {s} <input.elf> <output.dol>\n", .{args[0]}); std.log.err("Usage: {s} <input.elf> <output.dol>", .{args[0]});
std.process.exit(1); std.process.exit(1);
} }
@ -22,13 +23,13 @@ pub fn main() !void {
// Read input // Read input
const input = try std.fs.cwd().openFile(inputPath, .{}); const input = try std.fs.cwd().openFile(inputPath, .{});
defer input.close(); defer input.close();
var map = try elf.readELF(input); const map = try elf.readELF(input);
// Align segments to 64 byte boundaries // Align segments to 64 byte boundaries
elf.alignSegments(&map); const dolMap = dol.createDOLMapping(map);
// Write header and copy over segments from input // Write header and copy over segments from input
const output = try std.fs.cwd().createFile(outputPath, .{}); const output = try std.fs.cwd().createFile(outputPath, .{});
defer output.close(); defer output.close();
try elf.writeDOL(map, input, output); try dol.writeDOL(dolMap, input, output);
} }