Solved day 9 part 2. This was painful...

Signed-off-by: jmug <u.g.a.mariano@gmail.com>
Mariano Uvalle 2024-12-10 23:15:47 -08:00
parent bf7ebc760a
commit 62723e5129


@@ -7,53 +7,85 @@ inline fn normalize(c: u8) u8 {
     return c - '0';
 }

-const FSEntryType = enum {
-    File,
-    EmptySpace,
-};
-
-const FSEntry = struct {
-    size: usize,
-    typ: FSEntryType,
-    // Only appicable to Files, may be best
-    // expressed as a union?
-    id: usize,
-};
-
-// Could use an array list, this struct seems better since the max size is known.
-// Would be interesting to analyse the implications.
-const FSEntryIterator = struct {
-    size: usize = 0,
-    ptr: usize = 0,
-    items: [20000]FSEntry = undefined,
-
-    fn add(self: *FSEntryIterator, e: FSEntry) void {
-        self.items[self.size] = e;
-        self.size += 1;
-    }
-
-    fn peek(self: *FSEntryIterator) ?FSEntry {
-        if (self.ptr == self.size) {
-            return null;
-        }
-        return self.items[self.ptr];
-    }
-
-    fn next(self: *FSEntryIterator) ?FSEntry {
-        if (self.ptr == self.size) {
-            return null;
-        }
-        defer self.ptr += 1;
-        return self.items[self.ptr];
-    }
-};
-
-fn fsEntryFromIdx(dense: []u8, relocated: [20000]bool, idx: usize) FSEntry {
-    return FSEntry{
-        .size = normalize(dense[idx]),
-        .typ = if (relocated[idx]) .EmptySpace else if (idx % 2 == 0) .File else .EmptySpace,
-        .id = idx / 2,
-    };
-}
+const EntryType = enum {
+    File,
+    EmptyRegion,
+};
+
+const File = struct {
+    id: usize,
+    idx: usize,
+    size: usize,
+    relocated: bool = false,
+};
+
+const EmptyRegion = struct {
+    idx: usize,
+    size: usize,
+    insertions: [10]File = undefined,
+    insertions_pos: usize = 0,
+    insertions_size: usize = 0,
+
+    fn actualSize(self: *EmptyRegion) usize {
+        return self.size - self.insertions_size;
+    }
+
+    fn addToInsertions(self: *EmptyRegion, f: File) void {
+        self.insertions[self.insertions_pos] = f;
+        self.insertions_pos += 1;
+        self.insertions_size += f.size;
+    }
+};
+
+const Entry = union(EntryType) {
+    File: File,
+    EmptyRegion: EmptyRegion,
+};
+
+const EntryIterator = struct {
+    pos: usize = 0,
+    entries: std.ArrayList(Entry),
+
+    // peek gets the entry at pos without
+    // moving it.
+    fn peek(self: *EntryIterator) ?Entry {
+        if (self.pos >= self.entries.items.len) {
+            return null;
+        }
+        return self.entries.items[self.pos];
+    }
+
+    // shift gets the entry at pos and moves pos.
+    fn shift(self: *EntryIterator) !void {
+        if (self.pos >= self.entries.items.len) {
+            return error.ShiftedEmptyIterator;
+        }
+        self.pos += 1;
+    }
+
+    // add an entry where it's supposed to go, depending on its index.
+    fn addOrdered(self: *EntryIterator, e: Entry) !void {
+        if (e == EntryType.File) {
+            return error.CannotIterateOverFiles;
+        }
+        var idx: usize = self.pos;
+        while (e.EmptyRegion.idx > self.entries.items[idx].EmptyRegion.idx) : (idx += 1) {}
+        try self.entries.insert(idx, e);
+    }
+};
+
+fn entryFromIdx(dense: []u8, idx: usize) Entry {
+    if (idx % 2 == 0) {
+        return Entry{ .File = File{
+            .id = idx / 2,
+            .idx = idx,
+            .size = normalize(dense[idx]),
+        } };
+    }
+    return Entry{ .EmptyRegion = EmptyRegion{
+        .idx = idx,
+        .size = normalize(dense[idx]),
+    } };
+}

 pub fn main() !void {
@@ -61,74 +93,95 @@ pub fn main() !void {
     const input = try files.openForReading(input_path);
     defer input.close();

+    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
     var line_buf: [20000]u8 = undefined;
     const dense = try files.readLine(input, &line_buf);

-    var entries_by_size: [10]FSEntryIterator = .{FSEntryIterator{}} ** 10;
-    var relocated: [20000]bool = .{false} ** 20000;
-
-    var idx: usize = dense.len - 1;
-    while (idx >= 2) : (idx -= 2) {
-        const entry = fsEntryFromIdx(dense, relocated, idx);
-        entries_by_size[entry.size].add(entry);
-    }
-
-    var entry: FSEntry = fsEntryFromIdx(dense, relocated, 0);
-    var entry_pos: usize = 0;
-    var fsIdx: usize = 0;
-
-    var checksum: u128 = 0;
-    idx = 0;
-    entries: while (idx < dense.len) {
-        if (entry_pos >= entry.size) {
-            if (idx == dense.len - 1) {
-                break;
-            }
-            entry = fsEntryFromIdx(dense, relocated, idx + 1);
-            idx += 1;
-            entry_pos = 0;
-            continue;
-        }
-        switch (entry.typ) {
-            .File => {
-                print("{d}", .{entry.id});
-                checksum += fsIdx * entry.id;
-                fsIdx += 1;
-                entry_pos += 1;
-            },
-            .EmptySpace => {
-                const remaining_slots = entry.size - entry_pos;
-                var size = remaining_slots;
-                var largest_id_size: usize = undefined;
-                var largest_id: ?usize = null;
-                while (size > 0) : (size -= 1) {
-                    if (entries_by_size[size].peek()) |e| {
-                        if (largest_id == null or e.id > largest_id.?) {
-                            largest_id = e.id;
-                            largest_id_size = e.size;
-                        }
-                    }
-                }
-
-                size = largest_id_size;
-                if (largest_id) |id| {
-                    _ = entries_by_size[size].next();
-                    for (0..size) |_| {
-                        print("{d}", .{id});
-                        checksum += fsIdx * id;
-                        fsIdx += 1;
-                    }
-                    relocated[id * 2] = true;
-                    entry_pos += size;
-                    continue :entries;
-                }
-
-                for (0..remaining_slots) |_| {
-                    print(".", .{});
-                }
-                entry_pos += remaining_slots;
-                fsIdx += remaining_slots;
+    // Only empty spaces will be stored here.
+    var entries_by_size: [10]EntryIterator = undefined;
+    for (0..10) |i| {
+        entries_by_size[i].pos = 0;
+        entries_by_size[i].entries = try std.ArrayList(Entry).initCapacity(allocator, 20000);
+    }
+
+    var fs: [20000]Entry = undefined;
+    for (0..dense.len) |i| {
+        const entry = entryFromIdx(dense, i);
+        fs[i] = entry;
+        if (entry == EntryType.EmptyRegion) {
+            try entries_by_size[entry.EmptyRegion.size].entries.append(entry);
+        }
+    }
+
+    // Try to move files to the left.
+    var i: usize = dense.len + 1;
+    while (i > 0) {
+        i -= 2;
+        const entry = fs[i];
+
+        // Try to find a spot that fits it.
+        var min_idx: usize = 20000;
+        var chosen_slot: ?EmptyRegion = null;
+        for (entry.File.size..10) |s| {
+            if (entries_by_size[s].peek()) |cs| {
+                if (chosen_slot == null or cs.EmptyRegion.idx < min_idx) {
+                    chosen_slot = cs.EmptyRegion;
+                    min_idx = cs.EmptyRegion.idx;
+                }
+            }
+        }
+        if (chosen_slot == null or chosen_slot.?.idx > entry.File.idx) {
+            continue;
+        }
+
+        try entries_by_size[chosen_slot.?.actualSize()].shift();
+        // If there's a spot, mark it as relocated.
+        fs[i].File.relocated = true;
+        // Add the file to the insertions.
+        chosen_slot.?.addToInsertions(entry.File);
+        fs[chosen_slot.?.idx] = Entry{ .EmptyRegion = chosen_slot.? };
+
+        // If the selected empty region has slots left, update it and re-insert it.
+        if (chosen_slot.?.actualSize() > 0) {
+            try entries_by_size[chosen_slot.?.actualSize()].addOrdered(fs[chosen_slot.?.idx]);
+        }
+    }
+
+    // Calculate checksum.
+    var fsIdx: usize = 0;
+    var checksum: u128 = 0;
+    for (0..dense.len) |di| {
+        switch (fs[di]) {
+            EntryType.File => |file| {
+                if (file.relocated) {
+                    // for (0..file.size) |_| {
+                    //     print(".", .{});
+                    // }
+                    fsIdx += file.size;
+                    continue;
+                }
+                for (0..file.size) |_| {
+                    // print("{d}", .{file.id});
+                    checksum += fsIdx * file.id;
+                    fsIdx += 1;
+                }
+            },
+            EntryType.EmptyRegion => |empty| {
+                for (0..empty.insertions_pos) |ins_i| {
+                    const f = empty.insertions[ins_i];
+                    for (0..f.size) |_| {
+                        // print("{d}", .{f.id});
+                        checksum += fsIdx * f.id;
+                        fsIdx += 1;
+                    }
+                }
+                fsIdx += empty.size - empty.insertions_size;
+                // for (0..(empty.size - empty.insertions_size)) |_| {
+                //     print(".", .{});
+                // }
             },
         }
     }
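
For reference only (not part of this commit): the puzzle's worked example uses the disk map "2333133121414131402", which after whole-file compaction ends up in the block layout below, and the answer is the sum of block position times file id over every occupied block. A minimal standalone Zig sketch of just that checksum step, with the final layout hard-coded from the puzzle statement:

const std = @import("std");

pub fn main() void {
    // Final block layout for the example disk map "2333133121414131402"
    // after moving whole files left; '.' marks a free block.
    const layout = "00992111777.44.333....5555.6666.....8888..";

    var checksum: usize = 0;
    for (layout, 0..) |c, pos| {
        if (c == '.') continue;
        // Each occupied block contributes position * file id.
        checksum += pos * (c - '0');
    }

    // Prints 2858, the expected part 2 checksum for the example.
    std.debug.print("{d}\n", .{checksum});
}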