add 2024/day9
This commit is contained in:
parent 8f0bc54b7c
commit cac9efe9d3
1 changed file with 123 additions and 0 deletions
2024/09/main.py (Normal file, +123)
@@ -0,0 +1,123 @@
#!/usr/bin/env python
# -*- encoding: utf-8; py-indent-offset: 4 -*-

# Author: Sebastian Mark
# CC-BY-SA (https://creativecommons.org/licenses/by-sa/4.0/deed.de)

# pylint: disable=missing-module-docstring,missing-function-docstring,consider-using-f-string

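# The input is a condensed disk map: a single line of digits that alternately
# encode a file length and a free-space length. Part 1 compacts the disk block
# by block, part 2 moves whole files; both results are scored with a
# position-weighted checksum.

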
def readinput():
    with open("input", "r", encoding="utf-8") as file:
        lines = [int(digit) for digit in file.read().rstrip("\n")]

    return lines


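# expand the condensed map into individual blocks: even map indices are file
# lengths (written as the current file id), odd indices are free space
# (written as "."), e.g. 12345 expands to 0..111....22222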
def expand_map(diskmap: list) -> list:
    fileblocks = []
    fileid = 0

    for i, filesize in enumerate(diskmap):
        # when processing a file, use the file id as the block, otherwise use the free space indicator
        block = str(fileid) if i % 2 == 0 else "."
        # when a file was processed, count the file id up
        fileid += i % 2 == 0
        # create as many new blocks as the file needs
        fileblocks.extend([block] * filesize)

    return fileblocks


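# part 1: walk the disk from the right and swap each file block into the
# leftmost free position, stopping once the next free space lies to the
# right of the block being moved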
def defragment_blocks(fileblocks: list) -> list:
    defragmented = fileblocks.copy()
    freespace = 0

    for pos in range(len(defragmented) - 1, 0, -1):
        # search for the next free space, starting from the previous one
        freespace = defragmented.index(".", freespace)
        # abort if there is no more free space before the current position
        if pos < freespace:
            break
        # swap the current file block with the free space
        defragmented[freespace], defragmented[pos] = (
            defragmented[pos],
            defragmented[freespace],
        )

    return defragmented


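# part 2: take each file, starting from the end of the disk, and move it into
# the leftmost free span that is large enough and lies left of the file;
# every file is attempted exactly once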
def defragment_files(fileblocks: list) -> list:
    defragmented = fileblocks.copy()

    # find all fileblocks and try to fit them into a free space
    file_end = len(fileblocks) - 1
    while file_end > 0:
        if fileblocks[file_end] == ".":
            file_end -= 1
            continue

        # get the blocks for the file
        # (guard against indexing past the start of the disk)
        file_length = 1
        while (
            file_end - file_length >= 0
            and fileblocks[file_end] == fileblocks[file_end - file_length]
        ):
            file_length += 1
        file_start = file_end - file_length + 1
        file = fileblocks[file_start : file_end + 1]

        # search for free space and try to fit the file into it
        free_start = 0
        while free_start < len(defragmented) - 1:
            # find and measure the size of the next free block
            # (that is to the left of the file)
            free_length = 0
            while (
                defragmented[free_start + free_length] == "."
                and free_start < file_start
            ):
                free_length += 1

            # if the file fits into the free block, move it
            # and skip to the next file
            if free_length >= file_length:
                # move the file blocks to the free space
                defragmented[free_start : free_start + file_length] = file
                # mark the old location as free
                defragmented[file_start : file_end + 1] = ["."] * file_length
                break

            # the file does not fit,
            # continue the search after this block
            free_start += 1 + free_length

        file_end -= file_length

    return defragmented


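# the checksum is the sum of position * file id over all file blocks;
# free blocks do not contribute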
def calc_checksum(fileblocks: list) -> int:
    checksum = 0
    for i, block in enumerate(fileblocks):
        if block == ".":  # skip free space
            continue
        checksum += i * int(block)
    return checksum


def main():
    diskmap = readinput()
    fileblocks = expand_map(diskmap)

    # part1
    defragmented_blocks = defragment_blocks(fileblocks)
    checksum = calc_checksum(defragmented_blocks)
    print("Filesystem checksum (part 1): %d" % checksum)

    # part2
    defragmented_blocks = defragment_files(fileblocks)
    checksum = calc_checksum(defragmented_blocks)
    print("Filesystem checksum (part 2): %d" % checksum)


if __name__ == "__main__":
    main()