Day 1 of AoC 2022 in Rust. That was... somewhat painful. And I haven't even needed to deal with the borrow checker yet.
commit
e847cee021
@ -0,0 +1,7 @@
|
|||||||
|
# This file is automatically @generated by Cargo.
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
version = 3
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "day1"
|
||||||
|
version = "0.1.0"
|
@ -0,0 +1,8 @@
|
|||||||
|
[package]
|
||||||
|
name = "day1"
|
||||||
|
version = "0.1.0"
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,25 @@
|
|||||||
|
use std::fs;
|
||||||
|
|
||||||
|
/// AoC 2022 Day 1: find the elf carrying the most calories (part 1) and the
/// total carried by the top three elves (part 2).
fn main() {
    // Puzzle input: groups of numbers separated by blank lines, one group per elf.
    let contents = fs::read_to_string("data.txt").expect("Failed to read file");

    let mut sums = group_sums(&contents);

    // Sort descending once; the answers to both parts fall out of the prefix.
    sums.sort_unstable_by(|a, b| b.cmp(a));

    // After a descending sort the first element IS the maximum — no separate
    // `iter().max()` pass needed. `expect` documents the only panic condition.
    let max = sums.first().expect("input contained no groups");
    println!("Max Calories: {:?}", max);

    // `take(3)` is safe even if there are fewer than 3 groups, where the
    // original `split_at(3)` would panic.
    let top3_sum: u32 = sums.iter().take(3).sum();
    println!("Top 3 Max Calories: {:?}", top3_sum);
}

/// Parse blank-line-separated groups of integers and return each group's sum.
///
/// Uses `lines()` rather than `split("\n")` so a Windows-style `\r\n` line
/// ending doesn't leave a trailing `\r` that makes `parse::<u32>()` fail.
///
/// # Panics
/// Panics if any non-blank line is not a valid `u32` — acceptable for a
/// puzzle input that is trusted to be well-formed.
fn group_sums(input: &str) -> Vec<u32> {
    input
        .trim()
        .split("\n\n")
        .map(|group| {
            group
                .lines()
                .map(|line| line.trim().parse::<u32>().expect("line is not a number"))
                .sum()
        })
        .collect()
}
|
@ -0,0 +1,14 @@
|
|||||||
|
1000
|
||||||
|
2000
|
||||||
|
3000
|
||||||
|
|
||||||
|
4000
|
||||||
|
|
||||||
|
5000
|
||||||
|
6000
|
||||||
|
|
||||||
|
7000
|
||||||
|
8000
|
||||||
|
9000
|
||||||
|
|
||||||
|
10000
|
Loading…
Reference in New Issue