Day 14
@@ -47,8 +47,10 @@ library
      Days.D11
      Days.D12
      Days.D13
      Days.D14
  build-depends:
      bytestring
    , either
    , lens
    , megaparsec ^>=9.4.0
    , mtl
@@ -21,6 +21,7 @@ paths =
  , "./data/11.in"
  , "./data/12.in"
  , "./data/13.in"
  , "./data/14.in"
  ]

solutions :: [(Int, Day, FilePath)]
@@ -20,6 +20,7 @@ paths =
  , "./data/11.in"
  , "./data/12.in"
  , "./data/13.in"
  , "./data/14.in"
  ]

solutions :: [(Integer, [Day], FilePath)]
data/14.in (new file, 2 lines)
@@ -0,0 +1,2 @@
498,4 -> 498,6 -> 496,6
503,4 -> 502,4 -> 502,9 -> 494,9
src/Days/D14.hs (new file, 86 lines)
@@ -0,0 +1,86 @@
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}

module Days.D14 where

import Common
import Control.Monad (foldM)
import Data.Functor ((<&>))
import Data.Functor.Identity (Identity (..))
import Data.Set (Set)
import Data.Set qualified as Set
import Parse
import Text.Megaparsec (sepBy, sepEndBy1)
import Text.Megaparsec.Char (char, newline, string)

data Point = P {column :: !Int, depth :: !Int} deriving (Eq, Ord, Show)

type Path = [Point]

down :: Point -> Point
down p@P {depth} = p {depth = depth + 1}

left :: Point -> Point
left p@P {column} = p {column = column - 1}

right :: Point -> Point
right p@P {column} = p {column = column + 1}

parser :: Parser (Set Point, Int)
parser =
  sepEndBy1 path newline
    <&> \paths ->
      let wallSpots = concatMap pathTrace paths
          cave = Set.fromList wallSpots
          caveFloor = 1 + maximum [y | P _ y <- wallSpots]
       in (cave, caveFloor)
  where
    path :: Parser Path
    path = sepBy point (string " -> ")

    point :: Parser Point
    point = P <$> number <* char ',' <*> number

pathTrace :: Path -> [Point]
pathTrace p = concat $ zipWith lineTrace p (tail p)
  where
    lineTrace :: Point -> Point -> [Point]
    lineTrace (P x y) (P x' y')
      | x == x' = [P x y'' | y'' <- [min y y' .. max y y']]
      | y == y' = [P x'' y | x'' <- [min x x' .. max x x']]
      | otherwise = error "line isn't straight"

fill ::
  (Monad m) =>
  (Set Point -> m (Set Point)) ->
  Int ->
  Set Point ->
  Point ->
  m (Set Point)
fill m caveFloor = go
  where
    go cave pos
      | caveFloor < depth pos = m cave
      | Set.member pos cave = pure cave
      | otherwise =
          Set.insert pos
            <$> foldM go cave (map ($ pos) [down, left . down, right . down])

source :: Point
source = P {column = 500, depth = 0}

part1 :: Set Point -> Int -> Maybe Int
part1 cave caveFloor =
  case fill Left caveFloor cave (P {column = 500, depth = 0}) of
    Right _ -> Nothing
    Left filledCave -> Just $ Set.size filledCave - Set.size cave

part2 :: Set Point -> Int -> Int
part2 cave caveFloor =
  (\filledCave -> Set.size filledCave - Set.size cave)
    . runIdentity
    $ fill Identity caveFloor cave (P 500 0)

day :: Day
day = parsecDay parser (fmap Answer . uncurry part1, definitive . uncurry part2)
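Side note, not part of the commit: fill is a single depth-first flood fill from the source at column 500, depth 0, and its first argument decides what happens when the search falls past the lowest rock. part1 passes Left, so the Either monad short-circuits the fold the moment a grain would drop into the abyss, and the set accumulated at that point is the sand that settled beforehand. part2 passes Identity, which treats the level below the rocks as a solid floor, so the fill runs to completion. Below is a self-contained sketch of the same idea using only base and containers, with the two rock paths from data/14.in above hard-coded; helper names such as onFloor, settled, and segment are illustrative, not from the repo.

import Control.Monad (foldM)
import Data.Functor.Identity (Identity (..))
import Data.Set (Set)
import qualified Data.Set as Set

-- (column, depth); depth grows downwards, as in the module above.
type Point = (Int, Int)

-- Rock cells traced from the two sample paths in data/14.in.
rocks :: Set Point
rocks = Set.fromList (concatMap trace paths)
  where
    paths =
      [ [(498, 4), (498, 6), (496, 6)]
      , [(503, 4), (502, 4), (502, 9), (494, 9)]
      ]
    trace p = concat (zipWith segment p (tail p))
    -- Segments in the input are always horizontal or vertical.
    segment (x, y) (x', y') =
      [(a, b) | a <- [min x x' .. max x x'], b <- [min y y' .. max y y']]

-- Depth-first fill: a grain tries straight down, then down-left, then
-- down-right, and rests once all three are blocked.  onFloor is called
-- when the search passes floorY: Left aborts everything (part 1),
-- Identity treats the floor as solid ground (part 2).
fill ::
  Monad m =>
  (Set Point -> m (Set Point)) ->
  Int ->
  Set Point ->
  Point ->
  m (Set Point)
fill onFloor floorY = go
  where
    go cave pos@(x, y)
      | y > floorY = onFloor cave
      | Set.member pos cave = pure cave
      | otherwise =
          Set.insert pos
            <$> foldM go cave [(x, y + 1), (x - 1, y + 1), (x + 1, y + 1)]

main :: IO ()
main = do
  let floorY = 1 + maximum [y | (_, y) <- Set.toList rocks]
      settled filled = Set.size filled - Set.size rocks
      part1 = case fill Left floorY rocks (500, 0) of
        Left cave -> Just (settled cave) -- a grain fell past the rocks
        Right _ -> Nothing -- source plugged first (never happens here)
      part2 = settled (runIdentity (fill Identity floorY rocks (500, 0)))
  print (part1, part2) -- sample answers: (Just 24,93)

Compiled and run as-is, this should print the puzzle's example answers of 24 resting grains for part 1 and 93 for part 2.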
@@ -25,6 +25,7 @@ import Days.D10 qualified as D10
import Days.D11 qualified as D11
import Days.D12 qualified as D12
import Days.D13 qualified as D13
import Days.D14 qualified as D14

import Data.ByteString.Char8 qualified as BS
import Data.Text.IO qualified as T
@@ -50,4 +51,5 @@ days =
  , [D11.day]
  , [D12.day]
  , [D13.day]
  , [D14.day]
  ]