summaryrefslogtreecommitdiff
path: root/src/Language/PureScript/Make/Cache.hs
blob: 337ee2d2623e46a023310de512a06bdee651de20 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

module Language.PureScript.Make.Cache
  ( ContentHash
  , hash
  , CacheDb
  , CacheInfo
  , checkChanged
  , removeModules
  ) where

import Prelude

import Control.Category ((>>>))
import Control.Monad ((>=>))
import Crypto.Hash (hashlazy, HashAlgorithm, Digest, SHA512, digestFromByteString)
import qualified Data.Aeson as Aeson
import Data.Align (align)
import Data.ByteArray.Encoding (Base(Base16), convertToBase, convertFromBase)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid (All(..))
import Data.Set (Set)
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.These (These(..))
import Data.Time.Clock (UTCTime)
import Data.Traversable (for)

import Language.PureScript.Names (ModuleName)

-- | Render a digest as lowercase hexadecimal 'Text'.
digestToHex :: Digest a -> Text
digestToHex digest = decodeUtf8 (convertToBase Base16 digest)

-- | Parse a digest from hexadecimal 'Text', returning 'Nothing' if the input
-- is not valid hex or has the wrong length for the algorithm @a@.
digestFromHex :: forall a. HashAlgorithm a => Text -> Maybe (Digest a)
digestFromHex txt =
  case convertFromBase Base16 (encodeUtf8 txt) :: Either String BS.ByteString of
    Left _ -> Nothing
    Right bytes -> digestFromByteString bytes

-- | Defines the hash algorithm we use for cache invalidation of input files.
newtype ContentHash = ContentHash
  { unContentHash :: Digest SHA512 }
  -- Explicit strategy for consistency with CacheInfo below; stock is what an
  -- unannotated deriving clause resolves to for these classes anyway.
  deriving stock (Show, Eq, Ord)

-- Serialised as a hex string (the inverse of the FromJSON instance).
instance Aeson.ToJSON ContentHash where
  toJSON (ContentHash digest) = Aeson.toJSON (digestToHex digest)

-- Parses the hex string produced by the ToJSON instance.
instance Aeson.FromJSON ContentHash where
  parseJSON =
    Aeson.parseJSON >=> \str ->
      maybe (fail "Unable to decode ContentHash") (pure . ContentHash) (digestFromHex str)

-- | Compute the 'ContentHash' of a file's contents, supplied lazily.
hash :: BSL.ByteString -> ContentHash
hash contents = ContentHash (hashlazy contents)

type CacheDb = Map ModuleName CacheInfo

-- | A CacheInfo contains all of the information we need to store about a
-- particular module in the cache database.
newtype CacheInfo = CacheInfo
  { unCacheInfo :: Map FilePath (UTCTime, ContentHash) }
    -- ^ For each input file of the module: the timestamp and content hash
    -- observed the last time the module was built.
  deriving stock (Show)
  deriving newtype (Eq, Ord, Semigroup, Monoid, Aeson.FromJSON, Aeson.ToJSON)

-- | Given a module name, and a map containing the associated input files
-- together with current metadata i.e. timestamps and hashes, check whether the
-- input files have changed, based on comparing with the database stored in the
-- monadic state.
--
-- The CacheInfo in the return value should be stored in the cache for future
-- builds.
--
-- The Bool in the return value indicates whether it is safe to use existing
-- build artifacts for this module, at least based on the timestamps and hashes
-- of the module's input files.
--
-- If the timestamps are the same as those in the database, assume the file is
-- unchanged, and return True without checking hashes.
--
-- If any of the timestamps differ from what is in the database, check the
-- hashes of those files. In this case, update the database with any changed
-- timestamps and hashes, and return True if and only if all of the hashes are
-- unchanged.
checkChanged
  :: Monad m
  => CacheDb
  -> ModuleName
  -> Map FilePath (UTCTime, m ContentHash)
  -> m (CacheInfo, Bool)
checkChanged cacheDb mn currentInfo = do
  -- A module absent from the database behaves as an empty file map.
  let storedInfo = unCacheInfo (Map.findWithDefault mempty mn cacheDb)
  results <-
    for (Map.toList (align storedInfo currentInfo)) $ \(path, entry) ->
      case entry of
        -- A file recorded in the database no longer exists on disk: drop it
        -- from the cache and mark the module as needing a rebuild.
        This _ ->
          pure (Map.empty, All False)
        -- A brand new input file: record its metadata and mark the module as
        -- needing a rebuild.
        That (timestamp, getHash) -> do
          newHash <- getHash
          pure (Map.singleton path (timestamp, newHash), All False)
        These stored@(storedStamp, storedHash) (currentStamp, getHash)
          -- Timestamp unchanged: keep the stored entry and skip hashing.
          | storedStamp == currentStamp ->
              pure (Map.singleton path stored, mempty)
          -- Timestamp changed: rehash, record fresh metadata, and remain up
          -- to date only if the content itself is unchanged.
          | otherwise -> do
              newHash <- getHash
              pure (Map.singleton path (currentStamp, newHash), All (storedHash == newHash))
  let (newInfo, upToDate) = mconcat results
  pure (CacheInfo newInfo, getAll upToDate)

-- | Remove any modules from the given set from the cache database; used when
-- they failed to build.
removeModules :: Set ModuleName -> CacheDb -> CacheDb
removeModules moduleNames db = Map.withoutKeys db moduleNames