Formatting with fourmolu

Andrea Bedini 2023-09-13 14:37:10 +08:00 committed by Andrea Bedini
parent b18d165702
commit cc610620d6
19 changed files with 750 additions and 681 deletions

.github/workflows/formatting.yaml (new file)

@@ -0,0 +1,16 @@
name: Check code formatting
on:
  pull_request:
jobs:
  build:
    runs-on:
      - ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: haskell-actions/run-fourmolu@v9
        with:
          version: "0.14.0.0"
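
The project's own fourmolu configuration is not part of this commit view; the hunks below only show the resulting style (leading commas, no space before record braces, parenthesised single constraints, leading arrows in signatures). As a rough sketch only, with option values that are assumptions rather than the repository's actual settings, a fourmolu.yaml producing that style could look roughly like:

# fourmolu.yaml (assumed sketch, not taken from this commit)
indentation: 2            # assumed indentation width
comma-style: leading      # commas at the start of the line
record-brace-space: false # PackageDescription{..} rather than PackageDescription {..}
function-arrows: leading  # "::" / "->" at the start of each signature line

Running fourmolu locally with the same settings (for example, fourmolu --mode inplace over the tracked .hs files) should keep the CI check above green.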


@@ -144,33 +144,33 @@ jsonField fn v
  | v == emptyArray = mempty
  | v == emptyString = mempty
  | otherwise = [Key.fromString (fromUTF8BS fn) .= v]
  where
    -- Should be added to aeson
    emptyString :: Value
    emptyString = String ""

jsonGenericPackageDescription :: GenericPackageDescription -> Value
jsonGenericPackageDescription gpd = jsonGenericPackageDescription' v gpd
  where
    v = specVersion $ packageDescription gpd

jsonGenericPackageDescription' :: CabalSpecVersion -> GenericPackageDescription -> Value
jsonGenericPackageDescription' v gpd =
  object $
    concat
      [ jsonPackageDescription v (packageDescription gpd)
      , jsonSetupBuildInfo v (setupBuildInfo (packageDescription gpd))
      , jsonGenPackageFlags v (genPackageFlags gpd)
      , jsonCondLibrary v (condLibrary gpd)
      , jsonCondSubLibraries v (condSubLibraries gpd)
      , jsonCondForeignLibs v (condForeignLibs gpd)
      , jsonCondExecutables v (condExecutables gpd)
      , jsonCondTestSuites v (condTestSuites gpd)
      , jsonCondBenchmarks v (condBenchmarks gpd)
      ]

jsonPackageDescription :: CabalSpecVersion -> PackageDescription -> [Pair]
jsonPackageDescription v pd@PackageDescription{sourceRepos, setupBuildInfo} =
  jsonFieldGrammar v packageDescriptionFieldGrammar pd
    <> jsonSourceRepos v sourceRepos
    <> jsonSetupBuildInfo v setupBuildInfo

@@ -180,7 +180,7 @@ jsonSourceRepos v =
  concatMap (\neRepos -> ["source-repository" .= NE.map (jsonSourceRepo v) neRepos]) . NE.nonEmpty

jsonSourceRepo :: CabalSpecVersion -> SourceRepo -> Value
jsonSourceRepo v repo@SourceRepo{repoKind} =
  object $ jsonFieldGrammar v (sourceRepoFieldGrammar repoKind) repo

jsonSetupBuildInfo :: CabalSpecVersion -> Maybe SetupBuildInfo -> [Pair]

@@ -241,33 +241,33 @@ jsonCondBenchmark v (n, condTree) =
jsonCondTree :: forall a. CabalSpecVersion -> JSONFieldGrammar' a -> CondTree ConfVar [Dependency] a -> Value
jsonCondTree v grammar = toJSON . go . fmap fst . conv
  where
    go (CondFlat a ifs) =
      KeyMap.fromListWith (<>) $
        second (: [])
          <$> jsonFieldGrammar v grammar a ++ concatMap (\(cv, a') -> second (ifc cv) <$> jsonFieldGrammar v grammar a') ifs

    ifc cv a = object ["if" .= showCondition cv, "then" .= a]

data CondFlat v a = CondFlat a [(Condition v, a)]
  deriving (Show, Functor)

conv :: forall v c a. CondTree v c a -> CondFlat v (a, c)
conv = goNode
  where
    goNode (CondNode a c ifs) =
      CondFlat (a, c) (concatMap goBranch ifs)

    goBranch (CondBranch cond thenTree Nothing) =
      let (CondFlat a ifs) = goNode thenTree
       in (cond, a) : fmap (first (cond `cAnd`)) ifs
    goBranch (CondBranch cond thenTree (Just elseTree)) =
      let (CondFlat a1 ifs1) = goNode thenTree
          (CondFlat a2 ifs2) = goNode elseTree
       in (cond, a1)
            : (first (cond `cAnd`) <$> ifs1)
            ++ (cNot cond, a2)
            : (first (cNot cond `cAnd`) <$> ifs2)

test :: FilePath -> IO ()
test fn = do


@@ -48,24 +48,24 @@ cmdBuild buildOptions = do
    addPrepareSdistRule outputDirRoot
    phony "buildAction" (buildAction buildOptions)
    want ["buildAction"]
  where
    cacheDir = "_cache"
    opts =
      shakeOptions
        { shakeFiles = cacheDir
        , shakeVerbosity = Verbose
        , shakeThreads = buildOptsNumThreads buildOptions
        }

buildAction :: BuildOptions -> Action ()
buildAction
  BuildOptions
    { buildOptsSignOpts = signOpts
    , buildOptsCurrentTime = mCurrentTime
    , buildOptsExpireSignaturesOn = mExpireSignaturesOn
    , buildOptsInputDir = inputDir
    , buildOptsOutputDir = outputDir
    , buildOptsWriteMetadata = doWritePackageMeta
    } = do
    outputDirRoot <- liftIO $ makeAbsolute (fromFilePath outputDir)

@@ -108,7 +108,7 @@ buildAction
    cabalEntries <-
      foldMap
        ( \PreparedPackageVersion{pkgId, pkgTimestamp, cabalFilePath, originalCabalFilePath, cabalFileRevisions} -> do
            -- original cabal file, with its timestamp (if specified)
            copyFileChanged originalCabalFilePath (outputDir </> "package" </> prettyShow pkgId </> "revision" </> "0" <.> "cabal")
            cf <- prepareIndexPkgCabal pkgId (fromMaybe currentTime pkgTimestamp) originalCabalFilePath

@@ -131,7 +131,7 @@ buildAction
    targetKeys <- maybeReadKeysAt "target"
    metadataEntries <-
      forP packageVersions $ \ppv@PreparedPackageVersion{pkgId, pkgTimestamp} -> do
        targets <- prepareIndexPkgMetadata expiryTime ppv
        pure $
          mkTarEntry

@@ -156,51 +156,51 @@ buildAction
    liftIO $
      writeSignedJSON outputDirRoot repoLayoutMirrors privateKeysMirrors $
        Mirrors
          { mirrorsVersion = FileVersion 1
          , mirrorsExpires = FileExpires expiryTime
          , mirrorsMirrors = []
          }

    liftIO $
      writeSignedJSON outputDirRoot repoLayoutRoot privateKeysRoot $
        Root
          { rootVersion = FileVersion 1
          , rootExpires = FileExpires expiryTime
          , rootKeys =
              fromKeys $
                concat
                  [ privateKeysRoot
                  , privateKeysTarget
                  , privateKeysSnapshot
                  , privateKeysTimestamp
                  , privateKeysMirrors
                  ]
          , rootRoles =
              RootRoles
                { rootRolesRoot =
                    RoleSpec
                      { roleSpecKeys = map somePublicKey privateKeysRoot
                      , roleSpecThreshold = KeyThreshold 2
                      }
                , rootRolesSnapshot =
                    RoleSpec
                      { roleSpecKeys = map somePublicKey privateKeysSnapshot
                      , roleSpecThreshold = KeyThreshold 1
                      }
                , rootRolesTargets =
                    RoleSpec
                      { roleSpecKeys = map somePublicKey privateKeysTarget
                      , roleSpecThreshold = KeyThreshold 1
                      }
                , rootRolesTimestamp =
                    RoleSpec
                      { roleSpecKeys = map somePublicKey privateKeysTimestamp
                      , roleSpecThreshold = KeyThreshold 1
                      }
                , rootRolesMirrors =
                    RoleSpec
                      { roleSpecKeys = map somePublicKey privateKeysMirrors
                      , roleSpecThreshold = KeyThreshold 1
                      }
                }
          }

@@ -213,21 +213,21 @@ buildAction
    liftIO $
      writeSignedJSON outputDirRoot repoLayoutSnapshot privateKeysSnapshot $
        Snapshot
          { snapshotVersion = FileVersion 1
          , snapshotExpires = FileExpires expiryTime
          , snapshotInfoRoot = rootInfo
          , snapshotInfoMirrors = mirrorsInfo
          , snapshotInfoTar = Just tarInfo
          , snapshotInfoTarGz = tarGzInfo
          }

    snapshotInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutSnapshot)
    liftIO $
      writeSignedJSON outputDirRoot repoLayoutTimestamp privateKeysTimestamp $
        Timestamp
          { timestampVersion = FileVersion 1
          , timestampExpires = FileExpires expiryTime
          , timestampInfoSnapshot = snapshotInfo
          }

makeMetadataFile :: FilePath -> [PreparedPackageVersion] -> Action ()

@@ -236,37 +236,37 @@ makeMetadataFile outputDir packageVersions = traced "writing metadata" $ do
  Aeson.encodeFile
    (outputDir </> "foliage" </> "packages.json")
    (map encodePackageVersion packageVersions)
  where
    encodePackageVersion
      PreparedPackageVersion
        { pkgId = PackageIdentifier{pkgName, pkgVersion}
        , pkgTimestamp
        , pkgVersionForce
        , pkgVersionSource
        } =
        Aeson.object
          ( [ "pkg-name" Aeson..= pkgName
            , "pkg-version" Aeson..= pkgVersion
            , "url" Aeson..= sourceUrl pkgVersionSource
            ]
              ++ ["forced-version" Aeson..= True | pkgVersionForce]
              ++ (case pkgTimestamp of Nothing -> []; Just t -> ["timestamp" Aeson..= t])
          )

    sourceUrl :: PackageVersionSource -> URI
    sourceUrl (TarballSource uri Nothing) = uri
    sourceUrl (TarballSource uri (Just subdir)) = uri{uriQuery = "?dir=" ++ subdir}
    sourceUrl (GitHubSource repo rev Nothing) =
      nullURI
        { uriScheme = "github:"
        , uriPath = T.unpack (unGitHubRepo repo) </> T.unpack (unGitHubRev rev)
        }
    sourceUrl (GitHubSource repo rev (Just subdir)) =
      nullURI
        { uriScheme = "github:"
        , uriPath = T.unpack (unGitHubRepo repo) </> T.unpack (unGitHubRev rev)
        , uriQuery = "?dir=" ++ subdir
        }

getPackageVersions :: FilePath -> Action [PreparedPackageVersion]
getPackageVersions inputDir = do

@@ -275,8 +275,8 @@ getPackageVersions inputDir = do
  when (null metaFiles) $ do
    error $
      unlines
        [ "We could not find any package metadata file (i.e. _sources/<name>/<version>/meta.toml)"
        , "Make sure you are passing the right input directory. The default input directory is _sources"
        ]

  forP metaFiles $ preparePackageVersion inputDir

@@ -288,46 +288,48 @@ prepareIndexPkgCabal pkgId timestamp filePath = do
  pure $ mkTarEntry (BL.fromStrict contents) (IndexPkgCabal pkgId) timestamp

prepareIndexPkgMetadata :: Maybe UTCTime -> PreparedPackageVersion -> Action Targets
prepareIndexPkgMetadata expiryTime PreparedPackageVersion{pkgId, sdistPath} = do
  targetFileInfo <- liftIO $ computeFileInfoSimple sdistPath
  let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
  return
    Targets
      { targetsVersion = FileVersion 1
      , targetsExpires = FileExpires expiryTime
      , targetsTargets = fromList [(TargetPathRepo packagePath, targetFileInfo)]
      , targetsDelegations = Nothing
      }

-- Currently `extraEntries` are only used for encoding `prefered-versions`.
getExtraEntries :: [PreparedPackageVersion] -> [Tar.Entry]
getExtraEntries packageVersions =
  let
    -- Group all (package) versions by package (name)
    groupedPackageVersions :: [NE.NonEmpty PreparedPackageVersion]
    groupedPackageVersions = NE.groupWith (pkgName . pkgId) packageVersions

    -- All versions of a given package together form a list of entries
    -- The list of entries might be empty (in case no version has been deprecated)
    generateEntriesForGroup :: NE.NonEmpty PreparedPackageVersion -> [Tar.Entry]
    generateEntriesForGroup packageGroup = map createTarEntry effectiveRanges
      where
        -- Get the package name of the current group.
        pn :: PackageName
        pn = pkgName $ pkgId $ NE.head packageGroup
        -- Collect and sort the deprecation changes for the package group, turning them into a action on VersionRange
        deprecationChanges :: [(UTCTime, VersionRange -> VersionRange)]
        deprecationChanges = sortOn fst $ foldMap versionDeprecationChanges packageGroup
        -- Calculate (by applying them chronologically) the effective `VersionRange` for the package group.
        effectiveRanges :: [(UTCTime, VersionRange)]
        effectiveRanges = NE.tail $ NE.scanl applyChangeToRange (posixSecondsToUTCTime 0, anyVersion) deprecationChanges
        -- Create a `Tar.Entry` for the package group, its computed `VersionRange` and a timestamp.
        createTarEntry (ts, effectiveRange) = mkTarEntry (BL.pack $ prettyShow dep) (IndexPkgPrefs pn) ts
          where
            -- Cabal uses `Dependency` to represent preferred versions, cf.
            -- `parsePreferredVersions`. The (sub)libraries part is ignored.
            dep = mkDependency pn effectiveRange mainLibSet
   in
    foldMap generateEntriesForGroup groupedPackageVersions

-- TODO: the functions belows should be moved to Foliage.PreparedPackageVersion

@@ -335,8 +337,8 @@ getExtraEntries packageVersions =
versionDeprecationChanges :: PreparedPackageVersion -> [(UTCTime, VersionRange -> VersionRange)]
versionDeprecationChanges
  PreparedPackageVersion
    { pkgId = PackageIdentifier{pkgVersion}
    , pkgVersionDeprecationChanges
    } =
    map (second $ applyDeprecation pkgVersion) pkgVersionDeprecationChanges

@@ -356,21 +358,21 @@ applyDeprecation pkgVersion deprecated =
mkTarEntry :: BL.ByteString -> IndexFile dec -> UTCTime -> Tar.Entry
mkTarEntry contents indexFile timestamp =
  (Tar.fileEntry tarPath contents)
    { Tar.entryTime = floor $ Time.utcTimeToPOSIXSeconds timestamp
    , Tar.entryOwnership =
        Tar.Ownership
          { Tar.ownerName = "foliage"
          , Tar.groupName = "foliage"
          , Tar.ownerId = 0
          , Tar.groupId = 0
          }
    }
  where
    tarPath = case Tar.toTarPath False indexPath of
      Left e -> error $ "Invalid tar path " ++ indexPath ++ "(" ++ e ++ ")"
      Right tp -> tp

    indexPath = toFilePath $ castRoot $ indexFileToPath hackageIndexLayout indexFile

anchorPath :: Path Absolute -> (RepoLayout -> RepoPath) -> FilePath
anchorPath outputDirRoot p =


@@ -1,8 +1,8 @@
{-# LANGUAGE ViewPatterns #-}

module Foliage.CmdImportIndex (
  cmdImportIndex,
)
where

import Codec.Archive.Tar qualified as Tar

@@ -29,55 +29,56 @@ cmdImportIndex :: ImportIndexOptions -> IO ()
cmdImportIndex opts = do
  putStrLn $
    unlines
      [ "This command is EXPERIMENTAL and INCOMPLETE!"
      , "Import the Hackage index from $HOME/.cabal. Make sure you have done `cabal update` recently."
      ]
  home <- getEnv "HOME"
  entries <- Tar.read <$> BSL.readFile (home </> ".cabal/packages/hackage.haskell.org/01-index.tar")
  m <- importIndex indexfilter entries M.empty
  for_ (M.toList m) $ uncurry finalise
  where
    indexfilter = case importOptsFilter opts of
      Nothing -> const True
      (Just f) -> mkFilter f

    mkFilter (ImportFilter pn Nothing) = (== pn) . unPackageName . pkgName
    mkFilter (ImportFilter pn (Just pv)) = (&&) <$> (== pn) . unPackageName . pkgName <*> (== pv) . prettyShow . pkgVersion

importIndex
  :: (Show e)
  => (PackageIdentifier -> Bool)
  -> Tar.Entries e
  -> Map PackageIdentifier PackageVersionSpec
  -> IO (Map PackageIdentifier PackageVersionSpec)
importIndex f (Tar.Next e es) m =
  case isCabalFile e of
    Just (pkgId, contents, time)
      | f pkgId ->
          do
            putStrLn $ "Found cabal file " ++ prettyShow pkgId ++ " with timestamp " ++ show time
            let
              -- new package
              go Nothing =
                pure $
                  Just $
                    PackageVersionSpec
                      { packageVersionSource = TarballSource (pkgIdToHackageUrl pkgId) Nothing
                      , packageVersionTimestamp = Just time
                      , packageVersionRevisions = []
                      , packageVersionDeprecations = []
                      , packageVersionForce = False
                      }
              -- Existing package, new revision
              go (Just sm) = do
                let revnum = 1 + fromMaybe 0 (latestRevisionNumber sm)
                    newRevision = RevisionSpec{revisionNumber = revnum, revisionTimestamp = time}
                -- Repeatedly adding at the end of a list is bad performance but good for the moment.
                let sm' = sm{packageVersionRevisions = packageVersionRevisions sm ++ [newRevision]}
                let PackageIdentifier pkgName pkgVersion = pkgId
                let outDir = "_sources" </> unPackageName pkgName </> prettyShow pkgVersion </> "revisions"
                createDirectoryIfMissing True outDir
                BSL.writeFile (outDir </> show revnum <.> "cabal") contents
                return $ Just sm'
            m' <- M.alterF go pkgId m
            importIndex f es m'
    _ -> importIndex f es m

@@ -89,28 +90,28 @@ importIndex _f (Tar.Fail e) _ =
pkgIdToHackageUrl :: PackageIdentifier -> URI
pkgIdToHackageUrl pkgId =
  nullURI
    { uriScheme = "https:"
    , uriAuthority = Just $ nullURIAuth{uriRegName = "hackage.haskell.org"}
    , uriPath = "/package" </> prettyShow pkgId </> prettyShow pkgId <.> "tar.gz"
    }

finalise
  :: PackageIdentifier
  -> PackageVersionSpec
  -> IO ()
finalise PackageIdentifier{pkgName, pkgVersion} meta = do
  let dir = "_sources" </> unPackageName pkgName </> prettyShow pkgVersion
  createDirectoryIfMissing True dir
  writePackageVersionSpec (dir </> "meta.toml") meta

isCabalFile
  :: Tar.Entry
  -> Maybe (PackageIdentifier, BSL.ByteString, UTCTime)
isCabalFile
  Tar.Entry
    { Tar.entryTarPath = Tar.fromTarPath -> path
    , Tar.entryContent = Tar.NormalFile contents _
    , Tar.entryTime = posixSecondsToUTCTime . fromIntegral -> time
    }
    | ".cabal" `isSuffixOf` path =
        let [pkgName, pkgVersion, _] = splitDirectories path


@@ -2,14 +2,14 @@
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImportQualifiedPost #-}

module Foliage.HackageSecurity (
  module Foliage.HackageSecurity,
  module Hackage.Security.Server,
  module Hackage.Security.TUF.FileMap,
  module Hackage.Security.Key.Env,
  module Hackage.Security.Util.Path,
  module Hackage.Security.Util.Some,
)
where

import Control.Monad (replicateM)

@@ -27,7 +27,7 @@ import Hackage.Security.Util.Some
import System.Directory (createDirectoryIfMissing)
import System.FilePath

readJSONSimple :: (FromJSON ReadJSON_NoKeys_NoLayout a) => FilePath -> IO (Either DeserializationError a)
readJSONSimple fp = do
  p <- makeAbsolute (fromFilePath fp)
  readJSON_NoKeys_NoLayout p

@@ -46,16 +46,16 @@ createKeys base = do
  putStrLn "root keys:"
  createKeyGroup "root" >>= showKeys
  for_ ["target", "timestamp", "snapshot", "mirrors"] createKeyGroup
  where
    createKeyGroup group = do
      createDirectoryIfMissing True (base </> group)
      keys <- replicateM 3 $ createKey' KeyTypeEd25519
      for_ keys $ writeKeyWithId (base </> group)
      pure keys

    showKeys keys =
      for_ keys $ \key ->
        putStrLn $ " " ++ showKey key

showKey :: Some Key -> [Char]
showKey k = T.unpack $ encodeBase16 $ exportSomePublicKey $ somePublicKey k

@@ -75,14 +75,14 @@ writeKey fp key = do
  p <- makeAbsolute (fromFilePath fp)
  writeJSON_NoLayout p key

renderSignedJSON :: (ToJSON WriteJSON a) => [Some Key] -> a -> BSL.ByteString
renderSignedJSON keys thing =
  renderJSON
    hackageRepoLayout
    (withSignatures hackageRepoLayout keys thing)

writeSignedJSON :: (ToJSON WriteJSON a) => Path Absolute -> (RepoLayout -> RepoPath) -> [Some Key] -> a -> IO ()
writeSignedJSON outputDirRoot repoPath keys thing = do
  writeLazyByteString fp $ renderSignedJSON keys thing
  where
    fp = anchorRepoPathLocally outputDirRoot $ repoPath hackageRepoLayout


@@ -5,29 +5,29 @@
{-# LANGUAGE PatternSynonyms #-}
{-# OPTIONS_GHC -Wno-orphans #-}

module Foliage.Meta (
  packageVersionTimestamp,
  packageVersionSource,
  packageVersionRevisions,
  packageVersionDeprecations,
  packageVersionForce,
  PackageVersionSpec (PackageVersionSpec),
  readPackageVersionSpec,
  writePackageVersionSpec,
  RevisionSpec (RevisionSpec),
  revisionTimestamp,
  revisionNumber,
  DeprecationSpec (DeprecationSpec),
  deprecationTimestamp,
  deprecationIsDeprecated,
  PackageVersionSource,
  pattern TarballSource,
  pattern GitHubSource,
  GitHubRepo (..),
  GitHubRev (..),
  UTCTime,
  latestRevisionNumber,
)
where

import Control.Applicative ((<|>))

@@ -56,13 +56,13 @@ newtype GitHubRev = GitHubRev {unGitHubRev :: Text}
data PackageVersionSource
  = TarballSource
      { tarballSourceURI :: URI
      , subdir :: Maybe String
      }
  | GitHubSource
      { githubRepo :: GitHubRepo
      , githubRev :: GitHubRev
      , subdir :: Maybe String
      }
  deriving (Show, Eq, Generic)
  deriving anyclass (Binary, Hashable, NFData)

@@ -74,11 +74,11 @@ packageSourceCodec =
uri :: Toml.Key -> TomlCodec URI
uri = Toml.textBy to from
  where
    to = T.pack . show
    from t = case parseURI (T.unpack t) of
      Nothing -> Left $ "Invalid url: " <> t
      Just uri' -> Right uri'

tarballSourceCodec :: TomlCodec (URI, Maybe String)
tarballSourceCodec =

@@ -107,16 +107,16 @@ matchGitHubSource (GitHubSource repo rev mSubdir) = Just ((repo, rev), mSubdir)
matchGitHubSource _ = Nothing

data PackageVersionSpec = PackageVersionSpec
  { packageVersionTimestamp :: Maybe UTCTime
  -- ^ timestamp
  , packageVersionSource :: PackageVersionSource
  -- ^ source parameters
  , packageVersionRevisions :: [RevisionSpec]
  -- ^ revisions
  , packageVersionDeprecations :: [DeprecationSpec]
  -- ^ deprecations
  , packageVersionForce :: Bool
  -- ^ force version
  }
  deriving (Show, Eq, Generic)
  deriving anyclass (Binary, Hashable, NFData)

@@ -125,15 +125,15 @@ sourceMetaCodec :: TomlCodec PackageVersionSpec
sourceMetaCodec =
  PackageVersionSpec
    <$> Toml.dioptional (timeCodec "timestamp")
      .= packageVersionTimestamp
    <*> packageSourceCodec
      .= packageVersionSource
    <*> Toml.list revisionMetaCodec "revisions"
      .= packageVersionRevisions
    <*> Toml.list deprecationMetaCodec "deprecations"
      .= packageVersionDeprecations
    <*> withDefault False (Toml.bool "force-version")
      .= packageVersionForce

readPackageVersionSpec :: FilePath -> IO PackageVersionSpec
readPackageVersionSpec = Toml.decodeFile sourceMetaCodec

@@ -142,8 +142,8 @@ writePackageVersionSpec :: FilePath -> PackageVersionSpec -> IO ()
writePackageVersionSpec fp a = void $ Toml.encodeToFile sourceMetaCodec fp a

data RevisionSpec = RevisionSpec
  { revisionTimestamp :: UTCTime
  , revisionNumber :: Int
  }
  deriving (Show, Eq, Generic, Ord)
  deriving anyclass (Binary, Hashable, NFData)

@@ -152,16 +152,16 @@ revisionMetaCodec :: TomlCodec RevisionSpec
revisionMetaCodec =
  RevisionSpec
    <$> timeCodec "timestamp"
      .= revisionTimestamp
    <*> Toml.int "number"
      .= revisionNumber

data DeprecationSpec = DeprecationSpec
  { deprecationTimestamp :: UTCTime
  , deprecationIsDeprecated :: Bool
  -- ^ 'True' means the package version has been deprecated
  -- 'False' means the package version has been undeprecated
  -- FIXME: we should consider something better than 'Bool'
  }
  deriving (Show, Eq, Generic, Ord)
  deriving anyclass (Binary, Hashable, NFData)

@@ -170,9 +170,9 @@ deprecationMetaCodec :: TomlCodec DeprecationSpec
deprecationMetaCodec =
  DeprecationSpec
    <$> timeCodec "timestamp"
      .= deprecationTimestamp
    <*> withDefault True (Toml.bool "deprecated")
      .= deprecationIsDeprecated

timeCodec :: Toml.Key -> TomlCodec UTCTime
timeCodec key = Toml.dimap (utcToZonedTime utc) zonedTimeToUTC $ Toml.zonedTime key

@@ -183,7 +183,7 @@ latestRevisionNumber sm =
    [] -> Nothing
    rev : _ -> Just (revisionNumber rev)

withDefault :: (Eq a) => a -> TomlCodec a -> TomlCodec a
withDefault d c = (fromMaybe d <$> Toml.dioptional c) .= f
  where
    f a = if a == d then Nothing else Just a


@@ -26,8 +26,8 @@ instance ToJSON PackageVersionSource where
  toJSON =
    genericToJSON
      defaultOptions
        { sumEncoding = ObjectWithSingleField
        , omitNothingFields = True
        }

instance ToJSON URI where


@@ -2,14 +2,14 @@
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}

module Foliage.Options (
  parseCommand,
  Command (..),
  BuildOptions (..),
  SignOptions (..),
  ImportIndexOptions (..),
  ImportFilter (..),
)
where

import Development.Shake.Classes (Binary, Hashable, NFData)

@@ -47,13 +47,13 @@ data SignOptions
  deriving anyclass (Binary, Hashable, NFData)

data BuildOptions = BuildOptions
  { buildOptsSignOpts :: SignOptions
  , buildOptsCurrentTime :: Maybe UTCTime
  , buildOptsExpireSignaturesOn :: Maybe UTCTime
  , buildOptsInputDir :: FilePath
  , buildOptsOutputDir :: FilePath
  , buildOptsNumThreads :: Int
  , buildOptsWriteMetadata :: Bool
  }

buildCommand :: Parser Command

@@ -107,20 +107,20 @@ buildCommand =
          <> showDefault
        )
    )
  where
    signOpts =
      ( SignOptsSignWithKeys
          <$> strOption
            ( long "keys"
                <> metavar "KEYS"
                <> help "TUF keys location"
                <> showDefault
                <> value "_keys"
            )
      )
        <|> ( SignOptsDon'tSign
                <$ switch (long "no-signatures" <> help "Don't sign the repository")
            )

createKeysCommand :: Parser Command
createKeysCommand =


@@ -3,15 +3,15 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}

module Foliage.Pages (
  allPackagesPageTemplate,
  allPackageVersionsPageTemplate,
  packageVersionPageTemplate,
  makeAllPackagesPage,
  makePackageVersionPage,
  makeAllPackageVersionsPage,
  makeIndexPage,
)
where

import Data.Aeson (KeyValue ((.=)), ToJSON, object)

@@ -47,11 +47,11 @@ makeIndexPage outputDir =
      object []

data AllPackagesPageEntry = AllPackagesPageEntry
  { allPackagesPageEntryPkgId :: PackageIdentifier
  , allPackagesPageEntryTimestamp :: UTCTime
  , allPackagesPageEntryTimestampPosix :: POSIXTime
  , allPackagesPageEntrySource :: PackageVersionSource
  , allPackagesPageEntryLatestRevisionTimestamp :: Maybe UTCTime
  }
  deriving stock (Generic)
  deriving (ToJSON) via MyAesonEncoding AllPackagesPageEntry

@@ -63,47 +63,47 @@ makeAllPackagesPage currentTime outputDir packageVersions =
    TL.writeFile (outputDir </> "all-packages" </> "index.html") $
      renderMustache allPackagesPageTemplate $
        object ["packages" .= packages]
  where
    packages =
      packageVersions
        -- group package versions by package name
        & NE.groupBy ((==) `on` (pkgName . pkgId))
        -- for each package name pick the most recent version
        & map
          ( \group ->
              group
                -- sort them from the most recent version to the least recent
                & NE.sortBy (comparing $ Down . pkgVersion . pkgId)
                -- pick the most recent version
                & NE.head
                -- turn it into the template data
                & ( \(PreparedPackageVersion{pkgId, pkgTimestamp, cabalFileRevisions, pkgVersionSource}) ->
                      AllPackagesPageEntry
                        { allPackagesPageEntryPkgId = pkgId
                        , allPackagesPageEntryTimestamp = fromMaybe currentTime pkgTimestamp
                        , allPackagesPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp)
                        , allPackagesPageEntrySource = pkgVersionSource
                        , allPackagesPageEntryLatestRevisionTimestamp = fst <$> listToMaybe cabalFileRevisions
                        }
                  )
          )
        -- sort packages by pkgId
        & sortOn allPackagesPageEntryPkgId

-- FIXME: refactor this
data AllPackageVersionsPageEntry
  = AllPackageVersionsPageEntryPackage
      { allPackageVersionsPageEntryPkgId :: PackageIdentifier
      , allPackageVersionsPageEntryTimestamp :: UTCTime
      , allPackageVersionsPageEntryTimestampPosix :: POSIXTime
      , allPackageVersionsPageEntrySource :: PackageVersionSource
      , allPackageVersionsPageEntryDeprecated :: Bool
      }
  | AllPackageVersionsPageEntryRevision
      { allPackageVersionsPageEntryPkgId :: PackageIdentifier
      , allPackageVersionsPageEntryTimestamp :: UTCTime
      , allPackageVersionsPageEntryTimestampPosix :: POSIXTime
      , allPackageVersionsPageEntryDeprecated :: Bool
      }
  deriving stock (Generic)
  deriving (ToJSON) via MyAesonEncoding AllPackageVersionsPageEntry

@@ -115,45 +115,45 @@ makeAllPackageVersionsPage currentTime outputDir packageVersions =
    TL.writeFile (outputDir </> "all-package-versions" </> "index.html") $
      renderMustache allPackageVersionsPageTemplate $
        object ["entries" .= entries]
  where
    entries =
      -- collect all cabal file revisions including the original cabal file
      foldMap
        ( \PreparedPackageVersion{pkgId, pkgTimestamp, pkgVersionSource, pkgVersionIsDeprecated, cabalFileRevisions} ->
            -- original cabal file
            AllPackageVersionsPageEntryPackage
              { allPackageVersionsPageEntryPkgId = pkgId
              , allPackageVersionsPageEntryTimestamp = fromMaybe currentTime pkgTimestamp
              , allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp)
              , allPackageVersionsPageEntrySource = pkgVersionSource
              , allPackageVersionsPageEntryDeprecated = pkgVersionIsDeprecated
              }
              -- list of revisions
              : [ AllPackageVersionsPageEntryRevision
                    { allPackageVersionsPageEntryPkgId = pkgId
                    , allPackageVersionsPageEntryTimestamp = revisionTimestamp
                    , allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds revisionTimestamp
                    , allPackageVersionsPageEntryDeprecated = pkgVersionIsDeprecated
                    }
                | (revisionTimestamp, _) <- cabalFileRevisions
                ]
        )
        packageVersions
        -- sort them by timestamp
        & sortOn (Down . allPackageVersionsPageEntryTimestamp)

makePackageVersionPage :: FilePath -> PreparedPackageVersion -> Action ()
makePackageVersionPage outputDir PreparedPackageVersion{pkgId, pkgTimestamp, pkgVersionSource, pkgDesc, cabalFileRevisions, pkgVersionIsDeprecated} = do
  traced ("webpages / package / " ++ prettyShow pkgId) $ do
    IO.createDirectoryIfMissing True (outputDir </> "package" </> prettyShow pkgId)
    TL.writeFile (outputDir </> "package" </> prettyShow pkgId </> "index.html") $
      renderMustache packageVersionPageTemplate $
        object
          [ "pkgVersionSource" .= pkgVersionSource
          , "cabalFileRevisions" .= map fst cabalFileRevisions
          , "pkgDesc" .= jsonGenericPackageDescription pkgDesc
          , "pkgTimestamp" .= pkgTimestamp
          , "pkgVersionDeprecated" .= pkgVersionIsDeprecated
          ]

indexPageTemplate :: Template


@ -2,23 +2,23 @@
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-} {-# LANGUAGE ViewPatterns #-}
module Foliage.PreparePackageVersion module Foliage.PreparePackageVersion (
( PreparedPackageVersion PreparedPackageVersion (
( pkgId, pkgId,
pkgTimestamp, pkgTimestamp,
pkgVersionSource, pkgVersionSource,
pkgVersionForce, pkgVersionForce,
pkgVersionIsDeprecated, pkgVersionIsDeprecated,
pkgVersionDeprecationChanges, pkgVersionDeprecationChanges,
pkgDesc, pkgDesc,
sdistPath, sdistPath,
cabalFilePath, cabalFilePath,
originalCabalFilePath, originalCabalFilePath,
cabalFileRevisions cabalFileRevisions
), ),
pattern PreparedPackageVersion, pattern PreparedPackageVersion,
preparePackageVersion, preparePackageVersion,
) )
where where
import Control.Monad (unless) import Control.Monad (unless)
@ -42,17 +42,17 @@ import System.FilePath (takeBaseName, takeFileName, (<.>), (</>))
-- TODO: can we ensure that `pkgVersionDeprecationChanges` and `cabalFileRevisions` are -- TODO: can we ensure that `pkgVersionDeprecationChanges` and `cabalFileRevisions` are
-- sorted by timestamp? e.g https://hackage.haskell.org/package/sorted-list -- sorted by timestamp? e.g https://hackage.haskell.org/package/sorted-list
data PreparedPackageVersion = PreparedPackageVersion data PreparedPackageVersion = PreparedPackageVersion
{ pkgId :: PackageId, { pkgId :: PackageId
pkgTimestamp :: Maybe UTCTime, , pkgTimestamp :: Maybe UTCTime
pkgVersionSource :: PackageVersionSource, , pkgVersionSource :: PackageVersionSource
pkgVersionForce :: Bool, , pkgVersionForce :: Bool
pkgVersionIsDeprecated :: Bool, , pkgVersionIsDeprecated :: Bool
pkgVersionDeprecationChanges :: [(UTCTime, Bool)], , pkgVersionDeprecationChanges :: [(UTCTime, Bool)]
pkgDesc :: GenericPackageDescription, , pkgDesc :: GenericPackageDescription
sdistPath :: FilePath, , sdistPath :: FilePath
cabalFilePath :: FilePath, , cabalFilePath :: FilePath
originalCabalFilePath :: FilePath, , originalCabalFilePath :: FilePath
cabalFileRevisions :: [(UTCTime, FilePath)] , cabalFileRevisions :: [(UTCTime, FilePath)]
} }
-- @andreabedini comments: -- @andreabedini comments:
@ -93,27 +93,27 @@ preparePackageVersion inputDir metaFile = do
let pkgId = PackageIdentifier pkgName pkgVersion let pkgId = PackageIdentifier pkgName pkgVersion
pkgSpec <- pkgSpec <-
readPackageVersionSpec' (inputDir </> metaFile) >>= \meta@PackageVersionSpec {..} -> do readPackageVersionSpec' (inputDir </> metaFile) >>= \meta@PackageVersionSpec{..} -> do
case (NE.nonEmpty packageVersionRevisions, packageVersionTimestamp) of case (NE.nonEmpty packageVersionRevisions, packageVersionTimestamp) of
(Just _someRevisions, Nothing) -> (Just _someRevisions, Nothing) ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has cabal file revisions but the package has no timestamp.", [ inputDir </> metaFile <> " has cabal file revisions but the package has no timestamp."
"This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions." , "This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions."
] ]
(Just (NE.sort -> someRevisions), Just ts) (Just (NE.sort -> someRevisions), Just ts)
-- WARN: this should really be a <= -- WARN: this should really be a <=
| revisionTimestamp (NE.head someRevisions) < ts -> | revisionTimestamp (NE.head someRevisions) < ts ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has a revision with timestamp earlier than the package itself.", [ inputDir </> metaFile <> " has a revision with timestamp earlier than the package itself."
"Adjust the timestamps so that all revisions come after the package publication." , "Adjust the timestamps so that all revisions come after the package publication."
] ]
| not (null $ duplicates (revisionTimestamp <$> someRevisions)) -> | not (null $ duplicates (revisionTimestamp <$> someRevisions)) ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has two revisions entries with the same timestamp.", [ inputDir </> metaFile <> " has two revisions entries with the same timestamp."
"Adjust the timestamps so that all the revisions happen at a different time." , "Adjust the timestamps so that all the revisions happen at a different time."
] ]
_otherwise -> return () _otherwise -> return ()
@ -121,15 +121,15 @@ preparePackageVersion inputDir metaFile = do
(Just _someDeprecations, Nothing) -> (Just _someDeprecations, Nothing) ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has deprecations but the package has no timestamp.", [ inputDir </> metaFile <> " has deprecations but the package has no timestamp."
"This combination doesn't make sense. Either add a timestamp on the original package or remove the deprecation." , "This combination doesn't make sense. Either add a timestamp on the original package or remove the deprecation."
] ]
(Just (NE.sort -> someDeprecations), Just ts) (Just (NE.sort -> someDeprecations), Just ts)
| deprecationTimestamp (NE.head someDeprecations) <= ts -> | deprecationTimestamp (NE.head someDeprecations) <= ts ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has a deprecation entry with timestamp earlier (or equal) than the package itself.", [ inputDir </> metaFile <> " has a deprecation entry with timestamp earlier (or equal) than the package itself."
"Adjust the timestamps so that all the (un-)deprecations come after the package publication." , "Adjust the timestamps so that all the (un-)deprecations come after the package publication."
] ]
| not (deprecationIsDeprecated (NE.head someDeprecations)) -> | not (deprecationIsDeprecated (NE.head someDeprecations)) ->
error $ error $
@ -137,14 +137,14 @@ preparePackageVersion inputDir metaFile = do
| not (null $ duplicates (deprecationTimestamp <$> someDeprecations)) -> | not (null $ duplicates (deprecationTimestamp <$> someDeprecations)) ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " has two deprecation entries with the same timestamp.", [ inputDir </> metaFile <> " has two deprecation entries with the same timestamp."
"Adjust the timestamps so that all the (un-)deprecations happen at a different time." , "Adjust the timestamps so that all the (un-)deprecations happen at a different time."
] ]
| not (null $ doubleDeprecations someDeprecations) -> | not (null $ doubleDeprecations someDeprecations) ->
error $ error $
unlines unlines
[ inputDir </> metaFile <> " contains two consecutive deprecations or two consecutive un-deprecations.", [ inputDir </> metaFile <> " contains two consecutive deprecations or two consecutive un-deprecations."
"Make sure deprecations and un-deprecations alternate in time." , "Make sure deprecations and un-deprecations alternate in time."
] ]
_otherwise -> return () _otherwise -> return ()
@ -156,11 +156,11 @@ preparePackageVersion inputDir metaFile = do
cabalFileRevisionPath revisionNumber = cabalFileRevisionPath revisionNumber =
joinPath joinPath
[ inputDir, [ inputDir
prettyShow pkgName, , prettyShow pkgName
prettyShow pkgVersion, , prettyShow pkgVersion
"revisions", , "revisions"
show revisionNumber , show revisionNumber
] ]
<.> "cabal" <.> "cabal"
@@ -178,47 +178,47 @@ preparePackageVersion inputDir metaFile = do
unless (takeFileName sdistPath == expectedSdistName) $ do unless (takeFileName sdistPath == expectedSdistName) $ do
error $ error $
unlines unlines
[ "creating a source distribution for " ++ prettyShow pkgId ++ " has failed because", [ "creating a source distribution for " ++ prettyShow pkgId ++ " has failed because"
"cabal has produced a source distribtion that does not match the expected file name:", , "cabal has produced a source distribtion that does not match the expected file name:"
"actual: " ++ takeBaseName sdistPath, , "actual: " ++ takeBaseName sdistPath
"expected: " ++ expectedSdistName, , "expected: " ++ expectedSdistName
"possible cause: the package name and/or version implied by the metadata file path does not match what is contained in the cabal file", , "possible cause: the package name and/or version implied by the metadata file path does not match what is contained in the cabal file"
"metadata file: " ++ metaFile, , "metadata file: " ++ metaFile
"version in cabal file: " ++ prettyShow (Distribution.Types.PackageId.pkgVersion $ package $ packageDescription pkgDesc) , "version in cabal file: " ++ prettyShow (Distribution.Types.PackageId.pkgVersion $ package $ packageDescription pkgDesc)
] ]
let cabalFileRevisions = let cabalFileRevisions =
sortOn sortOn
Down Down
[ (revisionTimestamp, cabalFileRevisionPath revisionNumber) [ (revisionTimestamp, cabalFileRevisionPath revisionNumber)
| RevisionSpec {revisionTimestamp, revisionNumber} <- packageVersionRevisions pkgSpec | RevisionSpec{revisionTimestamp, revisionNumber} <- packageVersionRevisions pkgSpec
] ]
let pkgVersionDeprecationChanges = let pkgVersionDeprecationChanges =
sortOn sortOn
Down Down
[ (deprecationTimestamp, deprecationIsDeprecated) [ (deprecationTimestamp, deprecationIsDeprecated)
| DeprecationSpec {deprecationTimestamp, deprecationIsDeprecated} <- packageVersionDeprecations pkgSpec | DeprecationSpec{deprecationTimestamp, deprecationIsDeprecated} <- packageVersionDeprecations pkgSpec
] ]
let pkgVersionIsDeprecated = maybe False snd $ listToMaybe pkgVersionDeprecationChanges let pkgVersionIsDeprecated = maybe False snd $ listToMaybe pkgVersionDeprecationChanges
return return
PreparedPackageVersion PreparedPackageVersion
{ pkgId, { pkgId
pkgTimestamp = packageVersionTimestamp pkgSpec, , pkgTimestamp = packageVersionTimestamp pkgSpec
pkgVersionSource = packageVersionSource pkgSpec, , pkgVersionSource = packageVersionSource pkgSpec
pkgVersionForce = packageVersionForce pkgSpec, , pkgVersionForce = packageVersionForce pkgSpec
pkgVersionDeprecationChanges, , pkgVersionDeprecationChanges
pkgVersionIsDeprecated, , pkgVersionIsDeprecated
pkgDesc, , pkgDesc
sdistPath, , sdistPath
cabalFilePath, , cabalFilePath
originalCabalFilePath, , originalCabalFilePath
cabalFileRevisions , cabalFileRevisions
} }
duplicates :: Ord a => NE.NonEmpty a -> [a] duplicates :: (Ord a) => NE.NonEmpty a -> [a]
duplicates = mapMaybe (listToMaybe . NE.tail) . NE.group duplicates = mapMaybe (listToMaybe . NE.tail) . NE.group
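Aside (not part of the diff): duplicates only reports runs of adjacent equal elements, which is why the call sites sort their input first (note the NE.sort view patterns above). A GHCi-style illustration with made-up values:

ghci> import qualified Data.List.NonEmpty as NE
ghci> duplicates (NE.fromList [1, 1, 2, 3, 3, 3])
[1,3]
ghci> duplicates (NE.fromList [3, 1, 3])  -- unsorted input: the repeated 3 goes unnoticed
[]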
doubleDeprecations :: NE.NonEmpty DeprecationSpec -> [NE.NonEmpty DeprecationSpec] doubleDeprecations :: NE.NonEmpty DeprecationSpec -> [NE.NonEmpty DeprecationSpec]


@@ -2,10 +2,10 @@
{-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
module Foliage.PrepareSdist module Foliage.PrepareSdist (
( prepareSdist, prepareSdist,
addPrepareSdistRule, addPrepareSdistRule,
) )
where where
import Control.Monad (when) import Control.Monad (when)
@@ -41,70 +41,70 @@ prepareSdist srcDir = apply1 $ PrepareSdistRule srcDir
addPrepareSdistRule :: Path Absolute -> Rules () addPrepareSdistRule :: Path Absolute -> Rules ()
addPrepareSdistRule outputDirRoot = addBuiltinRule noLint noIdentity run addPrepareSdistRule outputDirRoot = addBuiltinRule noLint noIdentity run
where where
run :: PrepareSdistRule -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath) run :: PrepareSdistRule -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath)
run (PrepareSdistRule srcDir) (Just old) RunDependenciesSame = do run (PrepareSdistRule srcDir) (Just old) RunDependenciesSame = do
let (hvExpected, path) = load old let (hvExpected, path) = load old
-- Check the hash of the sdist: if the sdist is still there and it is -- Check the hash of the sdist: if the sdist is still there and it is
-- indeed what we expect, signal that nothing changed. Otherwise -- indeed what we expect, signal that nothing changed. Otherwise
-- warn the user and proceed to recompute. -- warn the user and proceed to recompute.
ehvExisting <- liftIO $ tryIOError $ readFileHashValue path ehvExisting <- liftIO $ tryIOError $ readFileHashValue path
case ehvExisting of case ehvExisting of
Right hvExisting Right hvExisting
| hvExisting == hvExpected -> | hvExisting == hvExpected ->
return RunResult {runChanged = ChangedNothing, runStore = old, runValue = path} return RunResult{runChanged = ChangedNothing, runStore = old, runValue = path}
Right hvExisting -> do Right hvExisting -> do
putWarn $ "Changed " ++ path ++ " (expecting hash " ++ showHashValue hvExpected ++ " found " ++ showHashValue hvExisting ++ "). I will rebuild it." putWarn $ "Changed " ++ path ++ " (expecting hash " ++ showHashValue hvExpected ++ " found " ++ showHashValue hvExisting ++ "). I will rebuild it."
run (PrepareSdistRule srcDir) (Just old) RunDependenciesChanged run (PrepareSdistRule srcDir) (Just old) RunDependenciesChanged
Left _e -> do Left _e -> do
putWarn $ "Unable to read " ++ path ++ ". I will rebuild it." putWarn $ "Unable to read " ++ path ++ ". I will rebuild it."
run (PrepareSdistRule srcDir) (Just old) RunDependenciesChanged run (PrepareSdistRule srcDir) (Just old) RunDependenciesChanged
run (PrepareSdistRule srcDir) old _mode = do run (PrepareSdistRule srcDir) old _mode = do
-- create the sdist distribution -- create the sdist distribution
(hv, path) <- makeSdist srcDir (hv, path) <- makeSdist srcDir
let new = save (hv, path) let new = save (hv, path)
let changed = case fmap ((== hv) . fst . load) old of let changed = case fmap ((== hv) . fst . load) old of
Just True -> ChangedRecomputeSame Just True -> ChangedRecomputeSame
_differentOrMissing -> ChangedRecomputeDiff _differentOrMissing -> ChangedRecomputeDiff
when (changed == ChangedRecomputeSame) $ when (changed == ChangedRecomputeSame) $
putInfo ("Wrote " ++ path ++ " (same hash " ++ showHashValue hv ++ ")") putInfo ("Wrote " ++ path ++ " (same hash " ++ showHashValue hv ++ ")")
when (changed == ChangedRecomputeDiff) $ when (changed == ChangedRecomputeDiff) $
putInfo ("Wrote " ++ path ++ " (new hash " ++ showHashValue hv ++ ")") putInfo ("Wrote " ++ path ++ " (new hash " ++ showHashValue hv ++ ")")
return $ RunResult {runChanged = changed, runStore = new, runValue = path} return $ RunResult{runChanged = changed, runStore = new, runValue = path}
makeSdist srcDir = do makeSdist srcDir = do
cabalFiles <- getDirectoryFiles srcDir ["*.cabal"] cabalFiles <- getDirectoryFiles srcDir ["*.cabal"]
let cabalFile = case cabalFiles of let cabalFile = case cabalFiles of
[f] -> f [f] -> f
fs -> fs ->
error $ error $
unlines unlines
[ "Invalid source directory: " ++ srcDir, [ "Invalid source directory: " ++ srcDir
"It contains multiple cabal files, while only one is allowed", , "It contains multiple cabal files, while only one is allowed"
unwords fs , unwords fs
] ]
traced "cabal sdist" $ do traced "cabal sdist" $ do
gpd <- readGenericPackageDescription Verbosity.normal (srcDir </> cabalFile) gpd <- readGenericPackageDescription Verbosity.normal (srcDir </> cabalFile)
let pkgId = packageId gpd let pkgId = packageId gpd
packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath
IO.createDirectoryIfMissing True (takeDirectory path) IO.createDirectoryIfMissing True (takeDirectory path)
sdist <- packageDirToSdist Verbosity.normal gpd srcDir sdist <- packageDirToSdist Verbosity.normal gpd srcDir
BSL.writeFile path sdist BSL.writeFile path sdist
return (SHA256.hashlazy sdist, path) return (SHA256.hashlazy sdist, path)
save :: (BS.ByteString, FilePath) -> BS.ByteString save :: (BS.ByteString, FilePath) -> BS.ByteString
save = BSL.toStrict . Binary.encode save = BSL.toStrict . Binary.encode
load :: BS.ByteString -> (BS.ByteString, FilePath) load :: BS.ByteString -> (BS.ByteString, FilePath)
load = Binary.decode . BSL.fromStrict load = Binary.decode . BSL.fromStrict
readFileHashValue :: FilePath -> IO BS.ByteString readFileHashValue :: FilePath -> IO BS.ByteString
readFileHashValue = fmap SHA256.hash . BS.readFile readFileHashValue = fmap SHA256.hash . BS.readFile


@@ -40,83 +40,83 @@ prepareSource pkgId pkgMeta = apply1 $ PrepareSourceRule pkgId pkgMeta
addPrepareSourceRule :: FilePath -> FilePath -> Rules () addPrepareSourceRule :: FilePath -> FilePath -> Rules ()
addPrepareSourceRule inputDir cacheDir = addBuiltinRule noLint noIdentity run addPrepareSourceRule inputDir cacheDir = addBuiltinRule noLint noIdentity run
where where
run :: PrepareSourceRule -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath) run :: PrepareSourceRule -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath)
run (PrepareSourceRule pkgId pkgMeta) _old mode = do run (PrepareSourceRule pkgId pkgMeta) _old mode = do
let PackageIdentifier {pkgName, pkgVersion} = pkgId let PackageIdentifier{pkgName, pkgVersion} = pkgId
let PackageVersionSpec {packageVersionSource, packageVersionForce} = pkgMeta let PackageVersionSpec{packageVersionSource, packageVersionForce} = pkgMeta
let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion
case mode of case mode of
RunDependenciesSame -> RunDependenciesSame ->
return $ RunResult ChangedNothing BS.empty srcDir return $ RunResult ChangedNothing BS.empty srcDir
RunDependenciesChanged -> do RunDependenciesChanged -> do
-- FIXME too much rework? -- FIXME too much rework?
-- this action only depends on the tarball and the package metadata -- this action only depends on the tarball and the package metadata
-- delete everything inside the package source tree -- delete everything inside the package source tree
liftIO $ do liftIO $ do
-- FIXME this should only delete inside srcDir but apparently -- FIXME this should only delete inside srcDir but apparently
-- also deletes srcDir itself -- also deletes srcDir itself
removeFiles srcDir ["//*"] removeFiles srcDir ["//*"]
IO.createDirectoryIfMissing True srcDir IO.createDirectoryIfMissing True srcDir
case packageVersionSource of case packageVersionSource of
TarballSource url mSubdir -> do TarballSource url mSubdir -> do
tarballPath <- fetchRemoteAsset url tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do withTempDir $ \tmpDir -> do
cmd_ "tar xzf" [tarballPath] "-C" [tmpDir] cmd_ "tar xzf" [tarballPath] "-C" [tmpDir]
-- Special treatment of top-level directory: we remove it -- Special treatment of top-level directory: we remove it
-- --
-- Note: Don't let shake look into tmpDir! it will cause -- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new -- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir tdir = fix2 $ fix1 tmpDir
cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir] cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir]
-- --
-- This is almost identical to the above but we get to keep the -- This is almost identical to the above but we get to keep the
-- metadata. -- metadata.
-- --
GitHubSource repo rev mSubdir -> do GitHubSource repo rev mSubdir -> do
let url = githubRepoTarballUrl repo rev let url = githubRepoTarballUrl repo rev
tarballPath <- fetchRemoteAsset url tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do withTempDir $ \tmpDir -> do
cmd_ "tar xzf" [tarballPath] "-C" [tmpDir] cmd_ "tar xzf" [tarballPath] "-C" [tmpDir]
-- Special treatment of top-level directory: we remove it -- Special treatment of top-level directory: we remove it
-- --
-- Note: Don't let shake look into tmpDir! it will cause -- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new -- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir tdir = fix2 $ fix1 tmpDir
cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir] cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir]
let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches" let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches"
hasPatches <- doesDirectoryExist patchesDir hasPatches <- doesDirectoryExist patchesDir
when hasPatches $ do when hasPatches $ do
patchfiles <- getDirectoryFiles patchesDir ["*.patch"] patchfiles <- getDirectoryFiles patchesDir ["*.patch"]
for_ patchfiles $ \patchfile -> do for_ patchfiles $ \patchfile -> do
let patch = patchesDir </> patchfile let patch = patchesDir </> patchfile
cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1" cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1"
when packageVersionForce $ do when packageVersionForce $ do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal" let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
putInfo $ "Updating version in cabal file" ++ cabalFilePath putInfo $ "Updating version in cabal file" ++ cabalFilePath
liftIO $ rewritePackageVersion cabalFilePath pkgVersion liftIO $ rewritePackageVersion cabalFilePath pkgVersion
return $ RunResult ChangedRecomputeDiff BS.empty srcDir return $ RunResult ChangedRecomputeDiff BS.empty srcDir
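Illustration (the directory names are made up, not part of the commit) of how the two fix-ups above locate the package root inside the unpacked tarball before it is copied into srcDir:

-- ls' = ["foo-1.0"]            -- a single top-level directory remains after filtering "." and ".."
-- fix1 tmpDir == tmpDir </> "foo-1.0"
-- mSubdir = Just "core"        -- the package lives in a subdirectory of the repository
-- fix2 (fix1 tmpDir) == tmpDir </> "foo-1.0" </> "core"
-- tdir is then copied (dereferencing symlinks) into srcDir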


@@ -2,10 +2,10 @@
{-# LANGUAGE DerivingVia #-} {-# LANGUAGE DerivingVia #-}
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeFamilies #-}
module Foliage.RemoteAsset module Foliage.RemoteAsset (
( fetchRemoteAsset, fetchRemoteAsset,
addFetchRemoteAssetRule, addFetchRemoteAssetRule,
) )
where where
import Control.Monad import Control.Monad
@@ -38,32 +38,32 @@ fetchRemoteAsset = apply1 . RemoteAsset
addFetchRemoteAssetRule :: FilePath -> Rules () addFetchRemoteAssetRule :: FilePath -> Rules ()
addFetchRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run addFetchRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run
where where
run :: BuiltinRun RemoteAsset FilePath run :: BuiltinRun RemoteAsset FilePath
run (RemoteAsset uri) old _mode = do run (RemoteAsset uri) old _mode = do
unless (uriQuery uri == "") $ unless (uriQuery uri == "") $
error ("Query elements in URI are not supported: " <> show uri) error ("Query elements in URI are not supported: " <> show uri)
unless (uriFragment uri == "") $ unless (uriFragment uri == "") $
error ("Fragments in URI are not supported: " <> show uri) error ("Fragments in URI are not supported: " <> show uri)
let scheme = dropWhileEnd (not . isAlpha) $ uriScheme uri let scheme = dropWhileEnd (not . isAlpha) $ uriScheme uri
let host = maybe (error $ "invalid uri " ++ show uri) uriRegName (uriAuthority uri) let host = maybe (error $ "invalid uri " ++ show uri) uriRegName (uriAuthority uri)
let path = cacheDir </> joinPath (scheme : host : pathSegments uri) let path = cacheDir </> joinPath (scheme : host : pathSegments uri)
-- parse etag from store -- parse etag from store
let oldETag = fromMaybe BS.empty old let oldETag = fromMaybe BS.empty old
newETag <- newETag <-
withTempFile $ \etagFile -> do withTempFile $ \etagFile -> do
liftIO $ createDirectoryIfMissing True (takeDirectory path) liftIO $ createDirectoryIfMissing True (takeDirectory path)
liftIO $ BS.writeFile etagFile oldETag liftIO $ BS.writeFile etagFile oldETag
actionRetry 5 $ runCurl uri path etagFile actionRetry 5 $ runCurl uri path etagFile
let changed = if newETag == oldETag then ChangedRecomputeSame else ChangedRecomputeDiff let changed = if newETag == oldETag then ChangedRecomputeSame else ChangedRecomputeDiff
return $ RunResult {runChanged = changed, runStore = newETag, runValue = path} return $ RunResult{runChanged = changed, runStore = newETag, runValue = path}
runCurl :: URI -> String -> String -> Action ETag runCurl :: URI -> String -> String -> Action ETag
runCurl uri path etagFile = do runCurl uri path etagFile = do
@@ -71,31 +71,31 @@ runCurl uri path etagFile = do
traced "curl" $ traced "curl" $
cmd cmd
Shell Shell
[ "curl", [ "curl"
-- Silent or quiet mode. Do not show progress meter or error messages. Makes Curl mute. , -- Silent or quiet mode. Do not show progress meter or error messages. Makes Curl mute.
"--silent", "--silent"
-- Fail fast with no output at all on server errors. , -- Fail fast with no output at all on server errors.
"--fail", "--fail"
-- If the server reports that the requested page has moved to a different location this , -- If the server reports that the requested page has moved to a different location this
-- option will make curl redo the request on the new place. -- option will make curl redo the request on the new place.
-- NOTE: This is needed because github always replies with a redirect -- NOTE: This is needed because github always replies with a redirect
"--location", "--location"
-- This option makes a conditional HTTP request for the specific ETag read from the , -- This option makes a conditional HTTP request for the specific ETag read from the
-- given file by sending a custom If-None-Match header using the stored ETag. -- given file by sending a custom If-None-Match header using the stored ETag.
-- For correct results, make sure that the specified file contains only a single line -- For correct results, make sure that the specified file contains only a single line
-- with the desired ETag. An empty file is parsed as an empty ETag. -- with the desired ETag. An empty file is parsed as an empty ETag.
"--etag-compare", "--etag-compare"
etagFile, , etagFile
-- This option saves an HTTP ETag to the specified file. If no ETag is sent by the server, , -- This option saves an HTTP ETag to the specified file. If no ETag is sent by the server,
-- an empty file is created. -- an empty file is created.
"--etag-save", "--etag-save"
etagFile, , etagFile
-- Write output to <file> instead of stdout. , -- Write output to <file> instead of stdout.
"--output", "--output"
path, , path
"--write-out", , "--write-out"
"%{json}", , "%{json}"
-- URL to fetch , -- URL to fetch
show uri show uri
] ]
case exitCode of case exitCode of
@@ -107,11 +107,11 @@ runCurl uri path etagFile = do
Left err -> Left err ->
error $ error $
unlines unlines
[ "curl failed with return code " ++ show c ++ " while fetching " ++ show uri, [ "curl failed with return code " ++ show c ++ " while fetching " ++ show uri
"Error while reading curl diagnostic: " ++ err , "Error while reading curl diagnostic: " ++ err
] ]
-- We can consider displaying different messages based on some fields (e.g. response_code) -- We can consider displaying different messages based on some fields (e.g. response_code)
Right CurlWriteOut {errormsg} -> Right CurlWriteOut{errormsg} ->
error errormsg error errormsg
type ETag = BS.ByteString type ETag = BS.ByteString
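A minimal sketch (not from this commit; the cache directory and URL are invented) of how the exported pieces above are meant to be wired into a Shake build:

import Development.Shake (Rules, action, putInfo)
import Foliage.RemoteAsset (addFetchRemoteAssetRule, fetchRemoteAsset)
import Network.URI (parseURI)

remoteAssetDemo :: Rules ()
remoteAssetDemo = do
  -- register the custom rule once, pointing it at a cache directory
  addFetchRemoteAssetRule "_cache"
  action $ do
    let Just uri = parseURI "https://example.com/archive/pkg-1.0.tar.gz"
    -- downloads on the first run; afterwards re-fetches only when the server reports a new ETag
    path <- fetchRemoteAsset uri
    putInfo ("asset cached at " ++ path)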


@@ -1,9 +1,9 @@
module Foliage.Shake module Foliage.Shake (
( computeFileInfoSimple', computeFileInfoSimple',
readKeysAt, readKeysAt,
readPackageVersionSpec', readPackageVersionSpec',
readGenericPackageDescription', readGenericPackageDescription',
) )
where where
import Data.Traversable (for) import Data.Traversable (for)


@@ -1,16 +1,16 @@
{-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -Wno-orphans #-} {-# OPTIONS_GHC -Wno-orphans #-}
module Foliage.Time module Foliage.Time (
( iso8601ParseM, iso8601ParseM,
iso8601Show, iso8601Show,
getCurrentTime, getCurrentTime,
UTCTime (..), UTCTime (..),
utcTimeToPOSIXSeconds, utcTimeToPOSIXSeconds,
addUTCTime, addUTCTime,
nominalDay, nominalDay,
truncateSeconds, truncateSeconds,
) )
where where
import Data.Time import Data.Time


@@ -7,12 +7,12 @@ import Distribution.Types.Lens
import Distribution.Types.Version import Distribution.Types.Version
import Distribution.Verbosity import Distribution.Verbosity
rewritePackageVersion :: rewritePackageVersion
-- | path to @.cabal@ file :: FilePath
FilePath -> -- ^ path to @.cabal@ file
-- | new version -> Version
Version -> -- ^ new version
IO () -> IO ()
rewritePackageVersion cabalPath ver = do rewritePackageVersion cabalPath ver = do
gpd <- readGenericPackageDescription normal cabalPath gpd <- readGenericPackageDescription normal cabalPath
writeGenericPackageDescription cabalPath (set (packageDescription . package . pkgVersion) ver gpd) writeGenericPackageDescription cabalPath (set (packageDescription . package . pkgVersion) ver gpd)


@@ -15,8 +15,8 @@ newtype MyAesonEncoding a = MyAesonEncoding a
myOptions :: Options myOptions :: Options
myOptions = myOptions =
defaultOptions defaultOptions
{ sumEncoding = ObjectWithSingleField, { sumEncoding = ObjectWithSingleField
omitNothingFields = True , omitNothingFields = True
} }
instance (Generic a, GToJSON' Value Zero (Rep a), GToJSON' Encoding Zero (Rep a)) => ToJSON (MyAesonEncoding a) where instance (Generic a, GToJSON' Value Zero (Rep a), GToJSON' Encoding Zero (Rep a)) => ToJSON (MyAesonEncoding a) where
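For context, an illustration with a made-up type (and assuming the instance body, cut off by this hunk, applies myOptions via genericToJSON): ObjectWithSingleField wraps each constructor in a single-field object, and omitNothingFields drops absent optional fields.

{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE DerivingVia #-}

import Data.Aeson (ToJSON)
import GHC.Generics (Generic)

-- MyAesonEncoding is the newtype above; its defining module name is elided in this diff.
data Source
  = Tarball {url :: String, subdir :: Maybe String}
  | Git {repo :: String}
  deriving stock (Generic)
  deriving (ToJSON) via MyAesonEncoding Source

-- encode (Tarball "https://example.com/a.tgz" Nothing)
--   ~ {"Tarball":{"url":"https://example.com/a.tgz"}}   (subdir omitted because it is Nothing)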


@@ -1,6 +1,6 @@
module Foliage.Utils.GitHub module Foliage.Utils.GitHub (
( githubRepoTarballUrl, githubRepoTarballUrl,
) )
where where
import Data.Text qualified as T import Data.Text qualified as T
@@ -11,7 +11,7 @@ import System.FilePath ((</>))
githubRepoTarballUrl :: GitHubRepo -> GitHubRev -> URI githubRepoTarballUrl :: GitHubRepo -> GitHubRev -> URI
githubRepoTarballUrl repo rev = githubRepoTarballUrl repo rev =
nullURI nullURI
{ uriScheme = "https:", { uriScheme = "https:"
uriAuthority = Just nullURIAuth {uriRegName = "github.com"}, , uriAuthority = Just nullURIAuth{uriRegName = "github.com"}
uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev) , uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev)
} }
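For example (the owner/repo and rev values are invented, and the GitHubRepo and GitHubRev constructor names are assumed from their un* accessors):

-- githubRepoTarballUrl (GitHubRepo "haskell/foliage") (GitHubRev "main")
--   renders as https://github.com/haskell/foliage/tarball/main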

50
fourmolu.yaml Normal file

@@ -0,0 +1,50 @@
# Number of spaces per indentation step
indentation: 2
# Max line length for automatic line breaking
column-limit: none
# Styling of arrows in type signatures (choices: trailing, leading, or leading-args)
function-arrows: leading
# How to place commas in multi-line lists, records, etc. (choices: leading or trailing)
comma-style: leading
# Styling of import/export lists (choices: leading, trailing, or diff-friendly)
import-export-style: diff-friendly
# Whether to full-indent or half-indent 'where' bindings past the preceding body
indent-wheres: false
# Whether to leave a space before an opening record brace
record-brace-space: false
# Number of spaces between top-level declarations
newlines-between-decls: 1
# How to print Haddock comments (choices: single-line, multi-line, or multi-line-compact)
haddock-style: multi-line
# How to print module docstring
haddock-style-module: null
# Styling of let blocks (choices: auto, inline, newline, or mixed)
let-style: auto
# How to align the 'in' keyword with respect to the 'let' keyword (choices: left-align, right-align, or no-space)
in-style: right-align
# Whether to put parentheses around a single constraint (choices: auto, always, or never)
single-constraint-parens: always
# Output Unicode syntax (choices: detect, always, or never)
unicode: never
# Give the programmer more choice on where to insert blank lines
respectful: true
# Fixity information for operators
fixities: []
# Module reexports Fourmolu should know about
reexports: []
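As a rough illustration (a made-up snippet, not from this repository): when a signature or list is already spread over several lines, these settings (leading commas, leading arrows, two-space indentation, no automatic line breaking) lay it out like this:

sayHello
  :: String
  -> String
  -> IO ()
sayHello greeting name =
  putStrLn $
    unwords
      [ greeting
      , name
      ]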