Merge remote-tracking branch 'new'

Andrea Bedini 2022-10-05 15:11:48 +08:00
commit b13ecafb31
10 changed files with 431 additions and 526 deletions

Foliage.CmdBuild

@@ -1,5 +1,3 @@
-{-# OPTIONS_GHC -Wno-name-shadowing #-}
module Foliage.CmdBuild (cmdBuild) where

import Codec.Archive.Tar qualified as Tar
@@ -7,32 +5,44 @@ import Codec.Archive.Tar.Entry qualified as Tar
import Codec.Compression.GZip qualified as GZip
import Control.Monad (unless, when)
import Data.ByteString.Lazy qualified as BSL
-import Data.Foldable (for_)
import Data.List (sortOn)
import Data.Maybe (fromMaybe)
-import Data.Text qualified as T
import Data.Traversable (for)
import Development.Shake
import Development.Shake.FilePath
-import Distribution.Client.SrcDist (packageDirToSdist)
import Distribution.Package
import Distribution.Parsec (simpleParsec)
-import Distribution.Pretty
-import Distribution.Simple.PackageDescription (readGenericPackageDescription)
-import Distribution.Verbosity qualified as Verbosity
+import Distribution.Pretty (prettyShow)
import Foliage.HackageSecurity
import Foliage.Meta
import Foliage.Options
-import Foliage.RemoteAsset (addBuiltinRemoteAssetRule, remoteAssetNeed)
+import Foliage.PrepareSdist
+import Foliage.PrepareSource (addPrepareSourceRule, prepareSource)
+import Foliage.RemoteAsset (addFetchRemoteAssetRule)
import Foliage.Shake
-import Foliage.Shake.Oracle
import Foliage.Time qualified as Time
-import Foliage.UpdateCabalFile (rewritePackageVersion)
-import Network.URI
-import System.Directory qualified as IO
+import Hackage.Security.Util.Path (castRoot, toFilePath)

cmdBuild :: BuildOptions -> IO ()
-cmdBuild
+cmdBuild buildOptions = do
+  outputDirRoot <- liftIO $ makeAbsolute (fromFilePath (buildOptsOutputDir buildOptions))
+  shake opts $
+    do
+      addFetchRemoteAssetRule cacheDir
+      addPrepareSourceRule (buildOptsInputDir buildOptions) cacheDir
+      addPrepareSdistRule outputDirRoot
+      phony "buildAction" (buildAction buildOptions)
+      want ["buildAction"]
+  where
+    cacheDir = "_cache"
+    opts =
+      shakeOptions
+        { shakeFiles = cacheDir,
+          shakeVerbosity = Diagnostic
+        }
+
+buildAction :: BuildOptions -> Action ()
+buildAction
  BuildOptions
    { buildOptsSignOpts = signOpts,
      buildOptsCurrentTime = mCurrentTime,
@@ -40,453 +50,226 @@ cmdBuild
      buildOptsInputDir = inputDir,
      buildOptsOutputDir = outputDir
    } = do
-    let cacheDir = "_cache"
-    let pkgMetaDir PackageIdentifier {pkgName, pkgVersion} =
-          inputDir </> unPackageName pkgName </> prettyShow pkgVersion
+    outputDirRoot <- liftIO $ makeAbsolute (fromFilePath outputDir)
+
    maybeReadKeysAt <- case signOpts of
      SignOptsSignWithKeys keysPath -> do
-        ks <- IO.doesDirectoryExist keysPath
+        ks <- doesDirectoryExist keysPath
        unless ks $ do
-          putStrLn $ "You don't seem to have created a set of TUF keys. I will create one in " <> keysPath
-          createKeys keysPath
+          putWarn $ "You don't seem to have created a set of TUF keys. I will create one in " <> keysPath
+          liftIO $ createKeys keysPath
        return $ \name -> readKeysAt (keysPath </> name)
      SignOptsDon'tSign ->
        return $ const $ return []
-    let opts =
-          shakeOptions
-            { shakeChange = ChangeDigest,
-              shakeFiles = cacheDir,
-              shakeVerbosity = Info
-            }
-    shake opts $ do
-      addBuiltinRemoteAssetRule (cacheDir </> "downloads")
-      --
-      -- Oracles
-      --
-      getCurrentTime <- addOracle $ \GetCurrentTime ->
-        case mCurrentTime of
-          Nothing -> do
-            t <- Time.truncateSeconds <$> liftIO Time.getCurrentTime
-            putInfo $ "Current time set to " <> Time.iso8601Show t <> ". You can set a fixed time using the --current-time option."
-            return t
-          Just t -> do
-            putInfo $ "Current time set to " <> Time.iso8601Show t <> "."
-            return t
-      getExpiryTime <- addOracleCache $ \GetExpiryTime -> do
-        alwaysRerun
-        for mExpireSignaturesOn $ \expireSignaturesOn -> do
-          putInfo $ "Expiry time set to " <> Time.iso8601Show expireSignaturesOn
-          return expireSignaturesOn
-      getPackageVersionMeta <- addOracleCache $ \(GetPackageVersionMeta pkgId) -> do
-        meta <- readPackageVersionMeta' $ pkgMetaDir pkgId </> "meta.toml"
-        -- Here we do some validation of the package metadata. We could
-        -- fine a better place for it.
-        case meta of
-          PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Nothing}
-            | not (null packageVersionRevisions) -> do
-                putError $
-                  "Package " <> prettyShow pkgId
-                    <> " has cabal file revisions but the original package has no timestamp. This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions"
-                fail "invalid package metadata"
-          PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Just pkgTs}
-            | any ((< pkgTs) . revisionTimestamp) packageVersionRevisions -> do
-                putError $
-                  "Package " <> prettyShow pkgId
-                    <> " has a revision with timestamp earlier than the package itself. Adjust the timestamps so that all revisions come after the original package"
-                fail "invalid package metadata"
-          _ ->
-            return meta
-      preparePackageSource <- addOracleCache $ \(PreparePackageSource pkgId) -> do
-        PackageVersionMeta {packageVersionSource, packageVersionForce} <- getPackageVersionMeta (GetPackageVersionMeta pkgId)
-        -- FIXME too much rework?
-        -- this action only depends on the tarball and the package metadata
-        let PackageIdentifier {pkgName, pkgVersion} = pkgId
-        let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion
-        -- delete everything inside the package source tree
-        liftIO $ do
-          -- FIXME this should only delete inside srcDir but apparently
-          -- also deletes srcDir itself
-          removeFiles srcDir ["//*"]
-          IO.createDirectoryIfMissing True srcDir
-        case packageVersionSource of
-          TarballSource url mSubdir -> do
-            tarballPath <- remoteAssetNeed url
-            withTempDir $ \tmpDir -> do
-              cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-              -- Special treatment of top-level directory: we remove it
-              --
-              -- Note: Don't let shake look into tmpDir! it will cause
-              -- unnecessary rework because tmpDir is always new
-              ls <- liftIO $ IO.getDirectoryContents tmpDir
-              let ls' = filter (not . all (== '.')) ls
-              let fix1 = case ls' of [l] -> (</> l); _ -> id
-                  fix2 = case mSubdir of Just s -> (</> s); _ -> id
-                  tdir = fix2 $ fix1 tmpDir
-              cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
-          --
-          -- This is almost identical to the above but we get to keep the
-          -- metadata.
-          --
-          GitHubSource repo rev mSubdir -> do
-            let url =
-                  nullURI
-                    { uriScheme = "https:",
-                      uriAuthority = Just nullURIAuth {uriRegName = "github.com"},
-                      uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev)
-                    }
-            tarballPath <- remoteAssetNeed url
-            withTempDir $ \tmpDir -> do
-              cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-              -- Special treatment of top-level directory: we remove it
-              --
-              -- Note: Don't let shake look into tmpDir! it will cause
-              -- unnecessary rework because tmpDir is always new
-              ls <- liftIO $ IO.getDirectoryContents tmpDir
-              let ls' = filter (not . all (== '.')) ls
-              let fix1 = case ls' of [l] -> (</> l); _ -> id
-                  fix2 = case mSubdir of Just s -> (</> s); _ -> id
-                  tdir = fix2 $ fix1 tmpDir
-              cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
-        applyPatches inputDir srcDir pkgId
-        when packageVersionForce $
-          forcePackageVersion srcDir pkgId
-        return srcDir
-      getPackageDescription <- addOracleCache $ \(GetPackageDescription pkgId) -> do
-        let PackageIdentifier {pkgName, pkgVersion} = pkgId
-        meta <- getPackageVersionMeta $ GetPackageVersionMeta pkgId
-        case latestRevisionNumber meta of
-          Nothing -> do
-            srcDir <- preparePackageSource $ PreparePackageSource pkgId
-            return $ srcDir </> unPackageName pkgName <.> "cabal"
-          Just revNum -> do
-            return $ inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "revisions" </> show revNum <.> "cabal"
-      getPackages <- addOracleCache $ \GetPackages -> do
-        metaFiles <- getDirectoryFiles inputDir ["*/*/meta.toml"]
-        when (null metaFiles) $ do
-          putError $
-            unlines
-              [ "We could not find any package metadata file (i.e. _sources/<name>/<version>/meta.toml)",
-                "Make sure you are passing the right input directory. The default input directory is _sources"
-              ]
-          fail "no package metadata found"
-        return $
-          [ PackageIdentifier name version
-            | path <- metaFiles,
-              let [pkgName, pkgVersion, _] = splitDirectories path,
-              let Just name = simpleParsec pkgName,
-              let Just version = simpleParsec pkgVersion
-          ]
-      --
-      -- Entrypoint
-      --
-      -- This triggers the whole chain of TUF metadata
-      want [outputDir </> "timestamp.json"]
-      -- This build the current index entry for all packages
-      action $ do
-        pkgIds <- getPackages GetPackages
-        need
-          [ outputDir </> "index" </> unPackageName pkgName </> prettyShow pkgVersion </> unPackageName pkgName <.> "cabal"
-            | PackageIdentifier pkgName pkgVersion <- pkgIds
-          ]
-      --
-      -- timestamp.json
-      --
-      outputDir </> "timestamp.json" %> \path -> do
-        snapshotInfo <- computeFileInfoSimple' (outputDir </> "snapshot.json")
-        expires <- getExpiryTime GetExpiryTime
-        let timestamp =
-              Timestamp
-                { timestampVersion = FileVersion 1,
-                  timestampExpires = FileExpires expires,
-                  timestampInfoSnapshot = snapshotInfo
-                }
-        keys <- maybeReadKeysAt "timestamp"
-        let timestampSigned = withSignatures hackageRepoLayout keys timestamp
-        traced "writing" $
-          liftIO $ do
-            p <- makeAbsolute (fromFilePath path)
-            writeJSON hackageRepoLayout p timestampSigned
-      --
-      -- snapshot.json
-      --
-      outputDir </> "snapshot.json" %> \path -> do
-        rootInfo <- computeFileInfoSimple' (outputDir </> "root.json")
-        mirrorsInfo <- computeFileInfoSimple' (outputDir </> "mirrors.json")
-        tarInfo <- computeFileInfoSimple' (outputDir </> "01-index.tar")
-        tarGzInfo <- computeFileInfoSimple' (outputDir </> "01-index.tar.gz")
-        expires <- getExpiryTime GetExpiryTime
-        let snapshot =
-              Snapshot
-                { snapshotVersion = FileVersion 1,
-                  snapshotExpires = FileExpires expires,
-                  snapshotInfoRoot = rootInfo,
-                  snapshotInfoMirrors = mirrorsInfo,
-                  snapshotInfoTar = Just tarInfo,
-                  snapshotInfoTarGz = tarGzInfo
-                }
-        keys <- maybeReadKeysAt "snapshot"
-        let snapshotSigned = withSignatures hackageRepoLayout keys snapshot
-        traced "writing" $
-          liftIO $ do
-            p <- makeAbsolute (fromFilePath path)
-            writeJSON hackageRepoLayout p snapshotSigned
-      --
-      -- root.json
-      --
-      outputDir </> "root.json" %> \path -> do
-        expires <- getExpiryTime GetExpiryTime
-        privateKeysRoot <- maybeReadKeysAt "root"
-        privateKeysTarget <- maybeReadKeysAt "target"
-        privateKeysSnapshot <- maybeReadKeysAt "snapshot"
-        privateKeysTimestamp <- maybeReadKeysAt "timestamp"
-        privateKeysMirrors <- maybeReadKeysAt "mirrors"
-        let root =
-              Root
-                { rootVersion = FileVersion 1,
-                  rootExpires = FileExpires expires,
-                  rootKeys =
-                    fromKeys $
-                      concat
-                        [ privateKeysRoot,
-                          privateKeysTarget,
-                          privateKeysSnapshot,
-                          privateKeysTimestamp,
-                          privateKeysMirrors
-                        ],
-                  rootRoles =
-                    RootRoles
-                      { rootRolesRoot =
-                          RoleSpec
-                            { roleSpecKeys = map somePublicKey privateKeysRoot,
-                              roleSpecThreshold = KeyThreshold 2
-                            },
-                        rootRolesSnapshot =
-                          RoleSpec
-                            { roleSpecKeys = map somePublicKey privateKeysSnapshot,
-                              roleSpecThreshold = KeyThreshold 1
-                            },
-                        rootRolesTargets =
-                          RoleSpec
-                            { roleSpecKeys = map somePublicKey privateKeysTarget,
-                              roleSpecThreshold = KeyThreshold 1
-                            },
-                        rootRolesTimestamp =
-                          RoleSpec
-                            { roleSpecKeys = map somePublicKey privateKeysTimestamp,
-                              roleSpecThreshold = KeyThreshold 1
-                            },
-                        rootRolesMirrors =
-                          RoleSpec
-                            { roleSpecKeys = map somePublicKey privateKeysMirrors,
-                              roleSpecThreshold = KeyThreshold 1
-                            }
-                      }
-                }
-        keys <- maybeReadKeysAt "root"
-        let signedRoot = withSignatures hackageRepoLayout keys root
-        traced "writing" $
-          liftIO $ do
-            p <- makeAbsolute (fromFilePath path)
-            writeJSON hackageRepoLayout p signedRoot
-      --
-      -- mirrors.json
-      --
-      outputDir </> "mirrors.json" %> \path -> do
-        expires <- getExpiryTime GetExpiryTime
-        let mirrors =
-              Mirrors
-                { mirrorsVersion = FileVersion 1,
-                  mirrorsExpires = FileExpires expires,
-                  mirrorsMirrors = []
-                }
-        keys <- maybeReadKeysAt "mirrors"
-        let signedMirrors = withSignatures hackageRepoLayout keys mirrors
-        traced "writing" $
-          liftIO $ do
-            p <- makeAbsolute (fromFilePath path)
-            writeJSON hackageRepoLayout p signedMirrors
-      --
-      -- 01-index.tar
-      --
-      outputDir </> "01-index.tar" %> \path -> do
-        pkgIds <- getPackages GetPackages
-        entries <-
-          flip foldMap pkgIds $ \pkgId -> do
-            let PackageIdentifier {pkgName, pkgVersion} = pkgId
-            PackageVersionMeta {packageVersionTimestamp, packageVersionRevisions} <- getPackageVersionMeta (GetPackageVersionMeta pkgId)
-            let name = unPackageName pkgName
-            let version = prettyShow pkgVersion
-            srcDir <- preparePackageSource $ PreparePackageSource pkgId
-            now <- getCurrentTime GetCurrentTime
-            -- original cabal file
-            cabalEntry <-
-              mkTarEntry
-                (srcDir </> name <.> "cabal")
-                (name </> version </> name <.> "cabal")
-                (fromMaybe now packageVersionTimestamp)
-            -- package.json
-            packageEntry <-
-              mkTarEntry
-                (outputDir </> "index" </> name </> version </> "package.json")
-                (name </> version </> "package.json")
-                (fromMaybe now packageVersionTimestamp)
-            -- revised cabal files
-            revisionEntries <- for packageVersionRevisions $ \RevisionMeta {revisionNumber, revisionTimestamp} ->
-              mkTarEntry
-                (inputDir </> name </> version </> "revisions" </> show revisionNumber <.> "cabal")
-                (name </> version </> name <.> "cabal")
-                revisionTimestamp
-            return $ cabalEntry : packageEntry : revisionEntries
-        traced "writing" $ liftIO $ BSL.writeFile path $ Tar.write $ sortOn Tar.entryTime entries
-      --
-      -- 01-index.tar.gz
-      --
-      outputDir </> "01-index.tar.gz" %> \path -> do
-        tar <- readFileByteStringLazy (outputDir </> "01-index.tar")
-        traced "writing" $ liftIO $ BSL.writeFile path $ GZip.compress tar
-      --
-      -- index cabal files
-      --
-      -- these come either from the package source or the revision files
-      --
-      outputDir </> "index/*/*/*.cabal" %> \path -> do
-        let [_, _, pkgName, pkgVersion, _] = splitDirectories (drop (length outputDir) path)
-        let Just name = simpleParsec pkgName
-        let Just version = simpleParsec pkgVersion
-        let pkgId = PackageIdentifier name version
-        cabalFile <- getPackageDescription (GetPackageDescription pkgId)
-        copyFileChanged cabalFile path
-      --
-      -- index package files (only depends on the source distribution)
-      --
-      outputDir </> "index/*/*/package.json" %> \path -> do
-        let [_, _, pkgName, pkgVersion, _] = splitDirectories (drop (length outputDir) path)
-        let packagePath = "package" </> pkgName <> "-" <> pkgVersion <.> "tar.gz"
-        let targetPath = rootPath $ fromUnrootedFilePath packagePath
-        targetFileInfo <- computeFileInfoSimple' (outputDir </> packagePath)
-        expires <- getExpiryTime GetExpiryTime
-        let targets =
-              Targets
-                { targetsVersion = FileVersion 1,
-                  targetsExpires = FileExpires expires,
-                  targetsTargets = fromList [(TargetPathRepo targetPath, targetFileInfo)],
-                  targetsDelegations = Nothing
-                }
-        keys <- maybeReadKeysAt "target"
-        let signedTargets = withSignatures hackageRepoLayout keys targets
-        liftIO $ do
-          p <- makeAbsolute (fromFilePath path)
-          writeJSON hackageRepoLayout p signedTargets
-      --
-      -- source distributions, including patching
-      --
-      outputDir </> "package/*.tar.gz" %> \path -> do
-        let [_, _, filename] = splitDirectories (drop (length outputDir) path)
-        let Just pkgId = stripExtension "tar.gz" filename >>= simpleParsec
-        cabalFile <- getPackageDescription $ GetPackageDescription pkgId
-        srcDir <- preparePackageSource $ PreparePackageSource pkgId
-        traced "cabal sdist" $ do
-          gpd <- readGenericPackageDescription Verbosity.normal cabalFile
-          packageDirToSdist Verbosity.normal gpd srcDir >>= BSL.writeFile path
-    putStrLn $ "All done. The repository is now available in " <> outputDir <> "."
-
-mkTarEntry :: FilePath -> [Char] -> UTCTime -> Action Tar.Entry
-mkTarEntry filePath indexPath timestamp = do
-  let Right tarPath = Tar.toTarPath False indexPath
-  contents <- readFileByteStringLazy filePath
-  return
-    (Tar.fileEntry tarPath contents)
-      { Tar.entryTime = floor $ Time.utcTimeToPOSIXSeconds timestamp,
-        Tar.entryOwnership =
-          Tar.Ownership
-            { Tar.ownerName = "foliage",
-              Tar.groupName = "foliage",
-              Tar.ownerId = 0,
-              Tar.groupId = 0
-            }
-      }
-
-applyPatches :: [Char] -> FilePath -> PackageId -> Action ()
-applyPatches inputDir srcDir PackageIdentifier {pkgName, pkgVersion} = do
-  let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches"
-  hasPatches <- doesDirectoryExist patchesDir
-  when hasPatches $ do
-    patchfiles <- getDirectoryFiles patchesDir ["*.patch"]
-    for_ patchfiles $ \patchfile -> do
-      let patch = patchesDir </> patchfile
-      cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1"
-
-forcePackageVersion :: FilePath -> PackageId -> Action ()
-forcePackageVersion srcDir PackageIdentifier {pkgName, pkgVersion} = do
-  let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
-  liftIO $ rewritePackageVersion cabalFilePath pkgVersion
+    expiryTime <-
+      for mExpireSignaturesOn $ \expireSignaturesOn -> do
+        putInfo $ "Expiry time set to " <> Time.iso8601Show expireSignaturesOn
+        return expireSignaturesOn
+
+    currentTime <- case mCurrentTime of
+      Nothing -> do
+        t <- Time.truncateSeconds <$> liftIO Time.getCurrentTime
+        putInfo $ "Current time set to " <> Time.iso8601Show t <> ". You can set a fixed time using the --current-time option."
+        return t
+      Just t -> do
+        putInfo $ "Current time set to " <> Time.iso8601Show t <> "."
+        return t
+
+    packages <- getPackages inputDir
+
+    allCabalFiles <-
+      concat
+        <$> for
+          packages
+          ( \(pkgId, pkgMeta) -> do
+              let PackageVersionMeta {packageVersionTimestamp, packageVersionRevisions} = pkgMeta
+              srcDir <- prepareSource pkgId pkgMeta
+              let cabalFilePath = srcDir </> unPackageName (pkgName pkgId) <.> "cabal"
+              let cabalFileTimestamp = fromMaybe currentTime packageVersionTimestamp
+              return $
+                (pkgId, cabalFileTimestamp, cabalFilePath) :
+                map
+                  ( \RevisionMeta {revisionTimestamp, revisionNumber} ->
+                      (pkgId, revisionTimestamp, cabalFileRevisionPath inputDir pkgId revisionNumber)
+                  )
+                  packageVersionRevisions
+          )
+
+    entries1 <- for allCabalFiles $ \(pkgId, timestamp, filePath) ->
+      prepareIndexPkgCabal pkgId timestamp filePath
+
+    targetKeys <- maybeReadKeysAt "target"
+    entries2 <- for packages $ uncurry (prepareIndexPkgMetadata currentTime expiryTime targetKeys)
+
+    let tarContents = Tar.write $ sortOn Tar.entryTime (entries1 ++ entries2)
+    traced "Writing index" $ do
+      BSL.writeFile (anchorPath outputDirRoot repoLayoutIndexTar) tarContents
+      BSL.writeFile (anchorPath outputDirRoot repoLayoutIndexTarGz) $ GZip.compress tarContents
+
+    privateKeysRoot <- maybeReadKeysAt "root"
+    privateKeysTarget <- maybeReadKeysAt "target"
+    privateKeysSnapshot <- maybeReadKeysAt "snapshot"
+    privateKeysTimestamp <- maybeReadKeysAt "timestamp"
+    privateKeysMirrors <- maybeReadKeysAt "mirrors"
+
+    liftIO $
+      writeSignedJSON outputDirRoot repoLayoutMirrors privateKeysMirrors $
+        Mirrors
+          { mirrorsVersion = FileVersion 1,
+            mirrorsExpires = FileExpires expiryTime,
+            mirrorsMirrors = []
+          }
+
+    liftIO $
+      writeSignedJSON outputDirRoot repoLayoutRoot privateKeysRoot $
+        Root
+          { rootVersion = FileVersion 1,
+            rootExpires = FileExpires expiryTime,
+            rootKeys =
+              fromKeys $
+                concat
+                  [ privateKeysRoot,
+                    privateKeysTarget,
+                    privateKeysSnapshot,
+                    privateKeysTimestamp,
+                    privateKeysMirrors
+                  ],
+            rootRoles =
+              RootRoles
+                { rootRolesRoot =
+                    RoleSpec
+                      { roleSpecKeys = map somePublicKey privateKeysRoot,
+                        roleSpecThreshold = KeyThreshold 2
+                      },
+                  rootRolesSnapshot =
+                    RoleSpec
+                      { roleSpecKeys = map somePublicKey privateKeysSnapshot,
+                        roleSpecThreshold = KeyThreshold 1
+                      },
+                  rootRolesTargets =
+                    RoleSpec
+                      { roleSpecKeys = map somePublicKey privateKeysTarget,
+                        roleSpecThreshold = KeyThreshold 1
+                      },
+                  rootRolesTimestamp =
+                    RoleSpec
+                      { roleSpecKeys = map somePublicKey privateKeysTimestamp,
+                        roleSpecThreshold = KeyThreshold 1
+                      },
+                  rootRolesMirrors =
+                    RoleSpec
+                      { roleSpecKeys = map somePublicKey privateKeysMirrors,
+                        roleSpecThreshold = KeyThreshold 1
+                      }
+                }
+          }
+
+    rootInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutRoot)
+    mirrorsInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutMirrors)
+    tarInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutIndexTar)
+    tarGzInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutIndexTarGz)
+
+    liftIO $
+      writeSignedJSON outputDirRoot repoLayoutSnapshot privateKeysSnapshot $
+        Snapshot
+          { snapshotVersion = FileVersion 1,
+            snapshotExpires = FileExpires expiryTime,
+            snapshotInfoRoot = rootInfo,
+            snapshotInfoMirrors = mirrorsInfo,
+            snapshotInfoTar = Just tarInfo,
+            snapshotInfoTarGz = tarGzInfo
+          }
+
+    snapshotInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutSnapshot)
+    liftIO $
+      writeSignedJSON outputDirRoot repoLayoutTimestamp privateKeysTimestamp $
+        Timestamp
+          { timestampVersion = FileVersion 1,
+            timestampExpires = FileExpires expiryTime,
+            timestampInfoSnapshot = snapshotInfo
+          }
+
+getPackages :: FilePath -> Action [(PackageIdentifier, PackageVersionMeta)]
+getPackages inputDir = do
+  metaFiles <- getDirectoryFiles inputDir ["*/*/meta.toml"]
+
+  when (null metaFiles) $ do
+    putError $
+      unlines
+        [ "We could not find any package metadata file (i.e. _sources/<name>/<version>/meta.toml)",
+          "Make sure you are passing the right input directory. The default input directory is _sources"
+        ]
+    fail "no package metadata found"
+
+  for metaFiles $ \metaFile -> do
+    let [pkgName, pkgVersion, _] = splitDirectories metaFile
+    let Just name = simpleParsec pkgName
+    let Just version = simpleParsec pkgVersion
+    let pkgId = PackageIdentifier name version
+
+    meta <-
+      readPackageVersionMeta' (inputDir </> metaFile) >>= \case
+        PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Nothing}
+          | not (null packageVersionRevisions) -> do
+              putError $
+                inputDir </> metaFile <> " has cabal file revisions but the original package has no timestamp. This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions"
+              fail "invalid package metadata"
+        PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Just pkgTs}
+          | any ((< pkgTs) . revisionTimestamp) packageVersionRevisions -> do
+              putError $
+                inputDir </> metaFile <> " has a revision with timestamp earlier than the package itself. Adjust the timestamps so that all revisions come after the original package"
+              fail "invalid package metadata"
+        meta ->
+          return meta
+
+    return (pkgId, meta)
+
+prepareIndexPkgCabal :: PackageId -> UTCTime -> FilePath -> Action Tar.Entry
+prepareIndexPkgCabal pkgId timestamp filePath = do
+  need [filePath]
+  contents <- liftIO $ BSL.readFile filePath
+  return $ mkTarEntry contents (IndexPkgCabal pkgId) timestamp
+
+prepareIndexPkgMetadata :: UTCTime -> Maybe UTCTime -> [Some Key] -> PackageIdentifier -> PackageVersionMeta -> Action Tar.Entry
+prepareIndexPkgMetadata currentTime expiryTime keys pkgId pkgMeta = do
+  let PackageVersionMeta {packageVersionTimestamp} = pkgMeta
+  sdist <- prepareSdist pkgId pkgMeta
+  targetFileInfo <- computeFileInfoSimple' sdist
+  let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
+  let targets =
+        Targets
+          { targetsVersion = FileVersion 1,
+            targetsExpires = FileExpires expiryTime,
+            targetsTargets = fromList [(TargetPathRepo packagePath, targetFileInfo)],
+            targetsDelegations = Nothing
+          }
+  return $
+    mkTarEntry
+      (renderSignedJSON keys targets)
+      (IndexPkgMetadata pkgId)
+      (fromMaybe currentTime packageVersionTimestamp)
+
+mkTarEntry :: BSL.ByteString -> IndexFile dec -> UTCTime -> Tar.Entry
+mkTarEntry contents indexFile timestamp =
+  (Tar.fileEntry tarPath contents)
+    { Tar.entryTime = floor $ Time.utcTimeToPOSIXSeconds timestamp,
+      Tar.entryOwnership =
+        Tar.Ownership
+          { Tar.ownerName = "foliage",
+            Tar.groupName = "foliage",
+            Tar.ownerId = 0,
+            Tar.groupId = 0
+          }
+    }
+  where
+    indexPath = toFilePath $ castRoot $ indexFileToPath hackageIndexLayout indexFile
+    Right tarPath = Tar.toTarPath False indexPath
+
+anchorPath :: Path Absolute -> (RepoLayout -> RepoPath) -> FilePath
+anchorPath outputDirRoot p =
+  toFilePath $ anchorRepoPathLocally outputDirRoot $ p hackageRepoLayout
+
+cabalFileRevisionPath :: FilePath -> PackageIdentifier -> Int -> FilePath
+cabalFileRevisionPath inputDir PackageIdentifier {pkgName, pkgVersion} revisionNumber =
+  inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "revisions" </> show revisionNumber <.> "cabal"

Foliage.HackageSecurity

@@ -11,11 +11,11 @@ module Foliage.HackageSecurity
  where

import Control.Monad (replicateM_)
-import Data.Functor.Identity
+import Data.ByteString.Lazy qualified as BSL
import Hackage.Security.Key.Env (fromKeys)
import Hackage.Security.Server
import Hackage.Security.TUF.FileMap
-import Hackage.Security.Util.Path (fromFilePath, fromUnrootedFilePath, makeAbsolute, rootPath)
+import Hackage.Security.Util.Path (Absolute, Path, fromFilePath, fromUnrootedFilePath, makeAbsolute, rootPath, writeLazyByteString)
import Hackage.Security.Util.Some
import System.Directory (createDirectoryIfMissing)
import System.FilePath
@@ -25,11 +25,6 @@ readJSONSimple fp = do
  p <- makeAbsolute (fromFilePath fp)
  readJSON_NoKeys_NoLayout p

-writeJSONSimple :: ToJSON Identity a => FilePath -> a -> IO ()
-writeJSONSimple fp a = do
-  p <- makeAbsolute (fromFilePath fp)
-  writeJSON_NoLayout p a
-
computeFileInfoSimple :: FilePath -> IO FileInfo
computeFileInfoSimple fp = do
  p <- makeAbsolute (fromFilePath fp)
@@ -56,3 +51,15 @@ writeKey :: FilePath -> Some Key -> IO ()
writeKey fp key = do
  p <- makeAbsolute (fromFilePath fp)
  writeJSON_NoLayout p key
+
+renderSignedJSON :: ToJSON WriteJSON a => [Some Key] -> a -> BSL.ByteString
+renderSignedJSON keys thing =
+  renderJSON
+    hackageRepoLayout
+    (withSignatures hackageRepoLayout keys thing)
+
+writeSignedJSON :: ToJSON WriteJSON a => Path Absolute -> (RepoLayout -> RepoPath) -> [Some Key] -> a -> IO ()
+writeSignedJSON outputDirRoot repoPath keys thing = do
+  writeLazyByteString fp $ renderSignedJSON keys thing
+  where
+    fp = anchorRepoPathLocally outputDirRoot $ repoPath hackageRepoLayout

Foliage.PrepareSdist (new file)

@@ -0,0 +1,55 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
module Foliage.PrepareSdist
( prepareSdist,
addPrepareSdistRule,
)
where
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BSL
import Development.Shake
import Development.Shake.FilePath (takeDirectory, (<.>), (</>))
import Development.Shake.Rule
import Distribution.Client.HashValue (readFileHashValue)
import Distribution.Client.SrcDist (packageDirToSdist)
import Distribution.Compat.Binary (encode)
import Distribution.Simple.PackageDescription (readGenericPackageDescription)
import Distribution.Types.PackageId
import Distribution.Types.PackageName
import Distribution.Verbosity qualified as Verbosity
import Foliage.HackageSecurity
import Foliage.Meta
import Foliage.PrepareSource (prepareSource)
import Foliage.Shake
import Hackage.Security.Util.Path (toFilePath)
import System.Directory qualified as IO
prepareSdist :: PackageId -> PackageVersionMeta -> Action FilePath
prepareSdist pkgId pkgMeta = apply1 $ PackageRule @"prepareSdist" pkgId pkgMeta
addPrepareSdistRule :: Path Absolute -> Rules ()
addPrepareSdistRule outputDirRoot = addBuiltinRule noLint noIdentity run
where
run :: PackageRule "prepareSdist" FilePath -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath)
run (PackageRule pkgId _pkgMeta) (Just old) RunDependenciesSame =
let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath
in return $ RunResult ChangedNothing old path
run (PackageRule pkgId pkgMeta) old _ = do
let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath
srcDir <- prepareSource pkgId pkgMeta
let PackageIdentifier {pkgName} = pkgId
traced "cabal sdist" $ do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
gpd <- readGenericPackageDescription Verbosity.normal cabalFilePath
IO.createDirectoryIfMissing True (takeDirectory path)
packageDirToSdist Verbosity.normal gpd srcDir
>>= BSL.writeFile path
hash <- BSL.toStrict . encode <$> readFileHashValue path
return $
if old == Just hash
then RunResult ChangedRecomputeSame hash path
else RunResult ChangedRecomputeDiff hash path
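The two clauses of run above are the whole change-detection story: a fingerprint is stored in Shake's database and compared on the next build. A stripped-down sketch of the same addBuiltinRule pattern, with a hypothetical key type and a placeholder fingerprint (not part of this commit; Foliage.PrepareSource below follows the same shape):

{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE TypeFamilies #-}

module ExampleBuiltinRule (addExampleRule, runExample) where

import Data.ByteString qualified as BS
import Development.Shake
import Development.Shake.Classes (Binary, Hashable, NFData)
import Development.Shake.Rule
import GHC.Generics (Generic)

-- Hypothetical key: "produce the file named by this key".
newtype Example = Example FilePath
  deriving stock (Show, Eq, Generic)
  deriving anyclass (Hashable, Binary, NFData)

type instance RuleResult Example = FilePath

-- Callers obtain the value the same way prepareSdist does: apply1 the key.
runExample :: FilePath -> Action FilePath
runExample = apply1 . Example

addExampleRule :: Rules ()
addExampleRule = addBuiltinRule noLint noIdentity run
  where
    run :: BuiltinRun Example FilePath
    -- Dependencies unchanged and a fingerprint is stored: reuse it.
    run (Example out) (Just stored) RunDependenciesSame =
      return $ RunResult ChangedNothing stored out
    -- Otherwise redo the work and compare the new fingerprint with the old one.
    run (Example out) old _mode = do
      let fingerprint = BS.empty -- placeholder for e.g. a file hash
      return $
        if old == Just fingerprint
          then RunResult ChangedRecomputeSame fingerprint out
          else RunResult ChangedRecomputeDiff fingerprint out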

Foliage.PrepareSource (new file)

@@ -0,0 +1,114 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
module Foliage.PrepareSource where
import Control.Monad (when)
import Data.ByteString qualified as BS
import Data.Foldable (for_)
import Data.Text qualified as T
import Development.Shake
import Development.Shake.Rule
import Distribution.Pretty (prettyShow)
import Distribution.Types.PackageId
import Distribution.Types.PackageName (unPackageName)
import Foliage.Meta
import Foliage.RemoteAsset (fetchRemoteAsset)
import Foliage.Shake (PackageRule (PackageRule))
import Foliage.UpdateCabalFile (rewritePackageVersion)
import Network.URI (URI (..), URIAuth (..), nullURI, nullURIAuth)
import System.Directory qualified as IO
import System.FilePath ((<.>), (</>))
prepareSource :: PackageId -> PackageVersionMeta -> Action FilePath
prepareSource pkgId pkgMeta = apply1 $ PackageRule @"prepareSource" pkgId pkgMeta
addPrepareSourceRule :: FilePath -> FilePath -> Rules ()
addPrepareSourceRule inputDir cacheDir = addBuiltinRule noLint noIdentity run
where
run :: BuiltinRun (PackageRule "prepareSource" FilePath) FilePath
run (PackageRule pkgId pkgMeta) _old mode = do
let PackageIdentifier {pkgName, pkgVersion} = pkgId
let PackageVersionMeta {packageVersionSource, packageVersionForce} = pkgMeta
let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion
case mode of
RunDependenciesSame ->
return $ RunResult ChangedNothing BS.empty srcDir
RunDependenciesChanged -> do
-- FIXME too much rework?
-- this action only depends on the tarball and the package metadata
-- delete everything inside the package source tree
liftIO $ do
-- FIXME this should only delete inside srcDir but apparently
-- also deletes srcDir itself
removeFiles srcDir ["//*"]
IO.createDirectoryIfMissing True srcDir
case packageVersionSource of
TarballSource url mSubdir -> do
tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do
cmd_ "tar xzf" [tarballPath] "-C" [tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir]
--
-- This is almost identical to the above but we get to keep the
-- metadata.
--
GitHubSource repo rev mSubdir -> do
let url =
nullURI
{ uriScheme = "https:",
uriAuthority = Just nullURIAuth {uriRegName = "github.com"},
uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev)
}
tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do
cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches"
hasPatches <- doesDirectoryExist patchesDir
when hasPatches $ do
patchfiles <- getDirectoryFiles patchesDir ["*.patch"]
for_ patchfiles $ \patchfile -> do
let patch = patchesDir </> patchfile
cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1"
when packageVersionForce $ do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
putInfo $ "Updating version in cabal file" ++ cabalFilePath
liftIO $ rewritePackageVersion cabalFilePath pkgVersion
return $ RunResult ChangedRecomputeDiff BS.empty srcDir

Foliage.RemoteAsset

@@ -2,13 +2,12 @@
{-# LANGUAGE TypeFamilies #-}

module Foliage.RemoteAsset
-  ( remoteAssetNeed,
-    remoteAssetRule,
-    addBuiltinRemoteAssetRule,
+  ( fetchRemoteAsset,
+    addFetchRemoteAssetRule,
  )
where

-import Control.Monad (unless)
+import Control.Monad
import Data.ByteString qualified as BS
import Data.Char (isAlpha)
import Data.List (dropWhileEnd)
@@ -17,7 +16,7 @@ import Development.Shake
import Development.Shake.Classes
import Development.Shake.FilePath
import Development.Shake.Rule
-import Network.URI (URI (uriAuthority, uriFragment, uriQuery, uriScheme), URIAuth (uriRegName), pathSegments)
+import Network.URI (URI (..), URIAuth (..), pathSegments)
import Network.URI.Orphans ()
import System.Directory (createDirectoryIfMissing)
@@ -27,16 +26,11 @@ newtype RemoteAsset = RemoteAsset URI
type instance RuleResult RemoteAsset = FilePath

-data RemoteAssetRule = RemoteAssetRule RemoteAsset (Action FilePath)
-
-remoteAssetRule :: URI -> Action FilePath -> Rules ()
-remoteAssetRule url act = addUserRule $ RemoteAssetRule (RemoteAsset url) act
-
-remoteAssetNeed :: URI -> Action FilePath
-remoteAssetNeed = apply1 . RemoteAsset
-
-addBuiltinRemoteAssetRule :: FilePath -> Rules ()
-addBuiltinRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run
+fetchRemoteAsset :: URI -> Action FilePath
+fetchRemoteAsset = apply1 . RemoteAsset
+
+addFetchRemoteAssetRule :: FilePath -> Rules ()
+addFetchRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run
  where
    run :: BuiltinRun RemoteAsset FilePath
    run (RemoteAsset uri) old _mode = do
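With the user-rule pair replaced by a single builtin rule, fetching becomes an ordinary Action call. A hedged usage sketch; the module name, cache directory and URL below are illustrative and not taken from this commit:

module ExampleFetch where

import Development.Shake
import Foliage.RemoteAsset (addFetchRemoteAssetRule, fetchRemoteAsset)
import Network.URI (parseURI)

-- Illustrative wiring only: register the rule once, then call it from any Action.
exampleRules :: Rules ()
exampleRules = do
  addFetchRemoteAssetRule "_cache" -- cache directory, as in CmdBuild above
  action $ do
    let Just uri = parseURI "https://example.com/pkg-1.0.tar.gz" -- hypothetical URL
    path <- fetchRemoteAsset uri -- path of the cached download
    need [path]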

Foliage.Shake

@@ -1,26 +1,33 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE TypeFamilies #-}
+
module Foliage.Shake
  ( computeFileInfoSimple',
-    readFileByteStringLazy,
    readKeysAt,
    readPackageVersionMeta',
+    PackageRule (PackageRule),
  )
where

-import Data.ByteString.Lazy qualified as BSL
import Data.Traversable (for)
import Development.Shake
+import Development.Shake.Classes
import Development.Shake.FilePath
+import Distribution.Package (PackageId)
import Foliage.HackageSecurity
import Foliage.Meta
+import GHC.Generics (Generic)
+import GHC.TypeLits (Symbol)

computeFileInfoSimple' :: FilePath -> Action FileInfo
computeFileInfoSimple' fp = do
  need [fp]
  liftIO $ computeFileInfoSimple fp

-readFileByteStringLazy :: FilePath -> Action BSL.ByteString
-readFileByteStringLazy x = need [x] >> liftIO (BSL.readFile x)
-
readKeysAt :: FilePath -> Action [Some Key]
readKeysAt base = do
  paths <- getDirectoryFiles base ["*.json"]
@@ -33,3 +40,9 @@ readPackageVersionMeta' :: FilePath -> Action PackageVersionMeta
readPackageVersionMeta' fp = do
  need [fp]
  liftIO $ readPackageVersionMeta fp
+
+data PackageRule (tag :: Symbol) a = PackageRule PackageId PackageVersionMeta
+  deriving (Show, Eq, Generic)
+  deriving (Hashable, Binary, NFData)
+
+type instance RuleResult (PackageRule tag a) = a
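The Symbol tag and the phantom result type exist so one key shape can back several independent builtin rules: PackageRule "prepareSource" FilePath and PackageRule "prepareSdist" FilePath are distinct key types, so each can be registered with its own addBuiltinRule. A minimal sketch of the same trick with hypothetical tags, outside foliage:

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}

module TaggedKeyExample where

import Development.Shake (Action, RuleResult)
import Development.Shake.Classes (Binary, Hashable, NFData)
import Development.Shake.Rule (apply1)
import GHC.Generics (Generic)
import GHC.TypeLits (Symbol)

-- Hypothetical tagged key; same shape as PackageRule above.
data Step (tag :: Symbol) a = Step FilePath
  deriving stock (Show, Eq, Generic)
  deriving anyclass (Hashable, Binary, NFData)

-- One instance serves every tag; the phantom a picks the result type.
type instance RuleResult (Step tag a) = a

-- Step "compile" FilePath and Step "lineCount" Int are different types,
-- so two rules can share this key shape without clashing (each would be
-- registered separately with addBuiltinRule).
compile :: FilePath -> Action FilePath
compile src = apply1 (Step @"compile" @FilePath src)

lineCount :: FilePath -> Action Int
lineCount src = apply1 (Step @"lineCount" @Int src)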

Foliage.Shake.Oracle (deleted)

@@ -1,66 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeFamilies #-}
module Foliage.Shake.Oracle
( UTCTime,
GetCurrentTime (..),
GetExpiryTime (..),
GetPackageDescription (..),
GetPackages (..),
GetPackageVersionMeta (..),
GetSignOptions (..),
PreparePackageSource (..),
)
where
import Data.Time.Compat ()
import Development.Shake (RuleResult)
import Development.Shake.Classes (Binary, Hashable, NFData)
import Distribution.Types.PackageId (PackageId)
import Foliage.Meta
import Foliage.Options (SignOptions)
import GHC.Generics (Generic)
data GetCurrentTime = GetCurrentTime
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetCurrentTime = UTCTime
data GetExpiryTime = GetExpiryTime
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetExpiryTime = Maybe UTCTime
data GetSignOptions = GetSignOptions
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetSignOptions = SignOptions
data GetPackages = GetPackages
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackages = [PackageId]
newtype GetPackageVersionMeta = GetPackageVersionMeta PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackageVersionMeta = PackageVersionMeta
newtype PreparePackageSource = PreparePackageSource PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult PreparePackageSource = FilePath
newtype GetPackageDescription = GetPackageDescription PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackageDescription = FilePath

cabal.project

@@ -1,2 +1,3 @@
packages: .
index-state: 2022-08-29T00:00:00Z
+with-compiler: ghc-8.10.7

flake.nix

@@ -21,6 +21,7 @@
        };
        shell.buildInputs = with pkgs; [
          nixpkgs-fmt
+          fsatrace
        ];
        modules = [{
          packages.foliage.components.exes.foliage.dontStrip = false;
@@ -28,9 +29,11 @@
          };
      in {
        packages.default = project.foliage.components.exes.foliage;
        devShell = pkgs.mkShell {
          name = "foliage-dev-shell";
+          buildInputs = with pkgs; [
+            fsatrace
+          ];
        };
      });

foliage.cabal

@@ -22,9 +22,10 @@ executable foliage
    Foliage.HackageSecurity
    Foliage.Meta
    Foliage.Options
+    Foliage.PrepareSource
+    Foliage.PrepareSdist
    Foliage.RemoteAsset
    Foliage.Shake
-    Foliage.Shake.Oracle
    Foliage.Time
    Foliage.UpdateCabalFile
    Network.URI.Orphans