Merge remote-tracking branch 'new'

Andrea Bedini 2022-10-05 15:11:48 +08:00
commit b13ecafb31
10 changed files with 431 additions and 526 deletions

View File (Foliage.CmdBuild)

@@ -1,5 +1,3 @@
{-# OPTIONS_GHC -Wno-name-shadowing #-}
module Foliage.CmdBuild (cmdBuild) where
import Codec.Archive.Tar qualified as Tar
@@ -7,32 +5,44 @@ import Codec.Archive.Tar.Entry qualified as Tar
import Codec.Compression.GZip qualified as GZip
import Control.Monad (unless, when)
import Data.ByteString.Lazy qualified as BSL
import Data.Foldable (for_)
import Data.List (sortOn)
import Data.Maybe (fromMaybe)
import Data.Text qualified as T
import Data.Traversable (for)
import Development.Shake
import Development.Shake.FilePath
import Distribution.Client.SrcDist (packageDirToSdist)
import Distribution.Package
import Distribution.Parsec (simpleParsec)
import Distribution.Pretty
import Distribution.Simple.PackageDescription (readGenericPackageDescription)
import Distribution.Verbosity qualified as Verbosity
import Distribution.Pretty (prettyShow)
import Foliage.HackageSecurity
import Foliage.Meta
import Foliage.Options
import Foliage.RemoteAsset (addBuiltinRemoteAssetRule, remoteAssetNeed)
import Foliage.PrepareSdist
import Foliage.PrepareSource (addPrepareSourceRule, prepareSource)
import Foliage.RemoteAsset (addFetchRemoteAssetRule)
import Foliage.Shake
import Foliage.Shake.Oracle
import Foliage.Time qualified as Time
import Foliage.UpdateCabalFile (rewritePackageVersion)
import Network.URI
import System.Directory qualified as IO
import Hackage.Security.Util.Path (castRoot, toFilePath)
cmdBuild :: BuildOptions -> IO ()
cmdBuild
cmdBuild buildOptions = do
outputDirRoot <- liftIO $ makeAbsolute (fromFilePath (buildOptsOutputDir buildOptions))
shake opts $
do
addFetchRemoteAssetRule cacheDir
addPrepareSourceRule (buildOptsInputDir buildOptions) cacheDir
addPrepareSdistRule outputDirRoot
phony "buildAction" (buildAction buildOptions)
want ["buildAction"]
where
cacheDir = "_cache"
opts =
shakeOptions
{ shakeFiles = cacheDir,
shakeVerbosity = Diagnostic
}
buildAction :: BuildOptions -> Action ()
buildAction
BuildOptions
{ buildOptsSignOpts = signOpts,
buildOptsCurrentTime = mCurrentTime,
@@ -40,453 +50,226 @@ cmdBuild
buildOptsInputDir = inputDir,
buildOptsOutputDir = outputDir
} = do
let cacheDir = "_cache"
let pkgMetaDir PackageIdentifier {pkgName, pkgVersion} =
inputDir </> unPackageName pkgName </> prettyShow pkgVersion
outputDirRoot <- liftIO $ makeAbsolute (fromFilePath outputDir)
maybeReadKeysAt <- case signOpts of
SignOptsSignWithKeys keysPath -> do
ks <- IO.doesDirectoryExist keysPath
ks <- doesDirectoryExist keysPath
unless ks $ do
putStrLn $ "You don't seem to have created a set of TUF keys. I will create one in " <> keysPath
createKeys keysPath
putWarn $ "You don't seem to have created a set of TUF keys. I will create one in " <> keysPath
liftIO $ createKeys keysPath
return $ \name -> readKeysAt (keysPath </> name)
SignOptsDon'tSign ->
return $ const $ return []
let opts =
shakeOptions
{ shakeChange = ChangeDigest,
shakeFiles = cacheDir,
shakeVerbosity = Info
}
expiryTime <-
for mExpireSignaturesOn $ \expireSignaturesOn -> do
putInfo $ "Expiry time set to " <> Time.iso8601Show expireSignaturesOn
return expireSignaturesOn
shake opts $ do
addBuiltinRemoteAssetRule (cacheDir </> "downloads")
currentTime <- case mCurrentTime of
Nothing -> do
t <- Time.truncateSeconds <$> liftIO Time.getCurrentTime
putInfo $ "Current time set to " <> Time.iso8601Show t <> ". You can set a fixed time using the --current-time option."
return t
Just t -> do
putInfo $ "Current time set to " <> Time.iso8601Show t <> "."
return t
--
-- Oracles
--
packages <- getPackages inputDir
getCurrentTime <- addOracle $ \GetCurrentTime ->
case mCurrentTime of
Nothing -> do
t <- Time.truncateSeconds <$> liftIO Time.getCurrentTime
putInfo $ "Current time set to " <> Time.iso8601Show t <> ". You can set a fixed time using the --current-time option."
return t
Just t -> do
putInfo $ "Current time set to " <> Time.iso8601Show t <> "."
return t
allCabalFiles <-
concat
<$> for
packages
( \(pkgId, pkgMeta) -> do
let PackageVersionMeta {packageVersionTimestamp, packageVersionRevisions} = pkgMeta
srcDir <- prepareSource pkgId pkgMeta
let cabalFilePath = srcDir </> unPackageName (pkgName pkgId) <.> "cabal"
let cabalFileTimestamp = fromMaybe currentTime packageVersionTimestamp
return $
(pkgId, cabalFileTimestamp, cabalFilePath) :
map
( \RevisionMeta {revisionTimestamp, revisionNumber} ->
(pkgId, revisionTimestamp, cabalFileRevisionPath inputDir pkgId revisionNumber)
)
packageVersionRevisions
)
getExpiryTime <- addOracleCache $ \GetExpiryTime -> do
alwaysRerun
for mExpireSignaturesOn $ \expireSignaturesOn -> do
putInfo $ "Expiry time set to " <> Time.iso8601Show expireSignaturesOn
return expireSignaturesOn
entries1 <- for allCabalFiles $ \(pkgId, timestamp, filePath) ->
prepareIndexPkgCabal pkgId timestamp filePath
getPackageVersionMeta <- addOracleCache $ \(GetPackageVersionMeta pkgId) -> do
meta <- readPackageVersionMeta' $ pkgMetaDir pkgId </> "meta.toml"
targetKeys <- maybeReadKeysAt "target"
entries2 <- for packages $ uncurry (prepareIndexPkgMetadata currentTime expiryTime targetKeys)
-- Here we do some validation of the package metadata. We could
-- find a better place for it.
case meta of
PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Nothing}
| not (null packageVersionRevisions) -> do
putError $
"Package " <> prettyShow pkgId
<> " has cabal file revisions but the original package has no timestamp. This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions"
fail "invalid package metadata"
PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Just pkgTs}
| any ((< pkgTs) . revisionTimestamp) packageVersionRevisions -> do
putError $
"Package " <> prettyShow pkgId
<> " has a revision with timestamp earlier than the package itself. Adjust the timestamps so that all revisions come after the original package"
fail "invalid package metadata"
_ ->
return meta
let tarContents = Tar.write $ sortOn Tar.entryTime (entries1 ++ entries2)
traced "Writing index" $ do
BSL.writeFile (anchorPath outputDirRoot repoLayoutIndexTar) tarContents
BSL.writeFile (anchorPath outputDirRoot repoLayoutIndexTarGz) $ GZip.compress tarContents
preparePackageSource <- addOracleCache $ \(PreparePackageSource pkgId) -> do
PackageVersionMeta {packageVersionSource, packageVersionForce} <- getPackageVersionMeta (GetPackageVersionMeta pkgId)
privateKeysRoot <- maybeReadKeysAt "root"
privateKeysTarget <- maybeReadKeysAt "target"
privateKeysSnapshot <- maybeReadKeysAt "snapshot"
privateKeysTimestamp <- maybeReadKeysAt "timestamp"
privateKeysMirrors <- maybeReadKeysAt "mirrors"
-- FIXME too much rework?
-- this action only depends on the tarball and the package metadata
let PackageIdentifier {pkgName, pkgVersion} = pkgId
let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion
liftIO $
writeSignedJSON outputDirRoot repoLayoutMirrors privateKeysMirrors $
Mirrors
{ mirrorsVersion = FileVersion 1,
mirrorsExpires = FileExpires expiryTime,
mirrorsMirrors = []
}
-- delete everything inside the package source tree
liftIO $ do
-- FIXME this should only delete inside srcDir but apparently
-- also deletes srcDir itself
removeFiles srcDir ["//*"]
IO.createDirectoryIfMissing True srcDir
case packageVersionSource of
TarballSource url mSubdir -> do
tarballPath <- remoteAssetNeed url
withTempDir $ \tmpDir -> do
cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
--
-- This is almost identical to the above but we get to keep the
-- metadata.
--
GitHubSource repo rev mSubdir -> do
let url =
nullURI
{ uriScheme = "https:",
uriAuthority = Just nullURIAuth {uriRegName = "github.com"},
uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev)
}
tarballPath <- remoteAssetNeed url
withTempDir $ \tmpDir -> do
cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
applyPatches inputDir srcDir pkgId
when packageVersionForce $
forcePackageVersion srcDir pkgId
return srcDir
getPackageDescription <- addOracleCache $ \(GetPackageDescription pkgId) -> do
let PackageIdentifier {pkgName, pkgVersion} = pkgId
meta <- getPackageVersionMeta $ GetPackageVersionMeta pkgId
case latestRevisionNumber meta of
Nothing -> do
srcDir <- preparePackageSource $ PreparePackageSource pkgId
return $ srcDir </> unPackageName pkgName <.> "cabal"
Just revNum -> do
return $ inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "revisions" </> show revNum <.> "cabal"
getPackages <- addOracleCache $ \GetPackages -> do
metaFiles <- getDirectoryFiles inputDir ["*/*/meta.toml"]
when (null metaFiles) $ do
putError $
unlines
[ "We could not find any package metadata file (i.e. _sources/<name>/<version>/meta.toml)",
"Make sure you are passing the right input directory. The default input directory is _sources"
]
fail "no package metadata found"
return $
[ PackageIdentifier name version
| path <- metaFiles,
let [pkgName, pkgVersion, _] = splitDirectories path,
let Just name = simpleParsec pkgName,
let Just version = simpleParsec pkgVersion
]
--
-- Entrypoint
--
-- This triggers the whole chain of TUF metadata
want [outputDir </> "timestamp.json"]
-- This builds the current index entry for all packages
action $ do
pkgIds <- getPackages GetPackages
need
[ outputDir </> "index" </> unPackageName pkgName </> prettyShow pkgVersion </> unPackageName pkgName <.> "cabal"
| PackageIdentifier pkgName pkgVersion <- pkgIds
]
--
-- timestamp.json
--
outputDir </> "timestamp.json" %> \path -> do
snapshotInfo <- computeFileInfoSimple' (outputDir </> "snapshot.json")
expires <- getExpiryTime GetExpiryTime
let timestamp =
Timestamp
{ timestampVersion = FileVersion 1,
timestampExpires = FileExpires expires,
timestampInfoSnapshot = snapshotInfo
}
keys <- maybeReadKeysAt "timestamp"
let timestampSigned = withSignatures hackageRepoLayout keys timestamp
traced "writing" $
liftIO $ do
p <- makeAbsolute (fromFilePath path)
writeJSON hackageRepoLayout p timestampSigned
--
-- snapshot.json
--
outputDir </> "snapshot.json" %> \path -> do
rootInfo <- computeFileInfoSimple' (outputDir </> "root.json")
mirrorsInfo <- computeFileInfoSimple' (outputDir </> "mirrors.json")
tarInfo <- computeFileInfoSimple' (outputDir </> "01-index.tar")
tarGzInfo <- computeFileInfoSimple' (outputDir </> "01-index.tar.gz")
expires <- getExpiryTime GetExpiryTime
let snapshot =
Snapshot
{ snapshotVersion = FileVersion 1,
snapshotExpires = FileExpires expires,
snapshotInfoRoot = rootInfo,
snapshotInfoMirrors = mirrorsInfo,
snapshotInfoTar = Just tarInfo,
snapshotInfoTarGz = tarGzInfo
}
keys <- maybeReadKeysAt "snapshot"
let snapshotSigned = withSignatures hackageRepoLayout keys snapshot
traced "writing" $
liftIO $ do
p <- makeAbsolute (fromFilePath path)
writeJSON hackageRepoLayout p snapshotSigned
--
-- root.json
--
outputDir </> "root.json" %> \path -> do
expires <- getExpiryTime GetExpiryTime
privateKeysRoot <- maybeReadKeysAt "root"
privateKeysTarget <- maybeReadKeysAt "target"
privateKeysSnapshot <- maybeReadKeysAt "snapshot"
privateKeysTimestamp <- maybeReadKeysAt "timestamp"
privateKeysMirrors <- maybeReadKeysAt "mirrors"
let root =
Root
{ rootVersion = FileVersion 1,
rootExpires = FileExpires expires,
rootKeys =
fromKeys $
concat
[ privateKeysRoot,
privateKeysTarget,
privateKeysSnapshot,
privateKeysTimestamp,
privateKeysMirrors
],
rootRoles =
RootRoles
{ rootRolesRoot =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysRoot,
roleSpecThreshold = KeyThreshold 2
},
rootRolesSnapshot =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysSnapshot,
roleSpecThreshold = KeyThreshold 1
},
rootRolesTargets =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysTarget,
roleSpecThreshold = KeyThreshold 1
},
rootRolesTimestamp =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysTimestamp,
roleSpecThreshold = KeyThreshold 1
},
rootRolesMirrors =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysMirrors,
roleSpecThreshold = KeyThreshold 1
}
liftIO $
writeSignedJSON outputDirRoot repoLayoutRoot privateKeysRoot $
Root
{ rootVersion = FileVersion 1,
rootExpires = FileExpires expiryTime,
rootKeys =
fromKeys $
concat
[ privateKeysRoot,
privateKeysTarget,
privateKeysSnapshot,
privateKeysTimestamp,
privateKeysMirrors
],
rootRoles =
RootRoles
{ rootRolesRoot =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysRoot,
roleSpecThreshold = KeyThreshold 2
},
rootRolesSnapshot =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysSnapshot,
roleSpecThreshold = KeyThreshold 1
},
rootRolesTargets =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysTarget,
roleSpecThreshold = KeyThreshold 1
},
rootRolesTimestamp =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysTimestamp,
roleSpecThreshold = KeyThreshold 1
},
rootRolesMirrors =
RoleSpec
{ roleSpecKeys = map somePublicKey privateKeysMirrors,
roleSpecThreshold = KeyThreshold 1
}
}
}
keys <- maybeReadKeysAt "root"
let signedRoot = withSignatures hackageRepoLayout keys root
traced "writing" $
liftIO $ do
p <- makeAbsolute (fromFilePath path)
writeJSON hackageRepoLayout p signedRoot
rootInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutRoot)
mirrorsInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutMirrors)
tarInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutIndexTar)
tarGzInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutIndexTarGz)
--
-- mirrors.json
--
liftIO $
writeSignedJSON outputDirRoot repoLayoutSnapshot privateKeysSnapshot $
Snapshot
{ snapshotVersion = FileVersion 1,
snapshotExpires = FileExpires expiryTime,
snapshotInfoRoot = rootInfo,
snapshotInfoMirrors = mirrorsInfo,
snapshotInfoTar = Just tarInfo,
snapshotInfoTarGz = tarGzInfo
}
outputDir </> "mirrors.json" %> \path -> do
expires <- getExpiryTime GetExpiryTime
let mirrors =
Mirrors
{ mirrorsVersion = FileVersion 1,
mirrorsExpires = FileExpires expires,
mirrorsMirrors = []
}
snapshotInfo <- computeFileInfoSimple' (anchorPath outputDirRoot repoLayoutSnapshot)
liftIO $
writeSignedJSON outputDirRoot repoLayoutTimestamp privateKeysTimestamp $
Timestamp
{ timestampVersion = FileVersion 1,
timestampExpires = FileExpires expiryTime,
timestampInfoSnapshot = snapshotInfo
}
keys <- maybeReadKeysAt "mirrors"
let signedMirrors = withSignatures hackageRepoLayout keys mirrors
traced "writing" $
liftIO $ do
p <- makeAbsolute (fromFilePath path)
writeJSON hackageRepoLayout p signedMirrors
getPackages :: FilePath -> Action [(PackageIdentifier, PackageVersionMeta)]
getPackages inputDir = do
metaFiles <- getDirectoryFiles inputDir ["*/*/meta.toml"]
--
-- 01-index.tar
--
when (null metaFiles) $ do
putError $
unlines
[ "We could not find any package metadata file (i.e. _sources/<name>/<version>/meta.toml)",
"Make sure you are passing the right input directory. The default input directory is _sources"
]
fail "no package metadata found"
outputDir </> "01-index.tar" %> \path -> do
pkgIds <- getPackages GetPackages
for metaFiles $ \metaFile -> do
let [pkgName, pkgVersion, _] = splitDirectories metaFile
let Just name = simpleParsec pkgName
let Just version = simpleParsec pkgVersion
let pkgId = PackageIdentifier name version
entries <-
flip foldMap pkgIds $ \pkgId -> do
let PackageIdentifier {pkgName, pkgVersion} = pkgId
PackageVersionMeta {packageVersionTimestamp, packageVersionRevisions} <- getPackageVersionMeta (GetPackageVersionMeta pkgId)
meta <-
readPackageVersionMeta' (inputDir </> metaFile) >>= \case
PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Nothing}
| not (null packageVersionRevisions) -> do
putError $
inputDir </> metaFile <> " has cabal file revisions but the original package has no timestamp. This combination doesn't make sense. Either add a timestamp on the original package or remove the revisions"
fail "invalid package metadata"
PackageVersionMeta {packageVersionRevisions, packageVersionTimestamp = Just pkgTs}
| any ((< pkgTs) . revisionTimestamp) packageVersionRevisions -> do
putError $
inputDir </> metaFile <> " has a revision with timestamp earlier than the package itself. Adjust the timestamps so that all revisions come after the original package"
fail "invalid package metadata"
meta ->
return meta
return (pkgId, meta)
let name = unPackageName pkgName
let version = prettyShow pkgVersion
prepareIndexPkgCabal :: PackageId -> UTCTime -> FilePath -> Action Tar.Entry
prepareIndexPkgCabal pkgId timestamp filePath = do
need [filePath]
contents <- liftIO $ BSL.readFile filePath
return $ mkTarEntry contents (IndexPkgCabal pkgId) timestamp
srcDir <- preparePackageSource $ PreparePackageSource pkgId
now <- getCurrentTime GetCurrentTime
prepareIndexPkgMetadata :: UTCTime -> Maybe UTCTime -> [Some Key] -> PackageIdentifier -> PackageVersionMeta -> Action Tar.Entry
prepareIndexPkgMetadata currentTime expiryTime keys pkgId pkgMeta = do
let PackageVersionMeta {packageVersionTimestamp} = pkgMeta
sdist <- prepareSdist pkgId pkgMeta
targetFileInfo <- computeFileInfoSimple' sdist
let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
let targets =
Targets
{ targetsVersion = FileVersion 1,
targetsExpires = FileExpires expiryTime,
targetsTargets = fromList [(TargetPathRepo packagePath, targetFileInfo)],
targetsDelegations = Nothing
}
-- original cabal file
cabalEntry <-
mkTarEntry
(srcDir </> name <.> "cabal")
(name </> version </> name <.> "cabal")
(fromMaybe now packageVersionTimestamp)
return $
mkTarEntry
(renderSignedJSON keys targets)
(IndexPkgMetadata pkgId)
(fromMaybe currentTime packageVersionTimestamp)
-- package.json
packageEntry <-
mkTarEntry
(outputDir </> "index" </> name </> version </> "package.json")
(name </> version </> "package.json")
(fromMaybe now packageVersionTimestamp)
mkTarEntry :: BSL.ByteString -> IndexFile dec -> UTCTime -> Tar.Entry
mkTarEntry contents indexFile timestamp =
(Tar.fileEntry tarPath contents)
{ Tar.entryTime = floor $ Time.utcTimeToPOSIXSeconds timestamp,
Tar.entryOwnership =
Tar.Ownership
{ Tar.ownerName = "foliage",
Tar.groupName = "foliage",
Tar.ownerId = 0,
Tar.groupId = 0
}
}
where
indexPath = toFilePath $ castRoot $ indexFileToPath hackageIndexLayout indexFile
Right tarPath = Tar.toTarPath False indexPath
-- revised cabal files
revisionEntries <- for packageVersionRevisions $ \RevisionMeta {revisionNumber, revisionTimestamp} ->
mkTarEntry
(inputDir </> name </> version </> "revisions" </> show revisionNumber <.> "cabal")
(name </> version </> name <.> "cabal")
revisionTimestamp
anchorPath :: Path Absolute -> (RepoLayout -> RepoPath) -> FilePath
anchorPath outputDirRoot p =
toFilePath $ anchorRepoPathLocally outputDirRoot $ p hackageRepoLayout
return $ cabalEntry : packageEntry : revisionEntries
traced "writing" $ liftIO $ BSL.writeFile path $ Tar.write $ sortOn Tar.entryTime entries
--
-- 01-index.tar.gz
--
outputDir </> "01-index.tar.gz" %> \path -> do
tar <- readFileByteStringLazy (outputDir </> "01-index.tar")
traced "writing" $ liftIO $ BSL.writeFile path $ GZip.compress tar
--
-- index cabal files
--
-- these come either from the package source or the revision files
--
outputDir </> "index/*/*/*.cabal" %> \path -> do
let [_, _, pkgName, pkgVersion, _] = splitDirectories (drop (length outputDir) path)
let Just name = simpleParsec pkgName
let Just version = simpleParsec pkgVersion
let pkgId = PackageIdentifier name version
cabalFile <- getPackageDescription (GetPackageDescription pkgId)
copyFileChanged cabalFile path
--
-- index package files (only depends on the source distribution)
--
outputDir </> "index/*/*/package.json" %> \path -> do
let [_, _, pkgName, pkgVersion, _] = splitDirectories (drop (length outputDir) path)
let packagePath = "package" </> pkgName <> "-" <> pkgVersion <.> "tar.gz"
let targetPath = rootPath $ fromUnrootedFilePath packagePath
targetFileInfo <- computeFileInfoSimple' (outputDir </> packagePath)
expires <- getExpiryTime GetExpiryTime
let targets =
Targets
{ targetsVersion = FileVersion 1,
targetsExpires = FileExpires expires,
targetsTargets = fromList [(TargetPathRepo targetPath, targetFileInfo)],
targetsDelegations = Nothing
}
keys <- maybeReadKeysAt "target"
let signedTargets = withSignatures hackageRepoLayout keys targets
liftIO $ do
p <- makeAbsolute (fromFilePath path)
writeJSON hackageRepoLayout p signedTargets
--
-- source distributions, including patching
--
outputDir </> "package/*.tar.gz" %> \path -> do
let [_, _, filename] = splitDirectories (drop (length outputDir) path)
let Just pkgId = stripExtension "tar.gz" filename >>= simpleParsec
cabalFile <- getPackageDescription $ GetPackageDescription pkgId
srcDir <- preparePackageSource $ PreparePackageSource pkgId
traced "cabal sdist" $ do
gpd <- readGenericPackageDescription Verbosity.normal cabalFile
packageDirToSdist Verbosity.normal gpd srcDir >>= BSL.writeFile path
putStrLn $ "All done. The repository is now available in " <> outputDir <> "."
mkTarEntry :: FilePath -> [Char] -> UTCTime -> Action Tar.Entry
mkTarEntry filePath indexPath timestamp = do
let Right tarPath = Tar.toTarPath False indexPath
contents <- readFileByteStringLazy filePath
return
(Tar.fileEntry tarPath contents)
{ Tar.entryTime = floor $ Time.utcTimeToPOSIXSeconds timestamp,
Tar.entryOwnership =
Tar.Ownership
{ Tar.ownerName = "foliage",
Tar.groupName = "foliage",
Tar.ownerId = 0,
Tar.groupId = 0
}
}
applyPatches :: [Char] -> FilePath -> PackageId -> Action ()
applyPatches inputDir srcDir PackageIdentifier {pkgName, pkgVersion} = do
let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches"
hasPatches <- doesDirectoryExist patchesDir
when hasPatches $ do
patchfiles <- getDirectoryFiles patchesDir ["*.patch"]
for_ patchfiles $ \patchfile -> do
let patch = patchesDir </> patchfile
cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1"
forcePackageVersion :: FilePath -> PackageId -> Action ()
forcePackageVersion srcDir PackageIdentifier {pkgName, pkgVersion} = do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
liftIO $ rewritePackageVersion cabalFilePath pkgVersion
cabalFileRevisionPath :: FilePath -> PackageIdentifier -> Int -> FilePath
cabalFileRevisionPath inputDir PackageIdentifier {pkgName, pkgVersion} revisionNumber =
inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "revisions" </> show revisionNumber <.> "cabal"

View File (Foliage.HackageSecurity)

@@ -11,11 +11,11 @@ module Foliage.HackageSecurity
where
import Control.Monad (replicateM_)
import Data.Functor.Identity
import Data.ByteString.Lazy qualified as BSL
import Hackage.Security.Key.Env (fromKeys)
import Hackage.Security.Server
import Hackage.Security.TUF.FileMap
import Hackage.Security.Util.Path (fromFilePath, fromUnrootedFilePath, makeAbsolute, rootPath)
import Hackage.Security.Util.Path (Absolute, Path, fromFilePath, fromUnrootedFilePath, makeAbsolute, rootPath, writeLazyByteString)
import Hackage.Security.Util.Some
import System.Directory (createDirectoryIfMissing)
import System.FilePath
@@ -25,11 +25,6 @@ readJSONSimple fp = do
p <- makeAbsolute (fromFilePath fp)
readJSON_NoKeys_NoLayout p
writeJSONSimple :: ToJSON Identity a => FilePath -> a -> IO ()
writeJSONSimple fp a = do
p <- makeAbsolute (fromFilePath fp)
writeJSON_NoLayout p a
computeFileInfoSimple :: FilePath -> IO FileInfo
computeFileInfoSimple fp = do
p <- makeAbsolute (fromFilePath fp)
@@ -56,3 +51,15 @@ writeKey :: FilePath -> Some Key -> IO ()
writeKey fp key = do
p <- makeAbsolute (fromFilePath fp)
writeJSON_NoLayout p key
renderSignedJSON :: ToJSON WriteJSON a => [Some Key] -> a -> BSL.ByteString
renderSignedJSON keys thing =
renderJSON
hackageRepoLayout
(withSignatures hackageRepoLayout keys thing)
writeSignedJSON :: ToJSON WriteJSON a => Path Absolute -> (RepoLayout -> RepoPath) -> [Some Key] -> a -> IO ()
writeSignedJSON outputDirRoot repoPath keys thing = do
writeLazyByteString fp $ renderSignedJSON keys thing
where
fp = anchorRepoPathLocally outputDirRoot $ repoPath hackageRepoLayout

View File (Foliage.PrepareSdist)

@@ -0,0 +1,55 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
module Foliage.PrepareSdist
( prepareSdist,
addPrepareSdistRule,
)
where
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BSL
import Development.Shake
import Development.Shake.FilePath (takeDirectory, (<.>), (</>))
import Development.Shake.Rule
import Distribution.Client.HashValue (readFileHashValue)
import Distribution.Client.SrcDist (packageDirToSdist)
import Distribution.Compat.Binary (encode)
import Distribution.Simple.PackageDescription (readGenericPackageDescription)
import Distribution.Types.PackageId
import Distribution.Types.PackageName
import Distribution.Verbosity qualified as Verbosity
import Foliage.HackageSecurity
import Foliage.Meta
import Foliage.PrepareSource (prepareSource)
import Foliage.Shake
import Hackage.Security.Util.Path (toFilePath)
import System.Directory qualified as IO
prepareSdist :: PackageId -> PackageVersionMeta -> Action FilePath
prepareSdist pkgId pkgMeta = apply1 $ PackageRule @"prepareSdist" pkgId pkgMeta
addPrepareSdistRule :: Path Absolute -> Rules ()
addPrepareSdistRule outputDirRoot = addBuiltinRule noLint noIdentity run
where
run :: PackageRule "prepareSdist" FilePath -> Maybe BS.ByteString -> RunMode -> Action (RunResult FilePath)
run (PackageRule pkgId _pkgMeta) (Just old) RunDependenciesSame =
let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath
in return $ RunResult ChangedNothing old path
run (PackageRule pkgId pkgMeta) old _ = do
let packagePath = repoLayoutPkgTarGz hackageRepoLayout pkgId
path = toFilePath $ anchorRepoPathLocally outputDirRoot packagePath
srcDir <- prepareSource pkgId pkgMeta
let PackageIdentifier {pkgName} = pkgId
traced "cabal sdist" $ do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
gpd <- readGenericPackageDescription Verbosity.normal cabalFilePath
IO.createDirectoryIfMissing True (takeDirectory path)
packageDirToSdist Verbosity.normal gpd srcDir
>>= BSL.writeFile path
hash <- BSL.toStrict . encode <$> readFileHashValue path
return $
if old == Just hash
then RunResult ChangedRecomputeSame hash path
else RunResult ChangedRecomputeDiff hash path
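For orientation, a minimal sketch of how the new custom rules fit together, mirroring the wiring visible in the CmdBuild changes above (the Example module itself is hypothetical; every other identifier appears in this commit):

module Example where

import Development.Shake
import Distribution.Types.PackageId (PackageId)
import Foliage.Meta (PackageVersionMeta)
import Foliage.Options (BuildOptions, buildOptsInputDir)
import Foliage.PrepareSdist (addPrepareSdistRule, prepareSdist)
import Foliage.PrepareSource (addPrepareSourceRule)
import Foliage.RemoteAsset (addFetchRemoteAssetRule)
import Hackage.Security.Util.Path (Absolute, Path)

-- Sketch only, not part of this commit: install the rules once in Rules ...
exampleRules :: BuildOptions -> Path Absolute -> Rules ()
exampleRules buildOptions outputDirRoot = do
  addFetchRemoteAssetRule "_cache"                                -- cache for downloaded tarballs
  addPrepareSourceRule (buildOptsInputDir buildOptions) "_cache"  -- unpack, patch, rewrite versions
  addPrepareSdistRule outputDirRoot                               -- write sdists under the repository root

-- ... then any Action can ask for a prepared sdist; the rule recomputes only
-- when its dependencies change and returns the path of the written tarball.
exampleAction :: PackageId -> PackageVersionMeta -> Action ()
exampleAction pkgId pkgMeta = do
  sdistPath <- prepareSdist pkgId pkgMeta
  need [sdistPath]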

View File (Foliage.PrepareSource)

@@ -0,0 +1,114 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
module Foliage.PrepareSource where
import Control.Monad (when)
import Data.ByteString qualified as BS
import Data.Foldable (for_)
import Data.Text qualified as T
import Development.Shake
import Development.Shake.Rule
import Distribution.Pretty (prettyShow)
import Distribution.Types.PackageId
import Distribution.Types.PackageName (unPackageName)
import Foliage.Meta
import Foliage.RemoteAsset (fetchRemoteAsset)
import Foliage.Shake (PackageRule (PackageRule))
import Foliage.UpdateCabalFile (rewritePackageVersion)
import Network.URI (URI (..), URIAuth (..), nullURI, nullURIAuth)
import System.Directory qualified as IO
import System.FilePath ((<.>), (</>))
prepareSource :: PackageId -> PackageVersionMeta -> Action FilePath
prepareSource pkgId pkgMeta = apply1 $ PackageRule @"prepareSource" pkgId pkgMeta
addPrepareSourceRule :: FilePath -> FilePath -> Rules ()
addPrepareSourceRule inputDir cacheDir = addBuiltinRule noLint noIdentity run
where
run :: BuiltinRun (PackageRule "prepareSource" FilePath) FilePath
run (PackageRule pkgId pkgMeta) _old mode = do
let PackageIdentifier {pkgName, pkgVersion} = pkgId
let PackageVersionMeta {packageVersionSource, packageVersionForce} = pkgMeta
let srcDir = cacheDir </> unPackageName pkgName </> prettyShow pkgVersion
case mode of
RunDependenciesSame ->
return $ RunResult ChangedNothing BS.empty srcDir
RunDependenciesChanged -> do
-- FIXME too much rework?
-- this action only depends on the tarball and the package metadata
-- delete everything inside the package source tree
liftIO $ do
-- FIXME this should only delete inside srcDir but apparently
-- also deletes srcDir itself
removeFiles srcDir ["//*"]
IO.createDirectoryIfMissing True srcDir
case packageVersionSource of
TarballSource url mSubdir -> do
tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do
cmd_ "tar xzf" [tarballPath] "-C" [tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ "cp --recursive --no-target-directory --dereference" [tdir, srcDir]
--
-- This is almost identical to the above but we get to keep the
-- metadata.
--
GitHubSource repo rev mSubdir -> do
let url =
nullURI
{ uriScheme = "https:",
uriAuthority = Just nullURIAuth {uriRegName = "github.com"},
uriPath = "/" </> T.unpack (unGitHubRepo repo) </> "tarball" </> T.unpack (unGitHubRev rev)
}
tarballPath <- fetchRemoteAsset url
withTempDir $ \tmpDir -> do
cmd_ ["tar", "xzf", tarballPath, "-C", tmpDir]
-- Special treatment of top-level directory: we remove it
--
-- Note: Don't let shake look into tmpDir! it will cause
-- unnecessary rework because tmpDir is always new
ls <- liftIO $ IO.getDirectoryContents tmpDir
let ls' = filter (not . all (== '.')) ls
let fix1 = case ls' of [l] -> (</> l); _ -> id
fix2 = case mSubdir of Just s -> (</> s); _ -> id
tdir = fix2 $ fix1 tmpDir
cmd_ ["cp", "--recursive", "--no-target-directory", "--dereference", tdir, srcDir]
let patchesDir = inputDir </> unPackageName pkgName </> prettyShow pkgVersion </> "patches"
hasPatches <- doesDirectoryExist patchesDir
when hasPatches $ do
patchfiles <- getDirectoryFiles patchesDir ["*.patch"]
for_ patchfiles $ \patchfile -> do
let patch = patchesDir </> patchfile
cmd_ Shell (Cwd srcDir) (FileStdin patch) "patch -p1"
when packageVersionForce $ do
let cabalFilePath = srcDir </> unPackageName pkgName <.> "cabal"
putInfo $ "Updating version in cabal file" ++ cabalFilePath
liftIO $ rewritePackageVersion cabalFilePath pkgVersion
return $ RunResult ChangedRecomputeDiff BS.empty srcDir

View File (Foliage.RemoteAsset)

@@ -2,13 +2,12 @@
{-# LANGUAGE TypeFamilies #-}
module Foliage.RemoteAsset
( remoteAssetNeed,
remoteAssetRule,
addBuiltinRemoteAssetRule,
( fetchRemoteAsset,
addFetchRemoteAssetRule,
)
where
import Control.Monad (unless)
import Control.Monad
import Data.ByteString qualified as BS
import Data.Char (isAlpha)
import Data.List (dropWhileEnd)
@@ -17,7 +16,7 @@ import Development.Shake
import Development.Shake.Classes
import Development.Shake.FilePath
import Development.Shake.Rule
import Network.URI (URI (uriAuthority, uriFragment, uriQuery, uriScheme), URIAuth (uriRegName), pathSegments)
import Network.URI (URI (..), URIAuth (..), pathSegments)
import Network.URI.Orphans ()
import System.Directory (createDirectoryIfMissing)
@@ -27,16 +26,11 @@ newtype RemoteAsset = RemoteAsset URI
type instance RuleResult RemoteAsset = FilePath
data RemoteAssetRule = RemoteAssetRule RemoteAsset (Action FilePath)
fetchRemoteAsset :: URI -> Action FilePath
fetchRemoteAsset = apply1 . RemoteAsset
remoteAssetRule :: URI -> Action FilePath -> Rules ()
remoteAssetRule url act = addUserRule $ RemoteAssetRule (RemoteAsset url) act
remoteAssetNeed :: URI -> Action FilePath
remoteAssetNeed = apply1 . RemoteAsset
addBuiltinRemoteAssetRule :: FilePath -> Rules ()
addBuiltinRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run
addFetchRemoteAssetRule :: FilePath -> Rules ()
addFetchRemoteAssetRule cacheDir = addBuiltinRule noLint noIdentity run
where
run :: BuiltinRun RemoteAsset FilePath
run (RemoteAsset uri) old _mode = do

View File (Foliage.Shake)

@@ -1,26 +1,33 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Foliage.Shake
( computeFileInfoSimple',
readFileByteStringLazy,
readKeysAt,
readPackageVersionMeta',
PackageRule (PackageRule),
)
where
import Data.ByteString.Lazy qualified as BSL
import Data.Traversable (for)
import Development.Shake
import Development.Shake.Classes
import Development.Shake.FilePath
import Distribution.Package (PackageId)
import Foliage.HackageSecurity
import Foliage.Meta
import GHC.Generics (Generic)
import GHC.TypeLits (Symbol)
computeFileInfoSimple' :: FilePath -> Action FileInfo
computeFileInfoSimple' fp = do
need [fp]
liftIO $ computeFileInfoSimple fp
readFileByteStringLazy :: FilePath -> Action BSL.ByteString
readFileByteStringLazy x = need [x] >> liftIO (BSL.readFile x)
readKeysAt :: FilePath -> Action [Some Key]
readKeysAt base = do
paths <- getDirectoryFiles base ["*.json"]
@@ -33,3 +40,9 @@ readPackageVersionMeta' :: FilePath -> Action PackageVersionMeta
readPackageVersionMeta' fp = do
need [fp]
liftIO $ readPackageVersionMeta fp
data PackageRule (tag :: Symbol) a = PackageRule PackageId PackageVersionMeta
deriving (Show, Eq, Generic)
deriving (Hashable, Binary, NFData)
type instance RuleResult (PackageRule tag a) = a
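The Symbol tag is what lets this one key type back several independent rules: prepareSource and prepareSdist above each pick their own tag, and the phantom a is fixed to the rule's result type by the wrapper's signature. A rough sketch of the pattern with a hypothetical extra phase (prepareFoo does not exist in this commit):

{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}

module ExampleRule where

import Development.Shake (Action)
import Development.Shake.Rule (apply1)
import Distribution.Types.PackageId (PackageId)
import Foliage.Meta (PackageVersionMeta)
import Foliage.Shake (PackageRule (PackageRule))

-- Hypothetical phase, for illustration only: the "prepareFoo" tag keeps its
-- results separate from prepareSource/prepareSdist even though the key holds
-- the same data, and FilePath in the signature instantiates the phantom result
-- type. A matching addBuiltinRule for this key still has to be registered in
-- Rules, as addPrepareSourceRule and addPrepareSdistRule do for their tags.
prepareFoo :: PackageId -> PackageVersionMeta -> Action FilePath
prepareFoo pkgId pkgMeta = apply1 $ PackageRule @"prepareFoo" pkgId pkgMeta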

View File (Foliage.Shake.Oracle)

@@ -1,66 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE TypeFamilies #-}
module Foliage.Shake.Oracle
( UTCTime,
GetCurrentTime (..),
GetExpiryTime (..),
GetPackageDescription (..),
GetPackages (..),
GetPackageVersionMeta (..),
GetSignOptions (..),
PreparePackageSource (..),
)
where
import Data.Time.Compat ()
import Development.Shake (RuleResult)
import Development.Shake.Classes (Binary, Hashable, NFData)
import Distribution.Types.PackageId (PackageId)
import Foliage.Meta
import Foliage.Options (SignOptions)
import GHC.Generics (Generic)
data GetCurrentTime = GetCurrentTime
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetCurrentTime = UTCTime
data GetExpiryTime = GetExpiryTime
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetExpiryTime = Maybe UTCTime
data GetSignOptions = GetSignOptions
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetSignOptions = SignOptions
data GetPackages = GetPackages
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackages = [PackageId]
newtype GetPackageVersionMeta = GetPackageVersionMeta PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackageVersionMeta = PackageVersionMeta
newtype PreparePackageSource = PreparePackageSource PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult PreparePackageSource = FilePath
newtype GetPackageDescription = GetPackageDescription PackageId
deriving (Show, Eq, Generic)
deriving anyclass (Binary, Hashable, NFData)
type instance RuleResult GetPackageDescription = FilePath

View File (cabal.project)

@@ -1,2 +1,3 @@
packages: .
index-state: 2022-08-29T00:00:00Z
with-compiler: ghc-8.10.7

View File (flake.nix)

@@ -21,6 +21,7 @@
};
shell.buildInputs = with pkgs; [
nixpkgs-fmt
fsatrace
];
modules = [{
packages.foliage.components.exes.foliage.dontStrip = false;
@@ -28,9 +29,11 @@
};
in {
packages.default = project.foliage.components.exes.foliage;
devShell = pkgs.mkShell {
name = "foliage-dev-shell";
buildInputs = with pkgs; [
fsatrace
];
};
});

View File (foliage.cabal)

@@ -22,9 +22,10 @@ executable foliage
Foliage.HackageSecurity
Foliage.Meta
Foliage.Options
Foliage.PrepareSource
Foliage.PrepareSdist
Foliage.RemoteAsset
Foliage.Shake
Foliage.Shake.Oracle
Foliage.Time
Foliage.UpdateCabalFile
Network.URI.Orphans