diff --git a/default.nix b/default.nix
index d8e11dd..0d7b915 100644
--- a/default.nix
+++ b/default.nix
@@ -4,7 +4,7 @@ returnShellEnv ? pkgs.lib.inNixShell
 
 let
 
-  compiler = pkgs.haskell.packages.ghc882;
+  compiler = pkgs.haskell.packages.ghc883;
 
   inherit (pkgs.haskell.lib) dontCheck doJailbreak overrideCabal;
 
diff --git a/nix/sources.json b/nix/sources.json
index 1030d24..4506162 100644
--- a/nix/sources.json
+++ b/nix/sources.json
@@ -5,10 +5,10 @@
         "homepage": "https://github.com/nmattia/niv",
         "owner": "nmattia",
         "repo": "niv",
-        "rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
-        "sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
+        "rev": "f73bf8d584148677b01859677a63191c31911eae",
+        "sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs",
         "type": "tarball",
-        "url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
+        "url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "nixpkgs": {
@@ -17,10 +17,10 @@
         "homepage": null,
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "0bfd0187dafe3f597355e6be16b7b9a5f4b90376",
-        "sha256": "1ydpmvfshkaxr005imhkf8h5ihsb2l97ycyl6fmyanqjdw149wgl",
+        "rev": "78bfdbb291fd20df0f0f65061ee3081610b0a48f",
+        "sha256": "0qy72dm799vrmcmb72zcxkj2rrcgqgsj0z58f9gl069p9aag2z3a",
         "type": "tarball",
-        "url": "https://github.com/nixos/nixpkgs/archive/0bfd0187dafe3f597355e6be16b7b9a5f4b90376.tar.gz",
+        "url": "https://github.com/nixos/nixpkgs/archive/78bfdbb291fd20df0f0f65061ee3081610b0a48f.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }
diff --git a/nix/sources.nix b/nix/sources.nix
index 718ea6f..8a725cb 100644
--- a/nix/sources.nix
+++ b/nix/sources.nix
@@ -19,7 +19,7 @@ let
     pkgs.fetchzip { inherit (spec) url sha256; };
 
   fetch_git = spec:
-    builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
+    builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
 
   fetch_builtin-tarball = spec:
     builtins.trace
@@ -49,26 +49,22 @@ let
 
   # The set of packages used when specs are fetched using non-builtins.
   mkPkgs = sources:
-    if hasNixpkgsPath
-    then
-      if hasThisAsNixpkgsPath
-      then import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {}
-      else import <nixpkgs> {}
-    else
-      import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {};
-
-  mkNixpkgs = sources:
-    if builtins.hasAttr "nixpkgs" sources
-    then sources.nixpkgs
-    else abort
-      ''
-        Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
-        add a package called "nixpkgs" to your sources.json.
-      '';
-
-  hasNixpkgsPath = (builtins.tryEval <nixpkgs>).success;
-  hasThisAsNixpkgsPath =
-    (builtins.tryEval <nixpkgs>).success && <nixpkgs> == ./.;
+    let
+      sourcesNixpkgs =
+        import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
+      hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
+      hasThisAsNixpkgsPath = <nixpkgs> == ./.;
+    in
+      if builtins.hasAttr "nixpkgs" sources
+      then sourcesNixpkgs
+      else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
+        import <nixpkgs> {}
+      else
+        abort
+          ''
+            Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
+            add a package called "nixpkgs" to your sources.json.
+          '';
 
   # The actual fetching function.
   fetch = pkgs: name: spec:
@@ -125,12 +121,14 @@ let
   # The "config" used by the fetchers
   mkConfig =
     { sourcesFile ? ./sources.json
+    , sources ? builtins.fromJSON (builtins.readFile sourcesFile)
+    , pkgs ? mkPkgs sources
     }: rec {
       # The sources, i.e. the attribute set of spec name to spec
-      sources = builtins.fromJSON (builtins.readFile sourcesFile);
+      inherit sources;
+
       # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
-      pkgs = mkPkgs sources;
+      inherit pkgs;
     };
 in
-mkSources (mkConfig {}) //
-  { __functor = _: settings: mkSources (mkConfig settings); }
+mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
diff --git a/nixpkgs-update.nix b/nixpkgs-update.nix
index c980707..087256e 100644
--- a/nixpkgs-update.nix
+++ b/nixpkgs-update.nix
@@ -3,7 +3,7 @@
 , hpack, hspec, hspec-discover, http-client-tls, http-conduit
 , iso8601-time, lifted-base, mtl, neat-interpolation
 , optparse-applicative, parsec, parsers, partial-order, polysemy
-, raw-strings-qq, regex-applicative-text, servant, servant-client
+, polysemy-plugin, regex-applicative-text, servant, servant-client
 , sqlite-simple, stdenv, template-haskell, temporary, text, time
 , transformers, typed-process, unix, unordered-containers, vector
 , versions, xdg-basedir, zlib
@@ -19,9 +19,10 @@ mkDerivation {
     directory errors filepath github http-client-tls http-conduit
     iso8601-time lifted-base mtl neat-interpolation
     optparse-applicative parsec parsers partial-order polysemy
-    regex-applicative-text servant servant-client sqlite-simple
-    template-haskell temporary text time transformers typed-process
-    unix unordered-containers vector versions xdg-basedir zlib
+    polysemy-plugin regex-applicative-text servant servant-client
+    sqlite-simple template-haskell temporary text time transformers
+    typed-process unix unordered-containers vector versions xdg-basedir
+    zlib
   ];
   libraryToolDepends = [ hpack ];
   executableHaskellDepends = [
@@ -29,16 +30,17 @@ mkDerivation {
     directory errors filepath github http-client-tls http-conduit
     iso8601-time lifted-base mtl neat-interpolation
     optparse-applicative parsec parsers partial-order polysemy
-    regex-applicative-text servant servant-client sqlite-simple
-    template-haskell temporary text time transformers typed-process
-    unix unordered-containers vector versions xdg-basedir zlib
+    polysemy-plugin regex-applicative-text servant servant-client
+    sqlite-simple template-haskell temporary text time transformers
+    typed-process unix unordered-containers vector versions xdg-basedir
+    zlib
   ];
   testHaskellDepends = [
     aeson base bytestring conduit containers cryptohash-sha256
     directory doctest errors filepath github hspec hspec-discover
     http-client-tls http-conduit iso8601-time lifted-base mtl
     neat-interpolation optparse-applicative parsec parsers
-    partial-order polysemy raw-strings-qq regex-applicative-text
+    partial-order polysemy polysemy-plugin regex-applicative-text
     servant servant-client sqlite-simple template-haskell temporary
     text time transformers typed-process unix unordered-containers
     vector versions xdg-basedir zlib
diff --git a/package.yaml b/package.yaml
index fb34451..3bb4a9c 100644
--- a/package.yaml
+++ b/package.yaml
@@ -16,8 +16,7 @@ extra-source-files:
 
 github: ryantm/nixpkgs-update
 
-ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively
-
+ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively -fplugin=Polysemy.Plugin
 
 default-extensions:
   - DataKinds
@@ -54,6 +53,7 @@ dependencies:
   - parsers
   - partial-order
   - polysemy
+  - polysemy-plugin
   - regex-applicative-text
   - servant
   - servant-client
@@ -82,9 +82,8 @@ tests:
     dependencies:
      - hspec
      - hspec-discover
-     - doctest
-     - raw-strings-qq
      - nixpkgs-update
+     - doctest
 
 executables:
   nixpkgs-update:
diff --git a/src/File.hs b/src/File.hs
index 91a72fd..e656287 100644
--- a/src/File.hs
+++ b/src/File.hs
@@ -1,18 +1,58 @@
-module File
-  ( replace,
-  )
-where
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE LambdaCase, BlockArguments #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module File where
 
 import qualified Data.Text as T
 import Data.Text.IO as T
 import OurPrelude
-import Control.Monad (when)
+import Polysemy.Input
+import Polysemy.Output
 
-replace :: MonadIO m => Text -> Text -> FilePath -> m Bool
-replace find r file =
-  liftIO $ do
-    contents <- T.readFile file
-    let newContents = T.replace find r contents
-    when (contents /= newContents) $ do
-      T.writeFile file newContents
-    return $ contents /= newContents
+data File m a where
+  Read :: FilePath -> File m Text
+  Write :: FilePath -> Text -> File m ()
+
+makeSem ''File
+
+runIO ::
+  Member (Embed IO) r =>
+  Sem (File ': r) a ->
+  Sem r a
+runIO =
+  interpret $ \case
+    Read file -> embed $ T.readFile file
+    Write file contents -> embed $ T.writeFile file contents
+
+runPure ::
+  [Text] ->
+  Sem (File ': r) a ->
+  Sem r ([Text], a)
+runPure contentList
+  = runOutputMonoid pure
+  . runInputList contentList
+  . reinterpret2 \case
+      Read _file -> maybe "" id <$> input
+      Write _file contents -> output contents
+
+replace ::
+  Member File r =>
+  Text ->
+  Text ->
+  FilePath ->
+  Sem r Bool
+replace find replacement file = do
+  contents <- File.read file
+  let newContents = T.replace find replacement contents
+  when (contents /= newContents) $ do
+    File.write file newContents
+  return $ contents /= newContents
+
+replaceIO :: MonadIO m => Text -> Text -> FilePath -> m Bool
+replaceIO find replacement file =
+  liftIO
+    $ runFinal
+    $ embedToFinal @IO
+    $ runIO
+    $ (replace find replacement file)
diff --git a/src/Nix.hs b/src/Nix.hs
index 67030c5..5153d7f 100644
--- a/src/Nix.hs
+++ b/src/Nix.hs
@@ -1,6 +1,7 @@
 {-# LANGUAGE ExtendedDefaultRules #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TemplateHaskell #-}
 {-# OPTIONS_GHC -fno-warn-type-defaults #-}
 
 module Nix
@@ -15,6 +16,7 @@ module Nix
     getHash,
     getHashFromBuild,
     getHomepage,
+    getHomepageET,
     getIsBroken,
     getMaintainers,
     getOldHash,
@@ -40,6 +42,9 @@ import qualified Data.Text.Lazy as TL
 import qualified Data.Text.Lazy.Encoding as TL
 import qualified Data.Vector as V
 import OurPrelude
+import qualified Polysemy.Error as Error
+import qualified Process as P
+import qualified Process
 import System.Exit
 import Text.Parsec (parse)
 import Text.Parser.Combinators
@@ -49,16 +54,26 @@ import Prelude hiding (log)
 
 data Env = Env [(String, String)]
 
-data EvalOptions = EvalOptions Raw Env
-
 data Raw = Raw | NoRaw
 
+data EvalOptions = EvalOptions Raw Env
+
 rawOpt :: Raw -> [String]
 rawOpt Raw = ["--raw"]
 rawOpt NoRaw = []
 
+nixEvalSem ::
+  Members '[P.Process, Error Text] r =>
+  EvalOptions ->
+  Text ->
+  Sem r Text
+nixEvalSem (EvalOptions raw (Env env)) expr =
+  T.strip
+    <$> ourReadProcessInterleaved_Sem
+      (setEnv env (proc "nix" (["eval", "-f", "."] <> rawOpt raw <> [T.unpack expr])))
+
 nixEvalET :: MonadIO m => EvalOptions -> Text -> ExceptT Text m Text
 nixEvalET (EvalOptions raw (Env env)) expr =
   ourReadProcessInterleaved_
@@ -192,15 +207,27 @@ getDescription attrPath =
     )
     & overwriteErrorT ("Could not get meta.description for attrpath " <> attrPath)
 
-getHomepage :: MonadIO m => Text -> ExceptT Text m Text
+getHomepage ::
+  Members '[P.Process, Error Text] r =>
+  Text ->
+  Sem r Text
 getHomepage attrPath =
-  nixEvalET
+  nixEvalSem
     (EvalOptions NoRaw (Env []))
     ( "(let pkgs = import ./. {}; in pkgs."
        <> attrPath
        <> ".meta.homepage or \"\")"
     )
-    & overwriteErrorT ("Could not get meta.homepage for attrpath " <> attrPath)
+
+getHomepageET :: MonadIO m => Text -> ExceptT Text m Text
+getHomepageET attrPath =
+  ExceptT
+    . liftIO
+    . runFinal
+    . embedToFinal @IO
+    . Error.runError
+    . Process.runIO
+    $ getHomepage attrPath
 
 getSrcUrl :: MonadIO m => Text -> ExceptT Text m Text
 getSrcUrl =
diff --git a/src/OurPrelude.hs b/src/OurPrelude.hs
index 26e05d6..8aa610c 100644
--- a/src/OurPrelude.hs
+++ b/src/OurPrelude.hs
@@ -13,6 +13,7 @@ module OurPrelude
     module Data.Bifunctor,
     module System.Process.Typed,
     module Polysemy,
+    module Polysemy.Error,
     Set,
     Text,
     Vector,
@@ -23,6 +24,7 @@ module OurPrelude
     ourReadProcessInterleaved_,
     ourReadProcessInterleavedBS_,
     ourReadProcessInterleaved,
+    ourReadProcessInterleaved_Sem,
     silently,
     bytestringToText,
   )
 where
@@ -45,6 +47,8 @@ import Data.Vector (Vector)
 import Language.Haskell.TH.Quote
 import qualified NeatInterpolation
 import Polysemy
+import Polysemy.Error hiding (tryJust, note, try)
+import qualified Process as P
 import System.Exit
 import System.Process.Typed
 
@@ -76,6 +80,13 @@ ourReadProcessInterleaved_ ::
 ourReadProcessInterleaved_ =
   readProcessInterleaved_ >>> tryIOTextET >>> fmapRT bytestringToText
 
+ourReadProcessInterleaved_Sem ::
+  Members '[P.Process, Error Text] r =>
+  ProcessConfig stdin stdoutIgnored stderrIgnored ->
+  Sem r Text
+ourReadProcessInterleaved_Sem =
+  P.readInterleaved >>> fmap bytestringToText
+
 ourReadProcessInterleaved ::
   MonadIO m =>
   ProcessConfig stdin stdoutIgnored stderrIgnored ->
diff --git a/src/Process.hs b/src/Process.hs
new file mode 100644
index 0000000..c323354
--- /dev/null
+++ b/src/Process.hs
@@ -0,0 +1,31 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Process where
+
+import qualified Data.ByteString.Lazy as BSL
+import Polysemy
+import Polysemy.Input
+import qualified System.Process.Typed as TP
+
+data Process m a where
+  ReadInterleaved :: TP.ProcessConfig stdin stdout stderr -> Process m BSL.ByteString
+
+makeSem ''Process
+
+runIO ::
+  Member (Embed IO) r =>
+  Sem (Process ': r) a ->
+  Sem r a
+runIO =
+  interpret $ \case
+    ReadInterleaved config -> embed $ (TP.readProcessInterleaved_ config :: IO BSL.ByteString)
+
+runPure ::
+  [BSL.ByteString] ->
+  Sem (Process ': r) a ->
+  Sem r a
+runPure outputList =
+  runInputList outputList
+    . reinterpret \case
+      ReadInterleaved _config -> maybe "" id <$> input
diff --git a/src/Rewrite.hs b/src/Rewrite.hs
index bdec05a..e625ae2 100644
--- a/src/Rewrite.hs
+++ b/src/Rewrite.hs
@@ -4,6 +4,7 @@ module Rewrite
   ( Args (..),
     golangModuleVersion,
     quotedUrls,
+    quotedUrlsET,
     rustCrateVersion,
     version,
   )
@@ -13,8 +14,12 @@ import qualified Data.Text as T
 import qualified File
 import qualified Nix
 import OurPrelude
+import qualified Polysemy.Error as Error
+import Polysemy.Output (Output, output)
+import qualified Process
 import qualified Utils
   ( UpdateEnv (..),
+    runLog,
   )
 import Prelude hiding (log)
 
@@ -59,9 +64,12 @@ version log args@(Args _ _ _ drvContents) = do
 --------------------------------------------------------------------------------
 -- Rewrite meta.homepage (and eventually other URLs) to be quoted if not
 -- already, as per https://github.com/NixOS/rfcs/pull/45
-quotedUrls :: MonadIO m => (Text -> m ()) -> Args -> ExceptT Text m (Maybe Text)
-quotedUrls log (Args _ attrPth drvFile _) = do
-  lift $ log "[quotedUrls]"
+quotedUrls ::
+  Members '[Process.Process, File.File, Error Text, Output Text] r =>
+  Args ->
+  Sem r (Maybe Text)
+quotedUrls (Args _ attrPth drvFile _) = do
+  output "[quotedUrls]"
   homepage <- Nix.getHomepage attrPth
   -- Bit of a hack, but the homepage that comes out of nix-env is *always*
   -- quoted by the nix eval, so we drop the first and last characters.
@@ -73,12 +81,24 @@ quotedUrls log (Args _ attrPth drvFile _) = do
   urlReplaced4 <- File.replace ("homepage =" <> stripped <> "; ") goodHomepage drvFile
   if urlReplaced1 || urlReplaced2 || urlReplaced3 || urlReplaced4
     then do
-      lift $ log "[quotedUrls] added quotes to meta.homepage"
+      output "[quotedUrls]: added quotes to meta.homepage"
      return $ Just "Quoted meta.homepage for [RFC 45](https://github.com/NixOS/rfcs/pull/45)"
     else do
-      lift $ log "[quotedUrls] nothing found to replace"
+      output "[quotedUrls] nothing found to replace"
      return Nothing
 
+quotedUrlsET :: MonadIO m => (Text -> IO ()) -> Args -> ExceptT Text m (Maybe Text)
+quotedUrlsET log rwArgs =
+  ExceptT
+    $ liftIO
+    . runFinal
+    . embedToFinal @IO
+    . Error.runError
+    . Process.runIO
+    . File.runIO
+    . Utils.runLog log
+    $ quotedUrls rwArgs
+
 --------------------------------------------------------------------------------
 -- Rewrite Rust on rustPlatform.buildRustPackage
 -- This is basically `version` above, but with a second pass to also update the
@@ -95,11 +115,11 @@ rustCrateVersion log args@(Args _ attrPth drvFile drvContents) = do
   srcVersionFix args
   -- But then from there we need to do this a second time for the cargoSha256!
   oldCargoSha256 <- Nix.getAttr "cargoSha256" attrPth
-  _ <- lift $ File.replace oldCargoSha256 Nix.sha256Zero drvFile
+  _ <- lift $ File.replaceIO oldCargoSha256 Nix.sha256Zero drvFile
   newCargoSha256 <- Nix.getHashFromBuild attrPth
   when (oldCargoSha256 == newCargoSha256) $ throwE "cargoSha256 hashes equal; no update necessary"
   lift . log $ "[rustCrateVersion] Replacing cargoSha256 with " <> newCargoSha256
-  _ <- lift $ File.replace Nix.sha256Zero newCargoSha256 drvFile
+  _ <- lift $ File.replaceIO Nix.sha256Zero newCargoSha256 drvFile
   -- Ensure the package actually builds and passes its tests
   Nix.build attrPth
   lift $ log "[rustCrateVersion] Finished updating Crate version and replacing hashes"
@@ -122,11 +142,11 @@ golangModuleVersion log args@(Args _ attrPth drvFile drvContents) = do
   -- But then from there we need to do this a second time for the modSha256!
   oldModSha256 <- Nix.getAttr "modSha256" attrPth
   lift . log $ "[golangModuleVersion] Found old modSha256 = " <> oldModSha256
-  _ <- lift $ File.replace oldModSha256 Nix.sha256Zero drvFile
+  _ <- lift $ File.replaceIO oldModSha256 Nix.sha256Zero drvFile
   newModSha256 <- Nix.getHashFromBuild attrPth
   when (oldModSha256 == newModSha256) $ throwE "modSha256 hashes equal; no update necessary"
   lift . log $ "[golangModuleVersion] Replacing modSha256 with " <> newModSha256
-  _ <- lift $ File.replace Nix.sha256Zero newModSha256 drvFile
+  _ <- lift $ File.replaceIO Nix.sha256Zero newModSha256 drvFile
   -- Ensure the package actually builds and passes its tests
   Nix.build attrPth
   lift $ log "[golangModuleVersion] Finished updating modSha256"
@@ -139,9 +159,9 @@ golangModuleVersion log args@(Args _ attrPth drvFile drvContents) = do
 srcVersionFix :: MonadIO m => Args -> ExceptT Text m ()
 srcVersionFix (Args env attrPth drvFile _) = do
   oldHash <- Nix.getOldHash attrPth
-  _ <- lift $ File.replace (Utils.oldVersion env) (Utils.newVersion env) drvFile
-  _ <- lift $ File.replace oldHash Nix.sha256Zero drvFile
+  _ <- lift $ File.replaceIO (Utils.oldVersion env) (Utils.newVersion env) drvFile
+  _ <- lift $ File.replaceIO oldHash Nix.sha256Zero drvFile
   newHash <- Nix.getHashFromBuild attrPth
   when (oldHash == newHash) $ throwE "Hashes equal; no update necessary"
-  _ <- lift $ File.replace Nix.sha256Zero newHash drvFile
+  _ <- lift $ File.replaceIO Nix.sha256Zero newHash drvFile
   return ()
diff --git a/src/Update.hs b/src/Update.hs
index 5dda1d5..29a8aaf 100644
--- a/src/Update.hs
+++ b/src/Update.hs
@@ -20,7 +20,6 @@ import qualified Check
 import Control.Concurrent
 import qualified Data.ByteString.Lazy.Char8 as BSL
 import Data.IORef
-import Data.Maybe (catMaybes)
 import qualified Data.Set as S
 import qualified Data.Text as T
 import qualified Data.Text.IO as T
@@ -74,6 +73,9 @@ updateAll o updates = do
   let log = log' logFile
   T.appendFile logFile "\n\n"
   log "New run of nixpkgs-update"
+  when (dryRun o) $ log "Dry Run."
+  when (pushToCachix o) $ log "Will push to cachix."
+  when (calculateOutpaths o) $ log "Will calculate outpaths."
   twoHoursAgo <- runM $ Time.runIO Time.twoHoursAgo
   mergeBaseOutpathSet <-
     liftIO $ newIORef (MergeBaseOutpathsInfo twoHoursAgo S.empty)
@@ -115,12 +117,11 @@ sourceGithubAll o updates = do
     u'
 
 updateLoop ::
-  MonadIO m =>
   Options ->
-  (Text -> m ()) ->
+  (Text -> IO ()) ->
   [Either Text (Text, Version, Version, Maybe URL)] ->
   IORef MergeBaseOutpathsInfo ->
-  m ()
+  IO ()
 updateLoop _ log [] _ = log "nixpkgs-update finished"
 updateLoop o log (Left e : moreUpdates) mergeBaseOutpathsContext = do
   log e
@@ -154,11 +155,10 @@ updateLoop o log (Right (pName, oldVer, newVer, url) : moreUpdates) mergeBaseOut
 -- - the merge base context should be updated externally to this function
 -- - the commit for branches: master, staging, staging-next, python-unstable
 updatePackage ::
-  MonadIO m =>
-  (Text -> m ()) ->
+  (Text -> IO ()) ->
   UpdateEnv ->
   IORef MergeBaseOutpathsInfo ->
-  m (Either Text ())
+  IO (Either Text ())
 updatePackage log updateEnv mergeBaseOutpathsContext =
   runExceptT $ do
     let dry = dryRun . options $ updateEnv
@@ -169,7 +169,10 @@ updatePackage log updateEnv mergeBaseOutpathsContext =
     --
     -- Update our git checkout
     Git.fetchIfStale <|> liftIO (T.putStrLn "Failed to fetch.")
-    Git.checkAutoUpdateBranchDoesntExist (packageName updateEnv)
+    -- If we're doing a dry run, we want to re-run locally even if there's
+    -- already a PR open upstream
+    unless dry $
+      Git.checkAutoUpdateBranchDoesntExist (packageName updateEnv)
     Git.cleanAndResetTo "master"
     --
    -- Filters: various cases where we shouldn't update the package
@@ -223,13 +226,13 @@ updatePackage log updateEnv mergeBaseOutpathsContext =
     msg1 <- Rewrite.version log rwArgs
     msg2 <- Rewrite.rustCrateVersion log rwArgs
     msg3 <- Rewrite.golangModuleVersion log rwArgs
-    msg4 <- Rewrite.quotedUrls log rwArgs
+    msg4 <- Rewrite.quotedUrlsET log rwArgs
     let msgs = catMaybes [msg1, msg2, msg3, msg4]
     ----------------------------------------------------------------------------
     --
     -- Compute the diff and get updated values
     diffAfterRewrites <- Git.diff
-    lift . log $ "Diff after rewrites::\n" <> diffAfterRewrites
+    lift . log $ "Diff after rewrites:\n" <> diffAfterRewrites
     updatedDerivationContents <- liftIO $ T.readFile derivationFile
     newSrcUrl <- Nix.getSrcUrl attrPath
     newHash <- Nix.getHash attrPath
@@ -268,7 +271,7 @@ publishPackage log updateEnv oldSrcUrl newSrcUrl attrPath result opDiff msgs = d
       Right () -> lift $ Check.result updateEnv (T.unpack result)
       Left msg -> pure msg
   d <- Nix.getDescription attrPath <|> return T.empty
-  u <- Nix.getHomepage attrPath <|> return T.empty
+  u <- Nix.getHomepageET attrPath <|> return T.empty
   cveRep <- liftIO $ cveReport updateEnv
   let metaDescription =
         if d == T.empty
diff --git a/src/Utils.hs b/src/Utils.hs
index 66ff022..886f28a 100644
--- a/src/Utils.hs
+++ b/src/Utils.hs
@@ -22,6 +22,7 @@ module Utils
     prTitle,
     nixBuildOptions,
     nixCommonOptions,
+    runLog
   )
 where
 
@@ -55,6 +56,7 @@ import System.Posix.Types (FileMode)
 import qualified System.Process.Typed
 import Text.Read (readEither)
 import Type.Reflection (Typeable)
+import Polysemy.Output
 
 default (T.Text)
 
@@ -239,3 +241,12 @@ nixBuildOptions =
       "true"
     ]
     <> nixCommonOptions
+
+runLog ::
+  Member (Embed IO) r =>
+  (Text -> IO ()) ->
+  Sem ((Output Text) ': r) a ->
+  Sem r a
+runLog logger =
+  interpret \case
+    Output o -> embed $ logger o
diff --git a/test/DoctestSpec.hs b/test/DoctestSpec.hs
index e4169ac..febc59e 100644
--- a/test/DoctestSpec.hs
+++ b/test/DoctestSpec.hs
@@ -26,12 +26,7 @@ spec = do
       "-XBlockArguments",
       "-flate-specialise",
       "-fspecialise-aggressively",
-      -- "-fplugin=Polysemy.Plugin",
-      -- src/Process.hs:1:1: error:
-      --     Can't find interface-file declaration for type constructor or class Polysemy.Internal.Union.LocateEffect
-      --     Probable cause: bug in .hi-boot file, or inconsistent .hi file
-      --     Use -ddump-if-trace to get an idea of which file caused the error
-
+      "-fplugin=Polysemy.Plugin",
      "src/Version.hs",
      "src/GH.hs",
      "src/Time.hs"
diff --git a/test/RewriteSpec.hs b/test/RewriteSpec.hs
index 04b64ca..886a175 100644
--- a/test/RewriteSpec.hs
+++ b/test/RewriteSpec.hs
@@ -7,9 +7,11 @@ import qualified Data.Text as T
 import qualified Data.Text.IO as T
 import qualified File
 import OurPrelude
+import qualified Polysemy.Error as Error
+import qualified Polysemy.Output as Output
+import qualified Process
 import qualified Rewrite
 import Test.Hspec
-import Text.RawString.QQ (r)
 import qualified Utils
 
 main :: IO ()
@@ -17,6 +19,24 @@ main = hspec spec
 
 spec :: Spec
 spec = do
-  describe "Hello world" do
-    it "is alive" do
-      2 + 2 `shouldBe` 4
+  describe "Rewrite.quotedUrls" do
+    it "quotes an unquoted meta.homepage URL" do
+      nixQuotedHomepageBad <- T.readFile "test_data/quoted_homepage_bad.nix"
+      nixQuotedHomepageGood <- T.readFile "test_data/quoted_homepage_good.nix"
+      let options = Utils.Options False "" False False
+      let updateEnv = Utils.UpdateEnv "inadyn" "2.5" "2.6" Nothing options
+      -- TODO test correct file is being read
+      let rwArgs = Rewrite.Args updateEnv "inadyn" undefined undefined
+      (logs, (newContents, result)) <-
+        ( runFinal
+            . embedToFinal @IO
+            . Output.runOutputList
+            . File.runPure [nixQuotedHomepageBad]
+            . Process.runPure ["\"http://troglobit.com/project/inadyn/\""]
+            . Error.errorToIOFinal
+            $ Rewrite.quotedUrls rwArgs
+          )
+      T.putStrLn $ T.unlines logs
+      head logs `shouldBe` "[quotedUrls]"
+      result `shouldBe` Right (Just "Quoted meta.homepage for [RFC 45](https://github.com/NixOS/rfcs/pull/45)")
+      head newContents `shouldBe` nixQuotedHomepageGood
diff --git a/test_data/quoted_homepage_bad.nix b/test_data/quoted_homepage_bad.nix
new file mode 100644
index 0000000..3417f3f
--- /dev/null
+++ b/test_data/quoted_homepage_bad.nix
@@ -0,0 +1,28 @@
+{ stdenv, fetchFromGitHub, autoreconfHook, pkgconfig
+, gnutls, libite, libconfuse }:
+
+stdenv.mkDerivation rec {
+  pname = "inadyn";
+  version = "2.6";
+
+  src = fetchFromGitHub {
+    owner = "troglobit";
+    repo = "inadyn";
+    rev = "v${version}";
+    sha256 = "013kxlglxliajv3lrsix4w88w40g709rvycajb6ad6gbh8giqv47";
+  };
+
+  nativeBuildInputs = [ autoreconfHook pkgconfig ];
+
+  buildInputs = [ gnutls libite libconfuse ];
+
+  enableParallelBuilding = true;
+
+  meta = with stdenv.lib; {
+    homepage = http://troglobit.com/project/inadyn/;
+    description = "Free dynamic DNS client";
+    license = licenses.gpl2Plus;
+    maintainers = with maintainers; [ ];
+    platforms = platforms.linux;
+  };
+}
diff --git a/test_data/quoted_homepage_good.nix b/test_data/quoted_homepage_good.nix
new file mode 100644
index 0000000..9f7d79b
--- /dev/null
+++ b/test_data/quoted_homepage_good.nix
@@ -0,0 +1,28 @@
+{ stdenv, fetchFromGitHub, autoreconfHook, pkgconfig
+, gnutls, libite, libconfuse }:
+
+stdenv.mkDerivation rec {
+  pname = "inadyn";
+  version = "2.6";
+
+  src = fetchFromGitHub {
+    owner = "troglobit";
+    repo = "inadyn";
+    rev = "v${version}";
+    sha256 = "013kxlglxliajv3lrsix4w88w40g709rvycajb6ad6gbh8giqv47";
+  };
+
+  nativeBuildInputs = [ autoreconfHook pkgconfig ];
+
+  buildInputs = [ gnutls libite libconfuse ];
+
+  enableParallelBuilding = true;
+
+  meta = with stdenv.lib; {
+    homepage = "http://troglobit.com/project/inadyn/";
+    description = "Free dynamic DNS client";
+    license = licenses.gpl2Plus;
+    maintainers = with maintainers; [ ];
+    platforms = platforms.linux;
+  };
+}
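The IO-facing wrapper added in src/File.hs above is the intended entry point for callers that are not yet inside a Sem stack. A minimal usage sketch, assuming only the File module from this patch; the module name, file path, and version strings below are made up for illustration and are not part of the change:

{-# LANGUAGE OverloadedStrings #-}

-- Sketch only: File.replaceIO wraps the effectful File.replace in
-- runFinal/embedToFinal/File.runIO, so plain-IO call sites keep working.
-- "pkgs/example.nix" and the version strings are hypothetical.
module ReplaceSketch where

import qualified File

main :: IO ()
main = do
  -- Returns True only when the file's contents actually changed.
  changed <- File.replaceIO "1.0" "1.1" "pkgs/example.nix"
  putStrLn ("rewrote file: " <> show changed)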
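In the same spirit, because Nix.getHomepage now runs against the Process effect instead of shelling out to nix directly, it can be fed canned `nix eval` output, mirroring how RewriteSpec stubs Rewrite.quotedUrls. A sketch under the same assumptions; the attribute path "hello" and the canned URL are placeholders, and only interpreters that appear in this patch or in polysemy itself are used:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

-- Sketch only: stub the Process effect so getHomepage never runs nix.
module HomepageSketch where

import qualified Nix
import OurPrelude
import qualified Polysemy.Error as Error
import qualified Process

-- The canned string stands in for the raw output of `nix eval`, which is
-- why it carries the surrounding quotes that quotedUrls later strips.
fakeHomepage :: IO (Either Text Text)
fakeHomepage =
  runFinal
    . embedToFinal @IO
    . Error.errorToIOFinal
    . Process.runPure ["\"https://example.org\""]
    $ Nix.getHomepage "hello"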