use Polysemy for File and Process, add test for quotedUrls

regenerate haskell nix file (drops hex dependency)

apply ormolu to a few files

fixup cabal file

add polysemy logger interpretation of Output, fix a bug in nixEvalSem

* nixEvalSem was accidentally not stripping the output

create test_data and simplify RewriteSpec to use it

move Polysemy interpretation stacks closer to where polysemy methods are made

Eventually these will move to a more top-level position, but for now
this keeps them more contained.

update to ghc883, remove raw-strings-qq dep
This commit is contained in:
Ryan Mulligan 2020-03-04 22:31:59 -08:00
parent 423f23d02a
commit a0a2e6afbe
16 changed files with 308 additions and 95 deletions

View File

@ -4,7 +4,7 @@ returnShellEnv ? pkgs.lib.inNixShell
let let
compiler = pkgs.haskell.packages.ghc882; compiler = pkgs.haskell.packages.ghc883;
inherit (pkgs.haskell.lib) dontCheck doJailbreak overrideCabal; inherit (pkgs.haskell.lib) dontCheck doJailbreak overrideCabal;

View File

@ -5,10 +5,10 @@
"homepage": "https://github.com/nmattia/niv", "homepage": "https://github.com/nmattia/niv",
"owner": "nmattia", "owner": "nmattia",
"repo": "niv", "repo": "niv",
"rev": "98c74a80934123cb4c3bf3314567f67311eb711a", "rev": "f73bf8d584148677b01859677a63191c31911eae",
"sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn", "sha256": "0jlmrx633jvqrqlyhlzpvdrnim128gc81q5psz2lpp2af8p8q9qs",
"type": "tarball", "type": "tarball",
"url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz", "url": "https://github.com/nmattia/niv/archive/f73bf8d584148677b01859677a63191c31911eae.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}, },
"nixpkgs": { "nixpkgs": {
@ -17,10 +17,10 @@
"homepage": null, "homepage": null,
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "0bfd0187dafe3f597355e6be16b7b9a5f4b90376", "rev": "78bfdbb291fd20df0f0f65061ee3081610b0a48f",
"sha256": "1ydpmvfshkaxr005imhkf8h5ihsb2l97ycyl6fmyanqjdw149wgl", "sha256": "0qy72dm799vrmcmb72zcxkj2rrcgqgsj0z58f9gl069p9aag2z3a",
"type": "tarball", "type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/0bfd0187dafe3f597355e6be16b7b9a5f4b90376.tar.gz", "url": "https://github.com/nixos/nixpkgs/archive/78bfdbb291fd20df0f0f65061ee3081610b0a48f.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
} }
} }

View File

@ -19,7 +19,7 @@ let
pkgs.fetchzip { inherit (spec) url sha256; }; pkgs.fetchzip { inherit (spec) url sha256; };
fetch_git = spec: fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; }; builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_builtin-tarball = spec: fetch_builtin-tarball = spec:
builtins.trace builtins.trace
@ -49,26 +49,22 @@ let
# The set of packages used when specs are fetched using non-builtins. # The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources: mkPkgs = sources:
if hasNixpkgsPath let
then sourcesNixpkgs =
if hasThisAsNixpkgsPath import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {};
then import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {} hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath;
else import <nixpkgs> {} hasThisAsNixpkgsPath = <nixpkgs> == ./.;
else in
import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {}; if builtins.hasAttr "nixpkgs" sources
then sourcesNixpkgs
mkNixpkgs = sources: else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then
if builtins.hasAttr "nixpkgs" sources import <nixpkgs> {}
then sources.nixpkgs else
else abort abort
'' ''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json. add a package called "nixpkgs" to your sources.json.
''; '';
hasNixpkgsPath = (builtins.tryEval <nixpkgs>).success;
hasThisAsNixpkgsPath =
(builtins.tryEval <nixpkgs>).success && <nixpkgs> == ./.;
# The actual fetching function. # The actual fetching function.
fetch = pkgs: name: spec: fetch = pkgs: name: spec:
@ -125,12 +121,14 @@ let
# The "config" used by the fetchers # The "config" used by the fetchers
mkConfig = mkConfig =
{ sourcesFile ? ./sources.json { sourcesFile ? ./sources.json
, sources ? builtins.fromJSON (builtins.readFile sourcesFile)
, pkgs ? mkPkgs sources
}: rec { }: rec {
# The sources, i.e. the attribute set of spec name to spec # The sources, i.e. the attribute set of spec name to spec
sources = builtins.fromJSON (builtins.readFile sourcesFile); inherit sources;
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
pkgs = mkPkgs sources; inherit pkgs;
}; };
in in
mkSources (mkConfig {}) // mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); }
{ __functor = _: settings: mkSources (mkConfig settings); }

View File

@ -3,7 +3,7 @@
, hpack, hspec, hspec-discover, http-client-tls, http-conduit , hpack, hspec, hspec-discover, http-client-tls, http-conduit
, iso8601-time, lifted-base, mtl, neat-interpolation , iso8601-time, lifted-base, mtl, neat-interpolation
, optparse-applicative, parsec, parsers, partial-order, polysemy , optparse-applicative, parsec, parsers, partial-order, polysemy
, raw-strings-qq, regex-applicative-text, servant, servant-client , polysemy-plugin, regex-applicative-text, servant, servant-client
, sqlite-simple, stdenv, template-haskell, temporary, text, time , sqlite-simple, stdenv, template-haskell, temporary, text, time
, transformers, typed-process, unix, unordered-containers, vector , transformers, typed-process, unix, unordered-containers, vector
, versions, xdg-basedir, zlib , versions, xdg-basedir, zlib
@ -19,9 +19,10 @@ mkDerivation {
directory errors filepath github http-client-tls http-conduit directory errors filepath github http-client-tls http-conduit
iso8601-time lifted-base mtl neat-interpolation iso8601-time lifted-base mtl neat-interpolation
optparse-applicative parsec parsers partial-order polysemy optparse-applicative parsec parsers partial-order polysemy
regex-applicative-text servant servant-client sqlite-simple polysemy-plugin regex-applicative-text servant servant-client
template-haskell temporary text time transformers typed-process sqlite-simple template-haskell temporary text time transformers
unix unordered-containers vector versions xdg-basedir zlib typed-process unix unordered-containers vector versions xdg-basedir
zlib
]; ];
libraryToolDepends = [ hpack ]; libraryToolDepends = [ hpack ];
executableHaskellDepends = [ executableHaskellDepends = [
@ -29,16 +30,17 @@ mkDerivation {
directory errors filepath github http-client-tls http-conduit directory errors filepath github http-client-tls http-conduit
iso8601-time lifted-base mtl neat-interpolation iso8601-time lifted-base mtl neat-interpolation
optparse-applicative parsec parsers partial-order polysemy optparse-applicative parsec parsers partial-order polysemy
regex-applicative-text servant servant-client sqlite-simple polysemy-plugin regex-applicative-text servant servant-client
template-haskell temporary text time transformers typed-process sqlite-simple template-haskell temporary text time transformers
unix unordered-containers vector versions xdg-basedir zlib typed-process unix unordered-containers vector versions xdg-basedir
zlib
]; ];
testHaskellDepends = [ testHaskellDepends = [
aeson base bytestring conduit containers cryptohash-sha256 aeson base bytestring conduit containers cryptohash-sha256
directory doctest errors filepath github hspec hspec-discover directory doctest errors filepath github hspec hspec-discover
http-client-tls http-conduit iso8601-time lifted-base mtl http-client-tls http-conduit iso8601-time lifted-base mtl
neat-interpolation optparse-applicative parsec parsers neat-interpolation optparse-applicative parsec parsers
partial-order polysemy raw-strings-qq regex-applicative-text partial-order polysemy polysemy-plugin regex-applicative-text
servant servant-client sqlite-simple template-haskell temporary servant servant-client sqlite-simple template-haskell temporary
text time transformers typed-process unix unordered-containers text time transformers typed-process unix unordered-containers
vector versions xdg-basedir zlib vector versions xdg-basedir zlib

View File

@ -16,8 +16,7 @@ extra-source-files:
github: ryantm/nixpkgs-update github: ryantm/nixpkgs-update
ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively -fplugin=Polysemy.Plugin
default-extensions: default-extensions:
- DataKinds - DataKinds
@ -54,6 +53,7 @@ dependencies:
- parsers - parsers
- partial-order - partial-order
- polysemy - polysemy
- polysemy-plugin
- regex-applicative-text - regex-applicative-text
- servant - servant
- servant-client - servant-client
@ -82,9 +82,8 @@ tests:
dependencies: dependencies:
- hspec - hspec
- hspec-discover - hspec-discover
- doctest
- raw-strings-qq
- nixpkgs-update - nixpkgs-update
- doctest
executables: executables:
nixpkgs-update: nixpkgs-update:

View File

@ -1,18 +1,58 @@
module File {-# LANGUAGE TemplateHaskell #-}
( replace, {-# LANGUAGE LambdaCase, BlockArguments #-}
) {-# LANGUAGE OverloadedStrings #-}
where
module File where
import qualified Data.Text as T import qualified Data.Text as T
import Data.Text.IO as T import Data.Text.IO as T
import OurPrelude import OurPrelude
import Control.Monad (when) import Polysemy.Input
import Polysemy.Output
replace :: MonadIO m => Text -> Text -> FilePath -> m Bool data File m a where
replace find r file = Read :: FilePath -> File m Text
liftIO $ do Write :: FilePath -> Text -> File m ()
contents <- T.readFile file
let newContents = T.replace find r contents makeSem ''File
when (contents /= newContents) $ do
T.writeFile file newContents runIO ::
return $ contents /= newContents Member (Embed IO) r =>
Sem (File ': r) a ->
Sem r a
runIO =
interpret $ \case
Read file -> embed $ T.readFile file
Write file contents -> embed $ T.writeFile file contents
runPure ::
[Text] ->
Sem (File ': r) a ->
Sem r ([Text], a)
runPure contentList
= runOutputMonoid pure
. runInputList contentList
. reinterpret2 \case
Read _file -> maybe "" id <$> input
Write _file contents -> output contents
replace ::
Member File r =>
Text ->
Text ->
FilePath ->
Sem r Bool
replace find replacement file = do
contents <- File.read file
let newContents = T.replace find replacement contents
when (contents /= newContents) $ do
File.write file newContents
return $ contents /= newContents
replaceIO :: MonadIO m => Text -> Text -> FilePath -> m Bool
replaceIO find replacement file =
liftIO
$ runFinal
$ embedToFinal @IO
$ runIO
$ (replace find replacement file)

View File

@ -1,6 +1,7 @@
{-# LANGUAGE ExtendedDefaultRules #-} {-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-} {-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Nix module Nix
@ -15,6 +16,7 @@ module Nix
getHash, getHash,
getHashFromBuild, getHashFromBuild,
getHomepage, getHomepage,
getHomepageET,
getIsBroken, getIsBroken,
getMaintainers, getMaintainers,
getOldHash, getOldHash,
@ -40,6 +42,9 @@ import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.Vector as V import qualified Data.Vector as V
import OurPrelude import OurPrelude
import qualified Polysemy.Error as Error
import qualified Process as P
import qualified Process
import System.Exit import System.Exit
import Text.Parsec (parse) import Text.Parsec (parse)
import Text.Parser.Combinators import Text.Parser.Combinators
@ -49,16 +54,26 @@ import Prelude hiding (log)
data Env = Env [(String, String)] data Env = Env [(String, String)]
data EvalOptions = EvalOptions Raw Env
data Raw data Raw
= Raw = Raw
| NoRaw | NoRaw
data EvalOptions = EvalOptions Raw Env
rawOpt :: Raw -> [String] rawOpt :: Raw -> [String]
rawOpt Raw = ["--raw"] rawOpt Raw = ["--raw"]
rawOpt NoRaw = [] rawOpt NoRaw = []
nixEvalSem ::
Members '[P.Process, Error Text] r =>
EvalOptions ->
Text ->
Sem r Text
nixEvalSem (EvalOptions raw (Env env)) expr =
T.strip
<$> ourReadProcessInterleaved_Sem
(setEnv env (proc "nix" (["eval", "-f", "."] <> rawOpt raw <> [T.unpack expr])))
nixEvalET :: MonadIO m => EvalOptions -> Text -> ExceptT Text m Text nixEvalET :: MonadIO m => EvalOptions -> Text -> ExceptT Text m Text
nixEvalET (EvalOptions raw (Env env)) expr = nixEvalET (EvalOptions raw (Env env)) expr =
ourReadProcessInterleaved_ ourReadProcessInterleaved_
@ -192,15 +207,27 @@ getDescription attrPath =
) )
& overwriteErrorT ("Could not get meta.description for attrpath " <> attrPath) & overwriteErrorT ("Could not get meta.description for attrpath " <> attrPath)
getHomepage :: MonadIO m => Text -> ExceptT Text m Text getHomepage ::
Members '[P.Process, Error Text] r =>
Text ->
Sem r Text
getHomepage attrPath = getHomepage attrPath =
nixEvalET nixEvalSem
(EvalOptions NoRaw (Env [])) (EvalOptions NoRaw (Env []))
( "(let pkgs = import ./. {}; in pkgs." ( "(let pkgs = import ./. {}; in pkgs."
<> attrPath <> attrPath
<> ".meta.homepage or \"\")" <> ".meta.homepage or \"\")"
) )
& overwriteErrorT ("Could not get meta.homepage for attrpath " <> attrPath)
getHomepageET :: MonadIO m => Text -> ExceptT Text m Text
getHomepageET attrPath =
ExceptT
. liftIO
. runFinal
. embedToFinal @IO
. Error.runError
. Process.runIO
$ getHomepage attrPath
getSrcUrl :: MonadIO m => Text -> ExceptT Text m Text getSrcUrl :: MonadIO m => Text -> ExceptT Text m Text
getSrcUrl = getSrcUrl =

View File

@ -13,6 +13,7 @@ module OurPrelude
module Data.Bifunctor, module Data.Bifunctor,
module System.Process.Typed, module System.Process.Typed,
module Polysemy, module Polysemy,
module Polysemy.Error,
Set, Set,
Text, Text,
Vector, Vector,
@ -23,6 +24,7 @@ module OurPrelude
ourReadProcessInterleaved_, ourReadProcessInterleaved_,
ourReadProcessInterleavedBS_, ourReadProcessInterleavedBS_,
ourReadProcessInterleaved, ourReadProcessInterleaved,
ourReadProcessInterleaved_Sem,
silently, silently,
bytestringToText, bytestringToText,
) )
@ -45,6 +47,8 @@ import Data.Vector (Vector)
import Language.Haskell.TH.Quote import Language.Haskell.TH.Quote
import qualified NeatInterpolation import qualified NeatInterpolation
import Polysemy import Polysemy
import Polysemy.Error hiding (tryJust, note, try)
import qualified Process as P
import System.Exit import System.Exit
import System.Process.Typed import System.Process.Typed
@ -76,6 +80,13 @@ ourReadProcessInterleaved_ ::
ourReadProcessInterleaved_ = ourReadProcessInterleaved_ =
readProcessInterleaved_ >>> tryIOTextET >>> fmapRT bytestringToText readProcessInterleaved_ >>> tryIOTextET >>> fmapRT bytestringToText
ourReadProcessInterleaved_Sem ::
Members '[P.Process, Error Text] r =>
ProcessConfig stdin stdoutIgnored stderrIgnored ->
Sem r Text
ourReadProcessInterleaved_Sem =
P.readInterleaved >>> fmap bytestringToText
ourReadProcessInterleaved :: ourReadProcessInterleaved ::
MonadIO m => MonadIO m =>
ProcessConfig stdin stdoutIgnored stderrIgnored -> ProcessConfig stdin stdoutIgnored stderrIgnored ->

31
src/Process.hs Normal file
View File

@ -0,0 +1,31 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}

-- | A Polysemy effect for running external processes, with an IO
-- interpreter for production and a pure interpreter for tests.
module Process where

import qualified Data.ByteString.Lazy as BSL
import Polysemy
import Polysemy.Input
import qualified System.Process.Typed as TP

-- | The Process effect. Its single operation runs a process described by a
-- typed-process 'TP.ProcessConfig' and captures interleaved stdout/stderr
-- as a lazy ByteString.
data Process m a where
  ReadInterleaved :: TP.ProcessConfig stdin stdout stderr -> Process m BSL.ByteString

-- Template Haskell: generates the smart constructor 'readInterleaved'.
makeSem ''Process

-- | Interpret 'Process' in IO via 'TP.readProcessInterleaved_'.
runIO ::
  Member (Embed IO) r =>
  Sem (Process ': r) a ->
  Sem r a
runIO =
  interpret $ \case
    ReadInterleaved config -> embed $ (TP.readProcessInterleaved_ config :: IO BSL.ByteString)

-- | Pure interpretation for tests: each 'ReadInterleaved' call consumes the
-- next element of the supplied list of canned outputs; once the list is
-- exhausted, 'input' yields Nothing and the call returns "".
runPure ::
  [BSL.ByteString] ->
  Sem (Process ': r) a ->
  Sem r a
runPure outputList =
  runInputList outputList
    . reinterpret \case
      ReadInterleaved _config -> maybe "" id <$> input

View File

@ -4,6 +4,7 @@ module Rewrite
( Args (..), ( Args (..),
golangModuleVersion, golangModuleVersion,
quotedUrls, quotedUrls,
quotedUrlsET,
rustCrateVersion, rustCrateVersion,
version, version,
) )
@ -13,8 +14,12 @@ import qualified Data.Text as T
import qualified File import qualified File
import qualified Nix import qualified Nix
import OurPrelude import OurPrelude
import qualified Polysemy.Error as Error
import Polysemy.Output (Output, output)
import qualified Process
import qualified Utils import qualified Utils
( UpdateEnv (..), ( UpdateEnv (..),
runLog,
) )
import Prelude hiding (log) import Prelude hiding (log)
@ -59,9 +64,12 @@ version log args@(Args _ _ _ drvContents) = do
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------
-- Rewrite meta.homepage (and eventually other URLs) to be quoted if not -- Rewrite meta.homepage (and eventually other URLs) to be quoted if not
-- already, as per https://github.com/NixOS/rfcs/pull/45 -- already, as per https://github.com/NixOS/rfcs/pull/45
quotedUrls :: MonadIO m => (Text -> m ()) -> Args -> ExceptT Text m (Maybe Text) quotedUrls ::
quotedUrls log (Args _ attrPth drvFile _) = do Members '[Process.Process, File.File, Error Text, Output Text] r =>
lift $ log "[quotedUrls]" Args ->
Sem r (Maybe Text)
quotedUrls (Args _ attrPth drvFile _) = do
output "[quotedUrls]"
homepage <- Nix.getHomepage attrPth homepage <- Nix.getHomepage attrPth
-- Bit of a hack, but the homepage that comes out of nix-env is *always* -- Bit of a hack, but the homepage that comes out of nix-env is *always*
-- quoted by the nix eval, so we drop the first and last characters. -- quoted by the nix eval, so we drop the first and last characters.
@ -73,12 +81,24 @@ quotedUrls log (Args _ attrPth drvFile _) = do
urlReplaced4 <- File.replace ("homepage =" <> stripped <> "; ") goodHomepage drvFile urlReplaced4 <- File.replace ("homepage =" <> stripped <> "; ") goodHomepage drvFile
if urlReplaced1 || urlReplaced2 || urlReplaced3 || urlReplaced4 if urlReplaced1 || urlReplaced2 || urlReplaced3 || urlReplaced4
then do then do
lift $ log "[quotedUrls] added quotes to meta.homepage" output "[quotedUrls]: added quotes to meta.homepage"
return $ Just "Quoted meta.homepage for [RFC 45](https://github.com/NixOS/rfcs/pull/45)" return $ Just "Quoted meta.homepage for [RFC 45](https://github.com/NixOS/rfcs/pull/45)"
else do else do
lift $ log "[quotedUrls] nothing found to replace" output "[quotedUrls] nothing found to replace"
return Nothing return Nothing
quotedUrlsET :: MonadIO m => (Text -> IO ()) -> Args -> ExceptT Text m (Maybe Text)
quotedUrlsET log rwArgs =
ExceptT
$ liftIO
. runFinal
. embedToFinal @IO
. Error.runError
. Process.runIO
. File.runIO
. Utils.runLog log
$ quotedUrls rwArgs
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------
-- Rewrite Rust on rustPlatform.buildRustPackage -- Rewrite Rust on rustPlatform.buildRustPackage
-- This is basically `version` above, but with a second pass to also update the -- This is basically `version` above, but with a second pass to also update the
@ -95,11 +115,11 @@ rustCrateVersion log args@(Args _ attrPth drvFile drvContents) = do
srcVersionFix args srcVersionFix args
-- But then from there we need to do this a second time for the cargoSha256! -- But then from there we need to do this a second time for the cargoSha256!
oldCargoSha256 <- Nix.getAttr "cargoSha256" attrPth oldCargoSha256 <- Nix.getAttr "cargoSha256" attrPth
_ <- lift $ File.replace oldCargoSha256 Nix.sha256Zero drvFile _ <- lift $ File.replaceIO oldCargoSha256 Nix.sha256Zero drvFile
newCargoSha256 <- Nix.getHashFromBuild attrPth newCargoSha256 <- Nix.getHashFromBuild attrPth
when (oldCargoSha256 == newCargoSha256) $ throwE "cargoSha256 hashes equal; no update necessary" when (oldCargoSha256 == newCargoSha256) $ throwE "cargoSha256 hashes equal; no update necessary"
lift . log $ "[rustCrateVersion] Replacing cargoSha256 with " <> newCargoSha256 lift . log $ "[rustCrateVersion] Replacing cargoSha256 with " <> newCargoSha256
_ <- lift $ File.replace Nix.sha256Zero newCargoSha256 drvFile _ <- lift $ File.replaceIO Nix.sha256Zero newCargoSha256 drvFile
-- Ensure the package actually builds and passes its tests -- Ensure the package actually builds and passes its tests
Nix.build attrPth Nix.build attrPth
lift $ log "[rustCrateVersion] Finished updating Crate version and replacing hashes" lift $ log "[rustCrateVersion] Finished updating Crate version and replacing hashes"
@ -122,11 +142,11 @@ golangModuleVersion log args@(Args _ attrPth drvFile drvContents) = do
-- But then from there we need to do this a second time for the modSha256! -- But then from there we need to do this a second time for the modSha256!
oldModSha256 <- Nix.getAttr "modSha256" attrPth oldModSha256 <- Nix.getAttr "modSha256" attrPth
lift . log $ "[golangModuleVersion] Found old modSha256 = " <> oldModSha256 lift . log $ "[golangModuleVersion] Found old modSha256 = " <> oldModSha256
_ <- lift $ File.replace oldModSha256 Nix.sha256Zero drvFile _ <- lift $ File.replaceIO oldModSha256 Nix.sha256Zero drvFile
newModSha256 <- Nix.getHashFromBuild attrPth newModSha256 <- Nix.getHashFromBuild attrPth
when (oldModSha256 == newModSha256) $ throwE "modSha256 hashes equal; no update necessary" when (oldModSha256 == newModSha256) $ throwE "modSha256 hashes equal; no update necessary"
lift . log $ "[golangModuleVersion] Replacing modSha256 with " <> newModSha256 lift . log $ "[golangModuleVersion] Replacing modSha256 with " <> newModSha256
_ <- lift $ File.replace Nix.sha256Zero newModSha256 drvFile _ <- lift $ File.replaceIO Nix.sha256Zero newModSha256 drvFile
-- Ensure the package actually builds and passes its tests -- Ensure the package actually builds and passes its tests
Nix.build attrPth Nix.build attrPth
lift $ log "[golangModuleVersion] Finished updating modSha256" lift $ log "[golangModuleVersion] Finished updating modSha256"
@ -139,9 +159,9 @@ golangModuleVersion log args@(Args _ attrPth drvFile drvContents) = do
srcVersionFix :: MonadIO m => Args -> ExceptT Text m () srcVersionFix :: MonadIO m => Args -> ExceptT Text m ()
srcVersionFix (Args env attrPth drvFile _) = do srcVersionFix (Args env attrPth drvFile _) = do
oldHash <- Nix.getOldHash attrPth oldHash <- Nix.getOldHash attrPth
_ <- lift $ File.replace (Utils.oldVersion env) (Utils.newVersion env) drvFile _ <- lift $ File.replaceIO (Utils.oldVersion env) (Utils.newVersion env) drvFile
_ <- lift $ File.replace oldHash Nix.sha256Zero drvFile _ <- lift $ File.replaceIO oldHash Nix.sha256Zero drvFile
newHash <- Nix.getHashFromBuild attrPth newHash <- Nix.getHashFromBuild attrPth
when (oldHash == newHash) $ throwE "Hashes equal; no update necessary" when (oldHash == newHash) $ throwE "Hashes equal; no update necessary"
_ <- lift $ File.replace Nix.sha256Zero newHash drvFile _ <- lift $ File.replaceIO Nix.sha256Zero newHash drvFile
return () return ()

View File

@ -20,7 +20,6 @@ import qualified Check
import Control.Concurrent import Control.Concurrent
import qualified Data.ByteString.Lazy.Char8 as BSL import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.IORef import Data.IORef
import Data.Maybe (catMaybes)
import qualified Data.Set as S import qualified Data.Set as S
import qualified Data.Text as T import qualified Data.Text as T
import qualified Data.Text.IO as T import qualified Data.Text.IO as T
@ -74,6 +73,9 @@ updateAll o updates = do
let log = log' logFile let log = log' logFile
T.appendFile logFile "\n\n" T.appendFile logFile "\n\n"
log "New run of nixpkgs-update" log "New run of nixpkgs-update"
when (dryRun o) $ log "Dry Run."
when (pushToCachix o) $ log "Will push to cachix."
when (calculateOutpaths o) $ log "Will calculate outpaths."
twoHoursAgo <- runM $ Time.runIO Time.twoHoursAgo twoHoursAgo <- runM $ Time.runIO Time.twoHoursAgo
mergeBaseOutpathSet <- mergeBaseOutpathSet <-
liftIO $ newIORef (MergeBaseOutpathsInfo twoHoursAgo S.empty) liftIO $ newIORef (MergeBaseOutpathsInfo twoHoursAgo S.empty)
@ -115,12 +117,11 @@ sourceGithubAll o updates = do
u' u'
updateLoop :: updateLoop ::
MonadIO m =>
Options -> Options ->
(Text -> m ()) -> (Text -> IO ()) ->
[Either Text (Text, Version, Version, Maybe URL)] -> [Either Text (Text, Version, Version, Maybe URL)] ->
IORef MergeBaseOutpathsInfo -> IORef MergeBaseOutpathsInfo ->
m () IO ()
updateLoop _ log [] _ = log "nixpkgs-update finished" updateLoop _ log [] _ = log "nixpkgs-update finished"
updateLoop o log (Left e : moreUpdates) mergeBaseOutpathsContext = do updateLoop o log (Left e : moreUpdates) mergeBaseOutpathsContext = do
log e log e
@ -154,11 +155,10 @@ updateLoop o log (Right (pName, oldVer, newVer, url) : moreUpdates) mergeBaseOut
-- - the merge base context should be updated externally to this function -- - the merge base context should be updated externally to this function
-- - the commit for branches: master, staging, staging-next, python-unstable -- - the commit for branches: master, staging, staging-next, python-unstable
updatePackage :: updatePackage ::
MonadIO m => (Text -> IO ()) ->
(Text -> m ()) ->
UpdateEnv -> UpdateEnv ->
IORef MergeBaseOutpathsInfo -> IORef MergeBaseOutpathsInfo ->
m (Either Text ()) IO (Either Text ())
updatePackage log updateEnv mergeBaseOutpathsContext = updatePackage log updateEnv mergeBaseOutpathsContext =
runExceptT $ do runExceptT $ do
let dry = dryRun . options $ updateEnv let dry = dryRun . options $ updateEnv
@ -169,7 +169,10 @@ updatePackage log updateEnv mergeBaseOutpathsContext =
-- --
-- Update our git checkout -- Update our git checkout
Git.fetchIfStale <|> liftIO (T.putStrLn "Failed to fetch.") Git.fetchIfStale <|> liftIO (T.putStrLn "Failed to fetch.")
Git.checkAutoUpdateBranchDoesntExist (packageName updateEnv) -- If we're doing a dry run, we want to re-run locally even if there's
-- already a PR open upstream
unless dry $
Git.checkAutoUpdateBranchDoesntExist (packageName updateEnv)
Git.cleanAndResetTo "master" Git.cleanAndResetTo "master"
-- --
-- Filters: various cases where we shouldn't update the package -- Filters: various cases where we shouldn't update the package
@ -223,13 +226,13 @@ updatePackage log updateEnv mergeBaseOutpathsContext =
msg1 <- Rewrite.version log rwArgs msg1 <- Rewrite.version log rwArgs
msg2 <- Rewrite.rustCrateVersion log rwArgs msg2 <- Rewrite.rustCrateVersion log rwArgs
msg3 <- Rewrite.golangModuleVersion log rwArgs msg3 <- Rewrite.golangModuleVersion log rwArgs
msg4 <- Rewrite.quotedUrls log rwArgs msg4 <- Rewrite.quotedUrlsET log rwArgs
let msgs = catMaybes [msg1, msg2, msg3, msg4] let msgs = catMaybes [msg1, msg2, msg3, msg4]
---------------------------------------------------------------------------- ----------------------------------------------------------------------------
-- --
-- Compute the diff and get updated values -- Compute the diff and get updated values
diffAfterRewrites <- Git.diff diffAfterRewrites <- Git.diff
lift . log $ "Diff after rewrites::\n" <> diffAfterRewrites lift . log $ "Diff after rewrites:\n" <> diffAfterRewrites
updatedDerivationContents <- liftIO $ T.readFile derivationFile updatedDerivationContents <- liftIO $ T.readFile derivationFile
newSrcUrl <- Nix.getSrcUrl attrPath newSrcUrl <- Nix.getSrcUrl attrPath
newHash <- Nix.getHash attrPath newHash <- Nix.getHash attrPath
@ -268,7 +271,7 @@ publishPackage log updateEnv oldSrcUrl newSrcUrl attrPath result opDiff msgs = d
Right () -> lift $ Check.result updateEnv (T.unpack result) Right () -> lift $ Check.result updateEnv (T.unpack result)
Left msg -> pure msg Left msg -> pure msg
d <- Nix.getDescription attrPath <|> return T.empty d <- Nix.getDescription attrPath <|> return T.empty
u <- Nix.getHomepage attrPath <|> return T.empty u <- Nix.getHomepageET attrPath <|> return T.empty
cveRep <- liftIO $ cveReport updateEnv cveRep <- liftIO $ cveReport updateEnv
let metaDescription = let metaDescription =
if d == T.empty if d == T.empty

View File

@ -22,6 +22,7 @@ module Utils
prTitle, prTitle,
nixBuildOptions, nixBuildOptions,
nixCommonOptions, nixCommonOptions,
runLog
) )
where where
@ -55,6 +56,7 @@ import System.Posix.Types (FileMode)
import qualified System.Process.Typed import qualified System.Process.Typed
import Text.Read (readEither) import Text.Read (readEither)
import Type.Reflection (Typeable) import Type.Reflection (Typeable)
import Polysemy.Output
default (T.Text) default (T.Text)
@ -239,3 +241,12 @@ nixBuildOptions =
"true" "true"
] ]
<> nixCommonOptions <> nixCommonOptions
runLog ::
Member (Embed IO) r =>
(Text -> IO ()) ->
Sem ((Output Text) ': r) a ->
Sem r a
runLog logger =
interpret \case
Output o -> embed $ logger o

View File

@ -26,12 +26,7 @@ spec = do
"-XBlockArguments", "-XBlockArguments",
"-flate-specialise", "-flate-specialise",
"-fspecialise-aggressively", "-fspecialise-aggressively",
-- "-fplugin=Polysemy.Plugin", "-fplugin=Polysemy.Plugin",
-- src/Process.hs:1:1: error:
-- Can't find interface-file declaration for type constructor or class Polysemy.Internal.Union.LocateEffect
-- Probable cause: bug in .hi-boot file, or inconsistent .hi file
-- Use -ddump-if-trace to get an idea of which file caused the error
"src/Version.hs", "src/Version.hs",
"src/GH.hs", "src/GH.hs",
"src/Time.hs" "src/Time.hs"

View File

@ -7,9 +7,11 @@ import qualified Data.Text as T
import qualified Data.Text.IO as T import qualified Data.Text.IO as T
import qualified File import qualified File
import OurPrelude import OurPrelude
import qualified Polysemy.Error as Error
import qualified Polysemy.Output as Output
import qualified Process
import qualified Rewrite import qualified Rewrite
import Test.Hspec import Test.Hspec
import Text.RawString.QQ (r)
import qualified Utils import qualified Utils
main :: IO () main :: IO ()
@ -17,6 +19,24 @@ main = hspec spec
spec :: Spec spec :: Spec
spec = do spec = do
describe "Hello world" do describe "Rewrite.quotedUrls" do
it "is alive" do it "quotes an unquoted meta.homepage URL" do
2 + 2 `shouldBe` 4 nixQuotedHomepageBad <- T.readFile "test_data/quoted_homepage_bad.nix"
nixQuotedHomepageGood <- T.readFile "test_data/quoted_homepage_good.nix"
let options = Utils.Options False "" False False
let updateEnv = Utils.UpdateEnv "inadyn" "2.5" "2.6" Nothing options
-- TODO test correct file is being read
let rwArgs = Rewrite.Args updateEnv "inadyn" undefined undefined
(logs, (newContents, result)) <-
( runFinal
. embedToFinal @IO
. Output.runOutputList
. File.runPure [nixQuotedHomepageBad]
. Process.runPure ["\"http://troglobit.com/project/inadyn/\""]
. Error.errorToIOFinal
$ Rewrite.quotedUrls rwArgs
)
T.putStrLn $ T.unlines logs
head logs `shouldBe` "[quotedUrls]"
result `shouldBe` Right (Just "Quoted meta.homepage for [RFC 45](https://github.com/NixOS/rfcs/pull/45)")
head newContents `shouldBe` nixQuotedHomepageGood

View File

@ -0,0 +1,28 @@
{ stdenv, fetchFromGitHub, autoreconfHook, pkgconfig
, gnutls, libite, libconfuse }:
stdenv.mkDerivation rec {
pname = "inadyn";
version = "2.6";
src = fetchFromGitHub {
owner = "troglobit";
repo = "inadyn";
rev = "v${version}";
sha256 = "013kxlglxliajv3lrsix4w88w40g709rvycajb6ad6gbh8giqv47";
};
nativeBuildInputs = [ autoreconfHook pkgconfig ];
buildInputs = [ gnutls libite libconfuse ];
enableParallelBuilding = true;
meta = with stdenv.lib; {
homepage = http://troglobit.com/project/inadyn/;
description = "Free dynamic DNS client";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ ];
platforms = platforms.linux;
};
}

View File

@ -0,0 +1,28 @@
{ stdenv, fetchFromGitHub, autoreconfHook, pkgconfig
, gnutls, libite, libconfuse }:
stdenv.mkDerivation rec {
pname = "inadyn";
version = "2.6";
src = fetchFromGitHub {
owner = "troglobit";
repo = "inadyn";
rev = "v${version}";
sha256 = "013kxlglxliajv3lrsix4w88w40g709rvycajb6ad6gbh8giqv47";
};
nativeBuildInputs = [ autoreconfHook pkgconfig ];
buildInputs = [ gnutls libite libconfuse ];
enableParallelBuilding = true;
meta = with stdenv.lib; {
homepage = "http://troglobit.com/project/inadyn/";
description = "Free dynamic DNS client";
license = licenses.gpl2Plus;
maintainers = with maintainers; [ ];
platforms = platforms.linux;
};
}