Infrastructure upgrades to bump dependencies and clean up the build system

No functional change; this pulls out all of the changes except the `polysemy`
refactoring from here:
https://github.com/ryantm/nixpkgs-update/pull/172

This will let us develop against the newer GHC and the cleaned-up dependency
stack, start using the `hspec` test suite, vet the `github` library updates,
and so on, while we keep porting more of the effects to `polysemy` and
evaluating how it looks.
Commit 523681c6d8 (parent 32c439cf9a)
Author: Ryan Mulligan, 2020-03-04 22:31:59 -08:00; committed by Benjamin Hipple
21 changed files with 393 additions and 131 deletions


@@ -1,33 +1,16 @@
-{ nixpkgs-tarball ? builtins.fetchTarball {
-    name = "nixpkgs-unstable";
-    url =
-      "https://releases.nixos.org/nixos/unstable/nixos-20.03pre193829.f0fec244ca3/nixexprs.tar.xz";
-    sha256 = "03iqwyz5lxaq4k2hw4wfd55gizdf1230jcsqia0zmp3whpyj5y1x";
-  }, pkgs ? import nixpkgs-tarball { config = { allowBroken = true; }; }
-, returnShellEnv ? pkgs.lib.inNixShell }:
+{ pkgs ? import (import ./nix/sources.nix).nixpkgs {config = { allowBroken = true; };},
+  returnShellEnv ? pkgs.lib.inNixShell
+}:
 let
-  compiler = pkgs.haskell.packages."ghc865";
+  compiler = pkgs.haskell.packages.ghc882;
   inherit (pkgs.haskell.lib) dontCheck doJailbreak overrideCabal;
   pkg = compiler.developPackage {
     root = ./.;
-    overrides = self: super: {
-      aeson = dontCheck super.aeson;
-      polysemy = dontCheck super.polysemy_1_2_1_0;
-      polysemy-plugin = dontCheck super.polysemy-plugin;
-      time-compat = dontCheck super.time-compat;
-      binary-orphans = dontCheck super.binary-orphans;
-      binary-instances = dontCheck super.binary-instances;
-      hpack = dontCheck super.hpack;
-      HUnit = dontCheck super.HUnit;
-      hspec-core = dontCheck super.hspec-core;
-      th-abstraction = dontCheck super.th-abstraction;
-      inspection-testing = dontCheck super.inspection-testing;
-      partial-order = doJailbreak super.partial-order;
-    };
+    overrides = self: super: { };
     source-overrides = { };
     inherit returnShellEnv;
   };

nix/sources.json (new file, 26 lines added)

@@ -0,0 +1,26 @@
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
"sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "master",
"description": "Nix Packages collection",
"homepage": null,
"owner": "nixos",
"repo": "nixpkgs",
"rev": "0bfd0187dafe3f597355e6be16b7b9a5f4b90376",
"sha256": "1ydpmvfshkaxr005imhkf8h5ihsb2l97ycyl6fmyanqjdw149wgl",
"type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/0bfd0187dafe3f597355e6be16b7b9a5f4b90376.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix (new file, 136 lines added)

@@ -0,0 +1,136 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };
fetch_tarball = pkgs: spec:
if spec.builtin or true then
builtins_fetchTarball { inherit (spec) url sha256; }
else
pkgs.fetchzip { inherit (spec) url sha256; };
fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_builtin-tarball = spec:
builtins.trace
''
WARNING:
The niv type "builtin-tarball" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=tarball -a builtin=true
''
builtins_fetchTarball { inherit (spec) url sha256; };
fetch_builtin-url = spec:
builtins.trace
''
WARNING:
The niv type "builtin-url" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=file -a builtin=true
''
(builtins_fetchurl { inherit (spec) url sha256; });
#
# Various helpers
#
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
if hasNixpkgsPath
then
if hasThisAsNixpkgsPath
then import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {}
else import <nixpkgs> {}
else
import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {};
mkNixpkgs = sources:
if builtins.hasAttr "nixpkgs" sources
then sources.nixpkgs
else abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
hasNixpkgsPath = (builtins.tryEval <nixpkgs>).success;
hasThisAsNixpkgsPath =
(builtins.tryEval <nixpkgs>).success && <nixpkgs> == ./.;
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
else if spec.type == "builtin-url" then fetch_builtin-url spec
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit url; }
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = fetch config.pkgs name spec; }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? ./sources.json
}: rec {
# The sources, i.e. the attribute set of spec name to spec
sources = builtins.fromJSON (builtins.readFile sourcesFile);
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
pkgs = mkPkgs sources;
};
in
mkSources (mkConfig {}) //
{ __functor = _: settings: mkSources (mkConfig settings); }


@@ -1,40 +1,49 @@
 { mkDerivation, aeson, base, bytestring, conduit, containers
 , cryptohash-sha256, directory, doctest, errors, filepath, github
-, hex, hpack, http-client-tls, http-conduit, iso8601-time
-, lifted-base, mtl, neat-interpolation, optparse-applicative
-, parsec, parsers, partial-order, polysemy, polysemy-plugin
-, regex-applicative-text, servant, servant-client, sqlite-simple
-, stdenv, template-haskell, temporary, text, time, transformers
-, typed-process, unix, unordered-containers, vector, versions
-, xdg-basedir, zlib
+, hpack, hspec, hspec-discover, http-client-tls, http-conduit
+, iso8601-time, lifted-base, mtl, neat-interpolation
+, optparse-applicative, parsec, parsers, partial-order, polysemy
+, raw-strings-qq, regex-applicative-text, servant, servant-client
+, sqlite-simple, stdenv, template-haskell, temporary, text, time
+, transformers, typed-process, unix, unordered-containers, vector
+, versions, xdg-basedir, zlib
 }:
 mkDerivation {
   pname = "nixpkgs-update";
   version = "0.2.0";
   src = ./.;
-  isLibrary = false;
+  isLibrary = true;
   isExecutable = true;
+  libraryHaskellDepends = [
+    aeson base bytestring conduit containers cryptohash-sha256
+    directory errors filepath github http-client-tls http-conduit
+    iso8601-time lifted-base mtl neat-interpolation
+    optparse-applicative parsec parsers partial-order polysemy
+    regex-applicative-text servant servant-client sqlite-simple
+    template-haskell temporary text time transformers typed-process
+    unix unordered-containers vector versions xdg-basedir zlib
+  ];
   libraryToolDepends = [ hpack ];
   executableHaskellDepends = [
     aeson base bytestring conduit containers cryptohash-sha256
-    directory errors filepath github hex http-client-tls http-conduit
+    directory errors filepath github http-client-tls http-conduit
     iso8601-time lifted-base mtl neat-interpolation
     optparse-applicative parsec parsers partial-order polysemy
-    polysemy-plugin regex-applicative-text servant servant-client
-    sqlite-simple template-haskell temporary text time transformers
-    typed-process unix unordered-containers vector versions xdg-basedir
-    zlib
+    regex-applicative-text servant servant-client sqlite-simple
+    template-haskell temporary text time transformers typed-process
+    unix unordered-containers vector versions xdg-basedir zlib
   ];
   testHaskellDepends = [
     aeson base bytestring conduit containers cryptohash-sha256
-    directory doctest errors filepath github hex http-client-tls
-    http-conduit iso8601-time lifted-base mtl neat-interpolation
-    optparse-applicative parsec parsers partial-order polysemy
-    polysemy-plugin regex-applicative-text servant servant-client
-    sqlite-simple template-haskell temporary text time transformers
-    typed-process unix unordered-containers vector versions xdg-basedir
-    zlib
+    directory doctest errors filepath github hspec hspec-discover
+    http-client-tls http-conduit iso8601-time lifted-base mtl
+    neat-interpolation optparse-applicative parsec parsers
+    partial-order polysemy raw-strings-qq regex-applicative-text
+    servant servant-client sqlite-simple template-haskell temporary
+    text time transformers typed-process unix unordered-containers
+    vector versions xdg-basedir zlib
   ];
+  testToolDepends = [ hspec-discover ];
   prePatch = "hpack";
   homepage = "https://github.com/ryantm/nixpkgs-update#readme";
   description = "Tool for semi-automatic updating of nixpkgs repository";


@@ -16,7 +16,8 @@ extra-source-files:
 github: ryantm/nixpkgs-update
 
-ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively -fplugin=Polysemy.Plugin
+ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively
 
 default-extensions:
 - DataKinds
@@ -29,31 +30,30 @@ default-extensions:
 - TypeApplications
 - TypeFamilies
 - TypeOperators
+- BlockArguments
 
 dependencies:
 - aeson
-- base >= 4.7 && < 5
+- base >= 4.13 && < 5
 - bytestring
 - conduit
 - containers
 - cryptohash-sha256
-- directory >= 1.3 && < 1.4
+- directory
 - errors
 - filepath
 - github
-- hex
 - http-client-tls
 - http-conduit
 - iso8601-time
 - lifted-base
 - mtl
-- neat-interpolation >= 0.3 && < 0.4
+- neat-interpolation
 - optparse-applicative
 - parsec
 - parsers
 - partial-order
 - polysemy
-- polysemy-plugin
 - regex-applicative-text
 - servant
 - servant-client
@@ -61,7 +61,7 @@ dependencies:
 - template-haskell
 - temporary
 - text
-- time >= 1.8 && < 1.10
+- time
 - transformers
 - typed-process
 - unix
@@ -71,15 +71,24 @@ dependencies:
 - xdg-basedir
 - zlib
 
-executables:
-  nixpkgs-update:
-    source-dirs: src
-    main: Main.hs
+library:
+  source-dirs: src
 
 tests:
-  doctests:
-    main: doctests.hs
-    ghc-options: -threaded
-    source-dirs: test
+  spec:
+    main: Spec.hs
+    source-dirs:
+    - test
     dependencies:
+    - hspec
+    - hspec-discover
     - doctest
+    - raw-strings-qq
+    - nixpkgs-update
+
+executables:
+  nixpkgs-update:
+    source-dirs: app
+    main: Main.hs
+    dependencies:
+    - nixpkgs-update


@@ -25,7 +25,6 @@ import Data.Aeson
     withObject,
   )
 import Data.Aeson.Types (Parser, prependFailure)
-import Data.Bifunctor (bimap)
 import qualified Data.ByteString.Lazy.Char8 as BSL
 import Data.List (intercalate)
 import qualified Data.Text as T


@@ -15,7 +15,6 @@ import OurPrelude
 import System.Directory (doesDirectoryExist, doesFileExist, listDirectory)
 import System.Exit
 import System.IO.Temp (withSystemTempDirectory)
-import System.Process.Typed (setEnv, setStdin, setWorkingDir)
 import qualified Text.Regex.Applicative.Text as RE
 import Text.Regex.Applicative.Text ((=~), RE')
 import Utils (UpdateEnv (..), Version, nixBuildOptions)

src/Data/Hex.hs (new file, 73 lines added)

@@ -0,0 +1,73 @@
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module      : Data.Hex
-- Copyright   : (c) Taru Karttunen 2009
-- License     : BSD-style
-- Maintainer  : taruti@taruti.net
-- Stability   : provisional
-- Portability : portable
--
-- Convert strings into hexadecimal and back.
--
-----------------------------------------------------------------------------
module Data.Hex(Hex(..)) where

import Control.Monad
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L

-- | Convert strings into hexadecimal and back.
class Hex t where
    -- | Convert string into hexadecimal.
    hex :: t -> t
    -- | Convert from hexadecimal and fail on invalid input.
    unhex :: MonadFail m => t -> m t

instance Hex String where
    hex = Prelude.concatMap w
        where w ch = let s = "0123456789ABCDEF"
                         x = fromEnum ch
                     in [s !! div x 16,s !! mod x 16]
    unhex [] = return []
    unhex (a:b:r) = do x <- c a
                       y <- c b
                       liftM (toEnum ((x * 16) + y) :) $ unhex r
    unhex [_] = fail "Non-even length"

c :: MonadFail m => Char -> m Int
c '0' = return 0
c '1' = return 1
c '2' = return 2
c '3' = return 3
c '4' = return 4
c '5' = return 5
c '6' = return 6
c '7' = return 7
c '8' = return 8
c '9' = return 9
c 'A' = return 10
c 'B' = return 11
c 'C' = return 12
c 'D' = return 13
c 'E' = return 14
c 'F' = return 15
c 'a' = return 10
c 'b' = return 11
c 'c' = return 12
c 'd' = return 13
c 'e' = return 14
c 'f' = return 15
c _ = fail "Invalid hex digit!"

instance Hex B.ByteString where
    hex = B.pack . hex . B.unpack
    unhex x = liftM B.pack $ unhex $ B.unpack x

instance Hex L.ByteString where
    hex = L.pack . hex . L.unpack
    unhex x = liftM L.pack $ unhex $ L.unpack x
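
A minimal usage sketch for this vendored module (the example values are ours, not part of the commit); `Maybe` serves as the `MonadFail` instance for `unhex`:

import Data.Hex (hex, unhex)
import qualified Data.ByteString.Char8 as B

main :: IO ()
main = do
  print (hex ("nix" :: String))          -- "6E6978"
  print (unhex "6E6978" :: Maybe String) -- Just "nix"
  print (hex (B.pack "cafe"))            -- "63616665"
  print (unhex "6E697" :: Maybe String)  -- Nothing: odd-length input fails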


@@ -16,7 +16,7 @@ deleteDone githubToken = do
     runExceptT $ do
       Git.fetch
       Git.cleanAndResetTo "master"
-      refs <- ExceptT $ GH.closedAutoUpdateRefs githubToken
+      refs <- ExceptT $ GH.closedAutoUpdateRefs (GH.authFromToken githubToken)
       let branches = fmap (\r -> ("auto-update/" <> r)) refs
       liftIO $ Git.deleteBranchesEverywhere branches
   case result of


@@ -13,6 +13,7 @@ module GH
     openAutoUpdatePR,
     checkExistingUpdatePR,
     latestVersion,
+    authFromToken,
   )
 where
 
@@ -21,10 +22,7 @@ import qualified Data.Text as T
 import qualified Data.Text.Encoding as T
 import qualified Data.Vector as V
 import GitHub
-import GitHub.Data.Name (Name (..), untagName)
-import GitHub.Endpoints.GitData.References (references')
-import GitHub.Endpoints.Repos.Releases (latestRelease', releaseByTagName)
-import GitHub.Endpoints.Search (searchIssues')
+import GitHub.Data.Name (Name (..))
 import OurPrelude
 import qualified Text.Regex.Applicative.Text as RE
 import Text.Regex.Applicative.Text ((=~))
@@ -33,16 +31,16 @@ import qualified Utils as U
 
 default (T.Text)
 
-gReleaseUrl :: MonadIO m => URLParts -> ExceptT Text m Text
-gReleaseUrl (URLParts o r t) =
+gReleaseUrl :: MonadIO m => Auth -> URLParts -> ExceptT Text m Text
+gReleaseUrl auth (URLParts o r t) =
   ExceptT $
     bimap (T.pack . show) (getUrl . releaseHtmlUrl)
-      <$> liftIO (releaseByTagName o r t)
+      <$> liftIO (github auth (releaseByTagNameR o r t))
 
-releaseUrl :: MonadIO m => Text -> ExceptT Text m Text
-releaseUrl url = do
+releaseUrl :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m Text
+releaseUrl env url = do
   urlParts <- parseURL url
-  gReleaseUrl urlParts
+  gReleaseUrl (authFrom env) urlParts
 
 pr :: MonadIO m => Text -> Text -> m ()
 pr base msg =
@@ -111,9 +109,10 @@ compareUrl urlOld urlNew = do
 
 --deleteDoneBranches :: IO ()
 --deleteDoneBranches = do
-autoUpdateRefs :: Text -> IO (Either Text (Vector Text))
-autoUpdateRefs githubToken =
-  references' (Just (OAuth (T.encodeUtf8 githubToken))) "r-ryantm" "nixpkgs"
+-- (OAuth (T.encodeUtf8 githubToken))
+autoUpdateRefs :: Auth -> IO (Either Text (Vector Text))
+autoUpdateRefs auth =
+  github auth (referencesR "r-ryantm" "nixpkgs" FetchAll)
     & fmap
       ( first (T.pack . show)
          >>> second (fmap gitReferenceRef >>> V.mapMaybe (T.stripPrefix prefix))
@@ -121,10 +120,10 @@ autoUpdateRefs githubToken =
   where
     prefix = "refs/heads/auto-update/"
 
-openPRWithAutoUpdateRefFromRRyanTM :: Text -> Text -> IO (Either Text Bool)
-openPRWithAutoUpdateRefFromRRyanTM githubToken ref =
+openPRWithAutoUpdateRefFromRRyanTM :: Auth -> Text -> IO (Either Text Bool)
+openPRWithAutoUpdateRefFromRRyanTM auth ref =
   executeRequest
-    (OAuth (T.encodeUtf8 githubToken))
+    auth
     ( pullRequestsForR
        "nixos"
        "nixpkgs"
@@ -133,16 +132,16 @@ openPRWithAutoUpdateRefFromRRyanTM githubToken ref =
    )
    & fmap (first (T.pack . show) >>> second (not . V.null))
 
-refShouldBeDeleted :: Text -> Text -> IO Bool
-refShouldBeDeleted githubToken ref =
+refShouldBeDeleted :: Auth -> Text -> IO Bool
+refShouldBeDeleted auth ref =
   not . either (const True) id
-    <$> openPRWithAutoUpdateRefFromRRyanTM githubToken ref
+    <$> openPRWithAutoUpdateRefFromRRyanTM auth ref
 
-closedAutoUpdateRefs :: Text -> IO (Either Text (Vector Text))
-closedAutoUpdateRefs githubToken =
+closedAutoUpdateRefs :: Auth -> IO (Either Text (Vector Text))
+closedAutoUpdateRefs auth =
   runExceptT $ do
-    aur :: Vector Text <- ExceptT $ autoUpdateRefs githubToken
-    ExceptT (Right <$> V.filterM (refShouldBeDeleted githubToken) aur)
+    aur :: Vector Text <- ExceptT $ autoUpdateRefs auth
+    ExceptT (Right <$> V.filterM (refShouldBeDeleted auth) aur)
 
 -- This is too slow
 openPullRequests :: Text -> IO (Either Text (Vector SimplePullRequest))
@@ -161,14 +160,18 @@ openAutoUpdatePR updateEnv oprs = oprs & (V.find isThisPkg >>> isJust)
        titleHasNewVersion = newVersion updateEnv `T.isSuffixOf` title
     in titleHasName && titleHasNewVersion
 
+authFromToken :: Text -> Auth
+authFromToken = OAuth . T.encodeUtf8
+
+authFrom :: UpdateEnv -> Auth
+authFrom = authFromToken . U.githubToken . options
+
 checkExistingUpdatePR :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m ()
-checkExistingUpdatePR ue attrPath = do
+checkExistingUpdatePR env attrPath = do
   searchResult <-
     ExceptT
      $ liftIO
-     $ searchIssues'
-        (Just (OAuth (T.encodeUtf8 (U.githubToken (options ue)))))
-        search
+     $ github (authFrom env) (searchIssuesR search)
      & fmap (first (T.pack . show))
   if T.length (openPRReport searchResult) == 0
    then return ()
@@ -178,7 +181,7 @@ checkExistingUpdatePR ue attrPath = do
         <> openPRReport searchResult
      )
   where
-    title = U.prTitle ue attrPath
+    title = U.prTitle env attrPath
    search = [interpolate|repo:nixos/nixpkgs $title |]
    openPRReport searchResult =
      searchResultResults searchResult & V.filter (issueClosedAt >>> isNothing)
@@ -188,14 +191,11 @@ checkExistingUpdatePR ue attrPath = do
    report i = "- " <> issueTitle i <> "\n " <> tshow (issueUrl i)
 
 latestVersion :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m Version
-latestVersion ue url = do
+latestVersion env url = do
  urlParts <- parseURL url
  r <-
-    ExceptT
+    fmapLT tshow $ ExceptT
     $ liftIO
-    $ latestRelease'
-        (Just (OAuth (T.encodeUtf8 (U.githubToken (options ue)))))
-        (owner urlParts)
-        (repo urlParts)
-      & fmap (first (T.pack . show))
+    $ executeRequest (authFrom env)
+    $ latestReleaseR (owner urlParts) (repo urlParts)
  return $ T.dropWhile (\c -> c == 'v' || c == 'V') (releaseTagName r)
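
The hunk above replaces calls to the `github` library's per-endpoint helpers (`references'`, `latestRelease'`, `searchIssues'`) with the request-style API: build a `*R` request value and run it with `github`/`executeRequest` and an `Auth`. A rough standalone sketch of that call shape, assuming a recent `github` release; the helper name and hard-coded owner/repo below are illustrative, not part of this commit:

{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import GitHub

-- Illustrative only: fetch the latest nixos/nixpkgs release tag with the same
-- Auth + request-constructor pattern used by GH.latestVersion above.
latestNixpkgsTag :: Auth -> IO (Either Error Text)
latestNixpkgsTag auth =
  fmap releaseTagName <$> executeRequest auth (latestReleaseR "nixos" "nixpkgs")

With the new `authFromToken` helper this would be invoked as `latestNixpkgsTag (authFromToken token)`.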


@@ -24,7 +24,7 @@ import qualified Data.Text.Encoding as T
 import qualified Data.Text.IO as T
 import Data.Time.Clock (addUTCTime, getCurrentTime)
 import qualified Data.Vector as V
-import OurPrelude
+import OurPrelude hiding (throw)
 import System.Directory (getModificationTime)
 import System.Environment.XDG.BaseDir (getUserCacheDir)
 import System.Exit


@@ -22,9 +22,8 @@ import CVE
     parseFeed,
   )
 import Codec.Compression.GZip (decompress)
-import Control.Exception (SomeException, ioError, try)
+import Control.Exception (SomeException, try)
 import Crypto.Hash.SHA256 (hashlazy)
-import Data.Bifunctor (second)
 import qualified Data.ByteString.Lazy.Char8 as BSL
 import Data.Hex (hex, unhex)
 import Data.List (group)
@@ -59,7 +58,6 @@ import System.Directory
     removeFile,
   )
 import System.FilePath ((</>))
-import System.IO.Error (userError)
 import Utils (ProductID, Version)
 import Version (matchVersion)


@@ -38,7 +38,6 @@ import Data.Bifunctor
 import qualified Data.ByteString.Lazy as BSL
 import Data.Function ((&))
 import Data.Functor ((<&>))
-import Data.Semigroup ((<>))
 import Data.Set (Set)
 import Data.Text (Text, pack)
 import qualified Data.Text.Encoding as T


@@ -1,6 +1,3 @@
-#!/usr/bin/env nix-shell
-#!nix-shell -p nix -i "nix-env -qaP --no-name --out-path --arg checkMeta true --argstr path $$PWD -f"
-
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE QuasiQuotes #-}
 
@@ -27,9 +24,6 @@ import Text.Parser.Combinators
 outPathsExpr :: Text
 outPathsExpr =
   [interpolate|
-
-# When using as a callable script, passing `--argstr path some/path` overrides $$PWD.
-
 { checkMeta
 , path ? ./.
 }:


@@ -4,15 +4,12 @@
 module Repology where
 
-import Control.Category ((>>>))
 import Data.Aeson
 import Data.HashMap.Strict
 import Data.List
 import Data.Proxy
-import Data.Text (Text)
 import qualified Data.Text as T
 import qualified Data.Text.IO
-import Data.Vector (Vector)
 import qualified Data.Vector as V
 import GHC.Generics
 import Network.HTTP.Client.TLS (newTlsManager)


@@ -276,7 +276,7 @@ publishPackage log updateEnv oldSrcUrl newSrcUrl attrPath result opDiff msgs = do
   let rewriteMessages = foldl (\ms m -> ms <> T.pack "\n- " <> m) "\nUpdates performed:" msgs
   releaseUrlMessage <-
     ( do
-        msg <- GH.releaseUrl newSrcUrl
+        msg <- GH.releaseUrl updateEnv newSrcUrl
         return ("\n[Release on GitHub](" <> msg <> ")\n\n")
     )
       <|> return ""

test/DoctestSpec.hs (new file, 38 lines added)

@@ -0,0 +1,38 @@
module DoctestSpec where

import Test.DocTest
import Test.Hspec

main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "Doctests" do
    it "should all pass" do
      doctest
        [ "-isrc",
          "-XOverloadedStrings",
          "-XDataKinds",
          "-XFlexibleContexts",
          "-XGADTs",
          "-XLambdaCase",
          "-XPolyKinds",
          "-XRankNTypes",
          "-XScopedTypeVariables",
          "-XTypeApplications",
          "-XTypeFamilies",
          "-XTypeOperators",
          "-XBlockArguments",
          "-flate-specialise",
          "-fspecialise-aggressively",
          -- "-fplugin=Polysemy.Plugin",
          -- src/Process.hs:1:1: error:
          --   Can't find interface-file declaration for type constructor or class Polysemy.Internal.Union.LocateEffect
          --   Probable cause: bug in .hi-boot file, or inconsistent .hi file
          --   Use -ddump-if-trace to get an idea of which file caused the error
          "src/Version.hs",
          "src/GH.hs",
          "src/Time.hs"
        ]

test/RewriteSpec.hs (new file, 22 lines added)

@@ -0,0 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}

module RewriteSpec where

import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified File
import OurPrelude
import qualified Rewrite
import Test.Hspec
import Text.RawString.QQ (r)
import qualified Utils

main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "Hello world" do
    it "is alive" do
      2 + 2 `shouldBe` 4

test/Spec.hs (new file, 1 line added)

@@ -0,0 +1 @@
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
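
For context, the `hspec-discover` pragma above makes GHC generate the test driver at compile time; the result is roughly equivalent to the hand-written sketch below (the real generated module differs in details), picking up every `*Spec` module under `test/` that exports `spec :: Spec`:

import qualified DoctestSpec
import qualified RewriteSpec
import Test.Hspec

main :: IO ()
main = hspec $ do
  describe "Doctest" DoctestSpec.spec
  describe "Rewrite" RewriteSpec.spec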


@@ -1,21 +0,0 @@
import Test.DocTest

main :: IO ()
main =
  doctest
    [ "-isrc"
    , "-XOverloadedStrings"
    , "-XDataKinds"
    , "-XFlexibleContexts"
    , "-XGADTs"
    , "-XLambdaCase"
    , "-XPolyKinds"
    , "-XRankNTypes"
    , "-XScopedTypeVariables"
    , "-XTypeApplications"
    , "-XTypeFamilies"
    , "-XTypeOperators"
    , "src/Version.hs"
    , "src/GH.hs"
    , "src/Time.hs"
    ]