Infrastructure upgrades to bump dependencies and clean up the build system

No functional change; this pulls out all of the changes from
https://github.com/ryantm/nixpkgs-update/pull/172 except the `polysemy`
refactoring.

This will allow us to do development with the newer GHC and the cleaned-up
dependency stack, start using the `hspec` test suite, vet the `github` library
updates, etc., while we work on porting more of the effects to polysemy and
evaluating how that looks.
Ryan Mulligan 2020-03-04 22:31:59 -08:00, committed by Benjamin Hipple
parent 32c439cf9a
commit 523681c6d8
21 changed files with 393 additions and 131 deletions


@@ -1,33 +1,16 @@
{ nixpkgs-tarball ? builtins.fetchTarball {
name = "nixpkgs-unstable";
url =
"https://releases.nixos.org/nixos/unstable/nixos-20.03pre193829.f0fec244ca3/nixexprs.tar.xz";
sha256 = "03iqwyz5lxaq4k2hw4wfd55gizdf1230jcsqia0zmp3whpyj5y1x";
}, pkgs ? import nixpkgs-tarball { config = { allowBroken = true; }; }
, returnShellEnv ? pkgs.lib.inNixShell }:
{ pkgs ? import (import ./nix/sources.nix).nixpkgs {config = { allowBroken = true; };},
returnShellEnv ? pkgs.lib.inNixShell
}:
let
compiler = pkgs.haskell.packages."ghc865";
compiler = pkgs.haskell.packages.ghc882;
inherit (pkgs.haskell.lib) dontCheck doJailbreak overrideCabal;
pkg = compiler.developPackage {
root = ./.;
overrides = self: super: {
aeson = dontCheck super.aeson;
polysemy = dontCheck super.polysemy_1_2_1_0;
polysemy-plugin = dontCheck super.polysemy-plugin;
time-compat = dontCheck super.time-compat;
binary-orphans = dontCheck super.binary-orphans;
binary-instances = dontCheck super.binary-instances;
hpack = dontCheck super.hpack;
HUnit = dontCheck super.HUnit;
hspec-core = dontCheck super.hspec-core;
th-abstraction = dontCheck super.th-abstraction;
inspection-testing = dontCheck super.inspection-testing;
partial-order = doJailbreak super.partial-order;
};
overrides = self: super: { };
source-overrides = { };
inherit returnShellEnv;
};

nix/sources.json (new file, 26 lines)

@@ -0,0 +1,26 @@
{
"niv": {
"branch": "master",
"description": "Easy dependency management for Nix projects",
"homepage": "https://github.com/nmattia/niv",
"owner": "nmattia",
"repo": "niv",
"rev": "98c74a80934123cb4c3bf3314567f67311eb711a",
"sha256": "1w8n54hapd4x9f1am33icvngkqns7m3hl9yair38yqq08ffwg0kn",
"type": "tarball",
"url": "https://github.com/nmattia/niv/archive/98c74a80934123cb4c3bf3314567f67311eb711a.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
},
"nixpkgs": {
"branch": "master",
"description": "Nix Packages collection",
"homepage": null,
"owner": "nixos",
"repo": "nixpkgs",
"rev": "0bfd0187dafe3f597355e6be16b7b9a5f4b90376",
"sha256": "1ydpmvfshkaxr005imhkf8h5ihsb2l97ycyl6fmyanqjdw149wgl",
"type": "tarball",
"url": "https://github.com/nixos/nixpkgs/archive/0bfd0187dafe3f597355e6be16b7b9a5f4b90376.tar.gz",
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
}
}

nix/sources.nix (new file, 136 lines)

@@ -0,0 +1,136 @@
# This file has been generated by Niv.
let
#
# The fetchers. fetch_<type> fetches specs of type <type>.
#
fetch_file = pkgs: spec:
if spec.builtin or true then
builtins_fetchurl { inherit (spec) url sha256; }
else
pkgs.fetchurl { inherit (spec) url sha256; };
fetch_tarball = pkgs: spec:
if spec.builtin or true then
builtins_fetchTarball { inherit (spec) url sha256; }
else
pkgs.fetchzip { inherit (spec) url sha256; };
fetch_git = spec:
builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; };
fetch_builtin-tarball = spec:
builtins.trace
''
WARNING:
The niv type "builtin-tarball" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=tarball -a builtin=true
''
builtins_fetchTarball { inherit (spec) url sha256; };
fetch_builtin-url = spec:
builtins.trace
''
WARNING:
The niv type "builtin-url" will soon be deprecated. You should
instead use `builtin = true`.
$ niv modify <package> -a type=file -a builtin=true
''
(builtins_fetchurl { inherit (spec) url sha256; });
#
# Various helpers
#
# The set of packages used when specs are fetched using non-builtins.
mkPkgs = sources:
if hasNixpkgsPath
then
if hasThisAsNixpkgsPath
then import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {}
else import <nixpkgs> {}
else
import (builtins_fetchTarball { inherit (mkNixpkgs sources) url sha256; }) {};
mkNixpkgs = sources:
if builtins.hasAttr "nixpkgs" sources
then sources.nixpkgs
else abort
''
Please specify either <nixpkgs> (through -I or NIX_PATH=nixpkgs=...) or
add a package called "nixpkgs" to your sources.json.
'';
hasNixpkgsPath = (builtins.tryEval <nixpkgs>).success;
hasThisAsNixpkgsPath =
(builtins.tryEval <nixpkgs>).success && <nixpkgs> == ./.;
# The actual fetching function.
fetch = pkgs: name: spec:
if ! builtins.hasAttr "type" spec then
abort "ERROR: niv spec ${name} does not have a 'type' attribute"
else if spec.type == "file" then fetch_file pkgs spec
else if spec.type == "tarball" then fetch_tarball pkgs spec
else if spec.type == "git" then fetch_git spec
else if spec.type == "builtin-tarball" then fetch_builtin-tarball spec
else if spec.type == "builtin-url" then fetch_builtin-url spec
else
abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}";
# Ports of functions for older nix versions
# a Nix version of mapAttrs if the built-in doesn't exist
mapAttrs = builtins.mapAttrs or (
f: set: with builtins;
listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set))
);
# fetchTarball version that is compatible between all the versions of Nix
builtins_fetchTarball = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchTarball;
in
if lessThan nixVersion "1.12" then
fetchTarball { inherit url; }
else
fetchTarball attrs;
# fetchurl version that is compatible between all the versions of Nix
builtins_fetchurl = { url, sha256 }@attrs:
let
inherit (builtins) lessThan nixVersion fetchurl;
in
if lessThan nixVersion "1.12" then
fetchurl { inherit url; }
else
fetchurl attrs;
# Create the final "sources" from the config
mkSources = config:
mapAttrs (
name: spec:
if builtins.hasAttr "outPath" spec
then abort
"The values in sources.json should not have an 'outPath' attribute"
else
spec // { outPath = fetch config.pkgs name spec; }
) config.sources;
# The "config" used by the fetchers
mkConfig =
{ sourcesFile ? ./sources.json
}: rec {
# The sources, i.e. the attribute set of spec name to spec
sources = builtins.fromJSON (builtins.readFile sourcesFile);
# The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers
pkgs = mkPkgs sources;
};
in
mkSources (mkConfig {}) //
{ __functor = _: settings: mkSources (mkConfig settings); }


@@ -1,40 +1,49 @@
{ mkDerivation, aeson, base, bytestring, conduit, containers
, cryptohash-sha256, directory, doctest, errors, filepath, github
, hex, hpack, http-client-tls, http-conduit, iso8601-time
, lifted-base, mtl, neat-interpolation, optparse-applicative
, parsec, parsers, partial-order, polysemy, polysemy-plugin
, regex-applicative-text, servant, servant-client, sqlite-simple
, stdenv, template-haskell, temporary, text, time, transformers
, typed-process, unix, unordered-containers, vector, versions
, xdg-basedir, zlib
, hpack, hspec, hspec-discover, http-client-tls, http-conduit
, iso8601-time, lifted-base, mtl, neat-interpolation
, optparse-applicative, parsec, parsers, partial-order, polysemy
, raw-strings-qq, regex-applicative-text, servant, servant-client
, sqlite-simple, stdenv, template-haskell, temporary, text, time
, transformers, typed-process, unix, unordered-containers, vector
, versions, xdg-basedir, zlib
}:
mkDerivation {
pname = "nixpkgs-update";
version = "0.2.0";
src = ./.;
isLibrary = false;
isLibrary = true;
isExecutable = true;
libraryHaskellDepends = [
aeson base bytestring conduit containers cryptohash-sha256
directory errors filepath github http-client-tls http-conduit
iso8601-time lifted-base mtl neat-interpolation
optparse-applicative parsec parsers partial-order polysemy
regex-applicative-text servant servant-client sqlite-simple
template-haskell temporary text time transformers typed-process
unix unordered-containers vector versions xdg-basedir zlib
];
libraryToolDepends = [ hpack ];
executableHaskellDepends = [
aeson base bytestring conduit containers cryptohash-sha256
directory errors filepath github hex http-client-tls http-conduit
directory errors filepath github http-client-tls http-conduit
iso8601-time lifted-base mtl neat-interpolation
optparse-applicative parsec parsers partial-order polysemy
polysemy-plugin regex-applicative-text servant servant-client
sqlite-simple template-haskell temporary text time transformers
typed-process unix unordered-containers vector versions xdg-basedir
zlib
regex-applicative-text servant servant-client sqlite-simple
template-haskell temporary text time transformers typed-process
unix unordered-containers vector versions xdg-basedir zlib
];
testHaskellDepends = [
aeson base bytestring conduit containers cryptohash-sha256
directory doctest errors filepath github hex http-client-tls
http-conduit iso8601-time lifted-base mtl neat-interpolation
optparse-applicative parsec parsers partial-order polysemy
polysemy-plugin regex-applicative-text servant servant-client
sqlite-simple template-haskell temporary text time transformers
typed-process unix unordered-containers vector versions xdg-basedir
zlib
directory doctest errors filepath github hspec hspec-discover
http-client-tls http-conduit iso8601-time lifted-base mtl
neat-interpolation optparse-applicative parsec parsers
partial-order polysemy raw-strings-qq regex-applicative-text
servant servant-client sqlite-simple template-haskell temporary
text time transformers typed-process unix unordered-containers
vector versions xdg-basedir zlib
];
testToolDepends = [ hspec-discover ];
prePatch = "hpack";
homepage = "https://github.com/ryantm/nixpkgs-update#readme";
description = "Tool for semi-automatic updating of nixpkgs repository";


@@ -16,7 +16,8 @@ extra-source-files:
github: ryantm/nixpkgs-update
ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively -fplugin=Polysemy.Plugin
ghc-options: -Wall -O2 -flate-specialise -fspecialise-aggressively
default-extensions:
- DataKinds
@@ -29,31 +30,30 @@ default-extensions:
- TypeApplications
- TypeFamilies
- TypeOperators
- BlockArguments
dependencies:
- aeson
- base >= 4.7 && < 5
- base >= 4.13 && < 5
- bytestring
- conduit
- containers
- cryptohash-sha256
- directory >= 1.3 && < 1.4
- directory
- errors
- filepath
- github
- hex
- http-client-tls
- http-conduit
- iso8601-time
- lifted-base
- mtl
- neat-interpolation >= 0.3 && < 0.4
- neat-interpolation
- optparse-applicative
- parsec
- parsers
- partial-order
- polysemy
- polysemy-plugin
- regex-applicative-text
- servant
- servant-client
@@ -61,7 +61,7 @@ dependencies:
- template-haskell
- temporary
- text
- time >= 1.8 && < 1.10
- time
- transformers
- typed-process
- unix
@@ -71,15 +71,24 @@ dependencies:
- xdg-basedir
- zlib
executables:
nixpkgs-update:
source-dirs: src
main: Main.hs
library:
source-dirs: src
tests:
doctests:
main: doctests.hs
ghc-options: -threaded
source-dirs: test
spec:
main: Spec.hs
source-dirs:
- test
dependencies:
- hspec
- hspec-discover
- doctest
- raw-strings-qq
- nixpkgs-update
executables:
nixpkgs-update:
source-dirs: app
main: Main.hs
dependencies:
- nixpkgs-update


@@ -25,7 +25,6 @@ import Data.Aeson
withObject,
)
import Data.Aeson.Types (Parser, prependFailure)
import Data.Bifunctor (bimap)
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.List (intercalate)
import qualified Data.Text as T


@@ -15,7 +15,6 @@ import OurPrelude
import System.Directory (doesDirectoryExist, doesFileExist, listDirectory)
import System.Exit
import System.IO.Temp (withSystemTempDirectory)
import System.Process.Typed (setEnv, setStdin, setWorkingDir)
import qualified Text.Regex.Applicative.Text as RE
import Text.Regex.Applicative.Text ((=~), RE')
import Utils (UpdateEnv (..), Version, nixBuildOptions)

src/Data/Hex.hs (new file, 73 lines)

@@ -0,0 +1,73 @@
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Hex
-- Copyright : (c) Taru Karttunen 2009
-- License : BSD-style
-- Maintainer : taruti@taruti.net
-- Stability : provisional
-- Portability : portable
--
-- Convert strings into hexadecimal and back.
--
-----------------------------------------------------------------------------
module Data.Hex(Hex(..)) where
import Control.Monad
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
-- | Convert strings into hexadecimal and back.
class Hex t where
-- | Convert string into hexadecimal.
hex :: t -> t
-- | Convert from hexadecimal and fail on invalid input.
unhex :: MonadFail m => t -> m t
instance Hex String where
hex = Prelude.concatMap w
where w ch = let s = "0123456789ABCDEF"
x = fromEnum ch
in [s !! div x 16,s !! mod x 16]
unhex [] = return []
unhex (a:b:r) = do x <- c a
y <- c b
liftM (toEnum ((x * 16) + y) :) $ unhex r
unhex [_] = fail "Non-even length"
c :: MonadFail m => Char -> m Int
c '0' = return 0
c '1' = return 1
c '2' = return 2
c '3' = return 3
c '4' = return 4
c '5' = return 5
c '6' = return 6
c '7' = return 7
c '8' = return 8
c '9' = return 9
c 'A' = return 10
c 'B' = return 11
c 'C' = return 12
c 'D' = return 13
c 'E' = return 14
c 'F' = return 15
c 'a' = return 10
c 'b' = return 11
c 'c' = return 12
c 'd' = return 13
c 'e' = return 14
c 'f' = return 15
c _ = fail "Invalid hex digit!"
instance Hex B.ByteString where
hex = B.pack . hex . B.unpack
unhex x = liftM B.pack $ unhex $ B.unpack x
instance Hex L.ByteString where
hex = L.pack . hex . L.unpack
unhex x = liftM L.pack $ unhex $ L.unpack x
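
The hunks above drop the external `hex` package from the dependency lists, and this new module vendors the same Data.Hex code, which one of the modules further down still imports. A minimal usage sketch, assuming this module is on the load path; the input bytes are purely illustrative:

-- Round-trip through the vendored Data.Hex (illustrative values).
import Data.Hex (hex, unhex)
import qualified Data.ByteString.Char8 as B

main :: IO ()
main = do
  let bytes = B.pack "NVD"
  print (hex bytes)                  -- "4E5644" (uppercase hex digits)
  decoded <- unhex (B.pack "4E5644") -- MonadFail: invalid input fails in IO
  B.putStrLn decoded                 -- NVD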


@@ -16,7 +16,7 @@ deleteDone githubToken = do
runExceptT $ do
Git.fetch
Git.cleanAndResetTo "master"
refs <- ExceptT $ GH.closedAutoUpdateRefs githubToken
refs <- ExceptT $ GH.closedAutoUpdateRefs (GH.authFromToken githubToken)
let branches = fmap (\r -> ("auto-update/" <> r)) refs
liftIO $ Git.deleteBranchesEverywhere branches
case result of


@@ -13,6 +13,7 @@ module GH
openAutoUpdatePR,
checkExistingUpdatePR,
latestVersion,
authFromToken,
)
where
@@ -21,10 +22,7 @@ import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import GitHub
import GitHub.Data.Name (Name (..), untagName)
import GitHub.Endpoints.GitData.References (references')
import GitHub.Endpoints.Repos.Releases (latestRelease', releaseByTagName)
import GitHub.Endpoints.Search (searchIssues')
import GitHub.Data.Name (Name (..))
import OurPrelude
import qualified Text.Regex.Applicative.Text as RE
import Text.Regex.Applicative.Text ((=~))
@@ -33,16 +31,16 @@ import qualified Utils as U
default (T.Text)
gReleaseUrl :: MonadIO m => URLParts -> ExceptT Text m Text
gReleaseUrl (URLParts o r t) =
gReleaseUrl :: MonadIO m => Auth -> URLParts -> ExceptT Text m Text
gReleaseUrl auth (URLParts o r t) =
ExceptT $
bimap (T.pack . show) (getUrl . releaseHtmlUrl)
<$> liftIO (releaseByTagName o r t)
<$> liftIO (github auth (releaseByTagNameR o r t))
releaseUrl :: MonadIO m => Text -> ExceptT Text m Text
releaseUrl url = do
releaseUrl :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m Text
releaseUrl env url = do
urlParts <- parseURL url
gReleaseUrl urlParts
gReleaseUrl (authFrom env) urlParts
pr :: MonadIO m => Text -> Text -> m ()
pr base msg =
@@ -111,9 +109,10 @@ compareUrl urlOld urlNew = do
--deleteDoneBranches :: IO ()
--deleteDoneBranches = do
autoUpdateRefs :: Text -> IO (Either Text (Vector Text))
autoUpdateRefs githubToken =
references' (Just (OAuth (T.encodeUtf8 githubToken))) "r-ryantm" "nixpkgs"
-- (OAuth (T.encodeUtf8 githubToken))
autoUpdateRefs :: Auth -> IO (Either Text (Vector Text))
autoUpdateRefs auth =
github auth (referencesR "r-ryantm" "nixpkgs" FetchAll)
& fmap
( first (T.pack . show)
>>> second (fmap gitReferenceRef >>> V.mapMaybe (T.stripPrefix prefix))
@@ -121,10 +120,10 @@ autoUpdateRefs githubToken =
where
prefix = "refs/heads/auto-update/"
openPRWithAutoUpdateRefFromRRyanTM :: Text -> Text -> IO (Either Text Bool)
openPRWithAutoUpdateRefFromRRyanTM githubToken ref =
openPRWithAutoUpdateRefFromRRyanTM :: Auth -> Text -> IO (Either Text Bool)
openPRWithAutoUpdateRefFromRRyanTM auth ref =
executeRequest
(OAuth (T.encodeUtf8 githubToken))
auth
( pullRequestsForR
"nixos"
"nixpkgs"
@@ -133,16 +132,16 @@ openPRWithAutoUpdateRefFromRRyanTM githubToken ref =
)
& fmap (first (T.pack . show) >>> second (not . V.null))
refShouldBeDeleted :: Text -> Text -> IO Bool
refShouldBeDeleted githubToken ref =
refShouldBeDeleted :: Auth -> Text -> IO Bool
refShouldBeDeleted auth ref =
not . either (const True) id
<$> openPRWithAutoUpdateRefFromRRyanTM githubToken ref
<$> openPRWithAutoUpdateRefFromRRyanTM auth ref
closedAutoUpdateRefs :: Text -> IO (Either Text (Vector Text))
closedAutoUpdateRefs githubToken =
closedAutoUpdateRefs :: Auth -> IO (Either Text (Vector Text))
closedAutoUpdateRefs auth =
runExceptT $ do
aur :: Vector Text <- ExceptT $ autoUpdateRefs githubToken
ExceptT (Right <$> V.filterM (refShouldBeDeleted githubToken) aur)
aur :: Vector Text <- ExceptT $ autoUpdateRefs auth
ExceptT (Right <$> V.filterM (refShouldBeDeleted auth) aur)
-- This is too slow
openPullRequests :: Text -> IO (Either Text (Vector SimplePullRequest))
@@ -161,14 +160,18 @@ openAutoUpdatePR updateEnv oprs = oprs & (V.find isThisPkg >>> isJust)
titleHasNewVersion = newVersion updateEnv `T.isSuffixOf` title
in titleHasName && titleHasNewVersion
authFromToken :: Text -> Auth
authFromToken = OAuth . T.encodeUtf8
authFrom :: UpdateEnv -> Auth
authFrom = authFromToken . U.githubToken . options
checkExistingUpdatePR :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m ()
checkExistingUpdatePR ue attrPath = do
checkExistingUpdatePR env attrPath = do
searchResult <-
ExceptT
$ liftIO
$ searchIssues'
(Just (OAuth (T.encodeUtf8 (U.githubToken (options ue)))))
search
$ github (authFrom env) (searchIssuesR search)
& fmap (first (T.pack . show))
if T.length (openPRReport searchResult) == 0
then return ()
@@ -178,7 +181,7 @@ checkExistingUpdatePR ue attrPath = do
<> openPRReport searchResult
)
where
title = U.prTitle ue attrPath
title = U.prTitle env attrPath
search = [interpolate|repo:nixos/nixpkgs $title |]
openPRReport searchResult =
searchResultResults searchResult & V.filter (issueClosedAt >>> isNothing)
@@ -188,14 +191,11 @@ checkExistingUpdatePR ue attrPath = do
report i = "- " <> issueTitle i <> "\n " <> tshow (issueUrl i)
latestVersion :: MonadIO m => UpdateEnv -> Text -> ExceptT Text m Version
latestVersion ue url = do
latestVersion env url = do
urlParts <- parseURL url
r <-
ExceptT
fmapLT tshow $ ExceptT
$ liftIO
$ latestRelease'
(Just (OAuth (T.encodeUtf8 (U.githubToken (options ue)))))
(owner urlParts)
(repo urlParts)
& fmap (first (T.pack . show))
$ executeRequest (authFrom env)
$ latestReleaseR (owner urlParts) (repo urlParts)
return $ T.dropWhile (\c -> c == 'v' || c == 'V') (releaseTagName r)
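
The changes above replace the deprecated per-endpoint helpers (references', latestRelease', searchIssues') with the github library's request-style API: build a Request with one of the *R functions and run it with `github` (or `executeRequest`) plus an Auth value. A minimal sketch of that pattern, under the assumption that the pinned `github` version exports these names from the top-level GitHub module, as the code above relies on; the token is a placeholder:

-- Run a request-style GitHub call, mirroring the pattern used above.
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Text.Encoding as T
import GitHub

main :: IO ()
main = do
  let auth = OAuth (T.encodeUtf8 "placeholder-token") -- cf. authFromToken
  result <- github auth (latestReleaseR "nixos" "nixpkgs")
  case result of
    Left err -> print err                       -- GitHub.Error value
    Right release -> print (releaseTagName release)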


@@ -24,7 +24,7 @@ import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import Data.Time.Clock (addUTCTime, getCurrentTime)
import qualified Data.Vector as V
import OurPrelude
import OurPrelude hiding (throw)
import System.Directory (getModificationTime)
import System.Environment.XDG.BaseDir (getUserCacheDir)
import System.Exit


@@ -22,9 +22,8 @@ import CVE
parseFeed,
)
import Codec.Compression.GZip (decompress)
import Control.Exception (SomeException, ioError, try)
import Control.Exception (SomeException, try)
import Crypto.Hash.SHA256 (hashlazy)
import Data.Bifunctor (second)
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.Hex (hex, unhex)
import Data.List (group)
@@ -59,7 +58,6 @@ import System.Directory
removeFile,
)
import System.FilePath ((</>))
import System.IO.Error (userError)
import Utils (ProductID, Version)
import Version (matchVersion)


@@ -38,7 +38,6 @@ import Data.Bifunctor
import qualified Data.ByteString.Lazy as BSL
import Data.Function ((&))
import Data.Functor ((<&>))
import Data.Semigroup ((<>))
import Data.Set (Set)
import Data.Text (Text, pack)
import qualified Data.Text.Encoding as T


@@ -1,6 +1,3 @@
#!/usr/bin/env nix-shell
#!nix-shell -p nix -i "nix-env -qaP --no-name --out-path --arg checkMeta true --argstr path $$PWD -f"
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
@@ -27,9 +24,6 @@ import Text.Parser.Combinators
outPathsExpr :: Text
outPathsExpr =
[interpolate|
# When using as a callable script, passing `--argstr path some/path` overrides $$PWD.
{ checkMeta
, path ? ./.
}:


@@ -4,15 +4,12 @@
module Repology where
import Control.Category ((>>>))
import Data.Aeson
import Data.HashMap.Strict
import Data.List
import Data.Proxy
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO
import Data.Vector (Vector)
import qualified Data.Vector as V
import GHC.Generics
import Network.HTTP.Client.TLS (newTlsManager)


@@ -276,7 +276,7 @@ publishPackage log updateEnv oldSrcUrl newSrcUrl attrPath result opDiff msgs = d
let rewriteMessages = foldl (\ms m -> ms <> T.pack "\n- " <> m) "\nUpdates performed:" msgs
releaseUrlMessage <-
( do
msg <- GH.releaseUrl newSrcUrl
msg <- GH.releaseUrl updateEnv newSrcUrl
return ("\n[Release on GitHub](" <> msg <> ")\n\n")
)
<|> return ""

test/DoctestSpec.hs (new file, 38 lines)

@@ -0,0 +1,38 @@
module DoctestSpec where
import Test.DocTest
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Doctests" do
it "should all pass" do
doctest
[ "-isrc",
"-XOverloadedStrings",
"-XDataKinds",
"-XFlexibleContexts",
"-XGADTs",
"-XLambdaCase",
"-XPolyKinds",
"-XRankNTypes",
"-XScopedTypeVariables",
"-XTypeApplications",
"-XTypeFamilies",
"-XTypeOperators",
"-XBlockArguments",
"-flate-specialise",
"-fspecialise-aggressively",
-- "-fplugin=Polysemy.Plugin",
-- src/Process.hs:1:1: error:
-- Can't find interface-file declaration for type constructor or class Polysemy.Internal.Union.LocateEffect
-- Probable cause: bug in .hi-boot file, or inconsistent .hi file
-- Use -ddump-if-trace to get an idea of which file caused the error
"src/Version.hs",
"src/GH.hs",
"src/Time.hs"
]

test/RewriteSpec.hs (new file, 22 lines)

@@ -0,0 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module RewriteSpec where
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified File
import OurPrelude
import qualified Rewrite
import Test.Hspec
import Text.RawString.QQ (r)
import qualified Utils
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Hello world" do
it "is alive" do
2 + 2 `shouldBe` 4

test/Spec.hs (new file, 1 line)

@@ -0,0 +1 @@
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
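
The single-line test/Spec.hs delegates to the hspec-discover preprocessor, which scans the test directory for modules named *Spec that export `spec :: Spec` and generates the test driver from them. A hand-written equivalent of roughly what it produces for the two specs added in this commit (the generated code differs in detail):

-- Approximately what hspec-discover generates for this test suite.
import Test.Hspec

import qualified DoctestSpec
import qualified RewriteSpec

main :: IO ()
main = hspec $ do
  describe "DoctestSpec" DoctestSpec.spec
  describe "RewriteSpec" RewriteSpec.spec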


@@ -1,21 +0,0 @@
import Test.DocTest
main :: IO ()
main =
doctest
[ "-isrc"
, "-XOverloadedStrings"
, "-XDataKinds"
, "-XFlexibleContexts"
, "-XGADTs"
, "-XLambdaCase"
, "-XPolyKinds"
, "-XRankNTypes"
, "-XScopedTypeVariables"
, "-XTypeApplications"
, "-XTypeFamilies"
, "-XTypeOperators"
, "src/Version.hs"
, "src/GH.hs"
, "src/Time.hs"
]