Fixes #1204: Release bindings and codegens to Maven Central. (#1205)

* Fixes #1204: Release bindings and codegens to Maven Central.

Upload the Java and Scala Bindings, together with the respective code
generator binaries, to the Sonatype Open Source Repository Hosting (OSSRH)
service for synchronization with Maven Central.
Author: Nick Smith, committed by GitHub
Date: 2019-06-17 15:37:49 +02:00
Parent: 4974c1bdab
Commit: 0c0e4231f3
19 changed files with 801 additions and 143 deletions


@ -239,71 +239,107 @@ def _create_scala_source_jar(**kwargs):
srcs = kwargs["srcs"],
)
def _build_nosrc_jar(ctx):
# this ensures the file is not empty
manifest_path = ctx.actions.declare_file("%s_MANIFEST.MF" % ctx.label.name)
ctx.actions.write(manifest_path, "Manifest-Version: 1.0")
resources = "META-INF/MANIFEST.MF=%s\n" % manifest_path.path
zipper_arg_path = ctx.actions.declare_file("%s_zipper_args" % ctx.label.name)
ctx.actions.write(zipper_arg_path, resources)
cmd = """
rm -f {jar_output}
{zipper} c {jar_output} @{path}
"""
cmd = cmd.format(
path = zipper_arg_path.path,
jar_output = ctx.outputs.out.path,
zipper = ctx.executable._zipper.path,
)
outs = [ctx.outputs.out]
inputs = [manifest_path]
ctx.actions.run_shell(
inputs = inputs,
tools = [ctx.executable._zipper, zipper_arg_path],
outputs = outs,
command = cmd,
progress_message = "scala %s" % ctx.label,
arguments = [],
)
def _scaladoc_jar_impl(ctx):
    # Detect an actual scala source file rather than a srcjar or other label
    srcFiles = [
        src.path
        for src in ctx.files.srcs
        if src.is_source
    ]

    if srcFiles != []:
        # The following plugin handling is lifted from a private library of 'rules_scala'.
        # https://github.com/bazelbuild/rules_scala/blob/1cffc5fcae1f553a7619b98bf7d6456d65081665/scala/private/rule_impls.bzl#L130
        pluginPaths = []
        for p in ctx.attr.plugins:
            if hasattr(p, "path"):
                pluginPaths.append(p)
            elif hasattr(p, "scala"):
                pluginPaths.extend([j.class_jar for j in p.scala.outputs.jars])
            elif hasattr(p, "java"):
                pluginPaths.extend([j.class_jar for j in p.java.outputs.jars])
                # support http_file pointed at a jar. http_jar uses ijar,
                # which breaks scala macros
            elif hasattr(p, "files"):
                pluginPaths.extend([f for f in p.files if "-sources.jar" not in f.basename])

        transitive_deps = [dep[JavaInfo].transitive_deps for dep in ctx.attr.deps]
        classpath = depset([], transitive = transitive_deps).to_list()

        outdir = ctx.actions.declare_directory(ctx.label.name + "_tmpdir")

        args = ctx.actions.args()
        args.add_all(["-d", outdir.path])
        args.add("-classpath")
        args.add_joined(classpath, join_with = ":")
        args.add_joined(pluginPaths, join_with = ",", format_joined = "-Xplugin:%s")
        args.add_all(common_scalacopts)
        args.add_all(srcFiles)

        ctx.actions.run(
            executable = ctx.executable._scaladoc,
            inputs = ctx.files.srcs + classpath + pluginPaths,
            outputs = [outdir],
            arguments = [args],
            mnemonic = "ScaladocGen",
        )

        # since we only have the output directory of the scaladoc generation we need to find
        # all the files below sources_out and add them to the zipper args file
        zipper_args_file = ctx.actions.declare_file(ctx.label.name + ".zipper_args")
        ctx.actions.run_shell(
            mnemonic = "ScaladocFindOutputFiles",
            outputs = [zipper_args_file],
            inputs = [outdir],
            command = "find -L {src_path} -type f | sed -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | sort > {args_file}".format(
                src_path = outdir.path,
                args_file = zipper_args_file.path,
            ),
            progress_message = "find_scaladoc_output_files %s" % zipper_args_file.path,
            use_default_shell_env = True,
        )

        ctx.actions.run(
            executable = ctx.executable._zipper,
            inputs = ctx.files.srcs + classpath + [outdir, zipper_args_file],
            outputs = [ctx.outputs.out],
            arguments = ["c", ctx.outputs.out.path, "@" + zipper_args_file.path],
            mnemonic = "ScaladocJar",
        )
    else:
        _build_nosrc_jar(ctx)
scaladoc_jar = rule(
implementation = _scaladoc_jar_impl,
@ -339,17 +375,8 @@ Arguments:
"""
def _create_scaladoc_jar(**kwargs):
# Try to not create empty scaladoc jars and limit execution to Linux and MacOS
# Detect an actual scala source file rather than a srcjar or other label
create_scaladoc = False
if len(kwargs["srcs"]) > 0 and is_windows == False:
for src in kwargs["srcs"]:
if src.endswith(".scala"):
create_scaladoc = True
break
if create_scaladoc:
# Limit execution to Linux and MacOS
if is_windows == False:
plugins = []
if "plugins" in kwargs:
plugins = kwargs["plugins"]


@ -52,6 +52,12 @@ steps:
env:
# to connect to bintray
JFROG_CONFIG_CONTENT: $(JFROG_CONFIG_CONTENT)
# For signing artifacts to be uploaded to Maven Central.
GPG_KEY: $(gpg-code-signing)
# Configuration for the Sonatype Open Source Repository Hosting (OSSRH)
MAVEN_USER: $(MAVEN_USER)
MAVEN_PASSWORD: $(MAVEN_PASSWORD)
MAVEN_URL: "https://oss.sonatype.org"
name: release
- bash: |
set -euo pipefail

dev-env/bin/gpg (new executable file, 8 lines)

@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Meant to be linked to from `dev-env/bin`, symlink should be named after the
# tool. Execute a Nix tool from a derivation that creates a `result` directory.
DADE_CURRENT_SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DADE_CURRENT_SCRIPT_DIR/../lib/dade-common"
base=$(basename $0)
execTool $base out $base "$@"


@ -16,27 +16,4 @@
</dependency>
</dependencies>
<!-- end snippet: dependencies -->
<!-- start snippet: repositories -->
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-digitalassetsdk-DigitalAssetSDK</id>
<name>bintray</name>
<url>https://digitalassetsdk.bintray.com/DigitalAssetSDK</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-digitalassetsdk-DigitalAssetSDK</id>
<name>bintray</name>
<url>https://digitalassetsdk.bintray.com/DigitalAssetSDK</url>
</pluginRepository>
</pluginRepositories>
<!-- end snippet: repositories -->
</project>


@ -94,7 +94,7 @@ The following snippet is an excerpt from the ``pom.xml`` that is part of the :re
.. literalinclude:: ../../getting-started/quickstart/template-root/pom.xml
:language: xml
:lines: 68-100,116-117
:lines: 47-79,95-96
:dedent: 12


@ -154,21 +154,14 @@ To use the Java bindings library, add the following dependencies to your project
:end-before: <!-- end snippet: dependencies -->
:dedent: 4
Replace ``x.y.z`` for both dependencies with the version that you want to use. You can find the available versions at
`https://digitalassetsdk.bintray.com/DigitalAssetSDK/com/daml/ledger/`.
You also have to add the DAML Bintray Repository to your ``pom.xml``:
.. literalinclude:: ./code-snippets/pom.xml
:language: xml
:start-after: <!-- start snippet: repositories -->
:end-before: <!-- end snippet: repositories -->
:dedent: 4
Replace ``x.y.z`` for both dependencies with the version that you want to use. You can find the available versions by checking
the `Maven Central Repository <https://search.maven.org/search?q=g:com.daml.ledger>`__.
.. note::
As of DAML SDK release 0.13.1, the Java Bindings libraries are available via the public Maven Central repository. Earlier releases are available from the `DAML Bintray repository <https://digitalassetsdk.bintray.com>`__.
You can also take a look at the ``pom.xml`` file from the :ref:`quickstart project <quickstart>`.
.. _ledger-api-java-binding-connecting:
Connecting to the ledger


@ -42,27 +42,6 @@
</dependency>
</dependencies>
<repositories>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-digitalassetsdk-DigitalAssetSDK</id>
<name>bintray</name>
<url>https://digitalassetsdk.bintray.com/DigitalAssetSDK</url>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-digitalassetsdk-DigitalAssetSDK</id>
<name>bintray</name>
<url>https://digitalassetsdk.bintray.com/DigitalAssetSDK</url>
</pluginRepository>
</pluginRepositories>
<build>
<plugins>
<plugin>


@ -14,6 +14,14 @@ Language
- Add an instance for ``IsParties (Optional Party)``, allowing ``Optional`` values to be used in ``signatory``, ``observer`` and ``maintainer`` clauses.
Java Bindings
~~~~~~~~~~~~~
- Release the Java Bindings to the public Maven Central repository. To move to using the Maven Central repository, remove
the ``<repository>...</repository>`` and ``<pluginRepository>...</pluginRepository>`` blocks from Maven POM files
that use version 0.12.26 (or later) of the Java Bindings.
See `#1205 <https://github.com/digital-asset/daml/issues/1205>`__.
.. _release-0-13-0:
0.13.0 - 2019-06-17


@ -4,12 +4,15 @@
load(
"//bazel_tools:scala.bzl",
"da_scala_binary",
"scala_source_jar",
"scaladoc_jar",
)
load(
"@com_github_johnynek_bazel_jar_jar//:jar_jar.bzl",
"jar_jar",
)
load("//bazel_tools:pom_file.bzl", "pom_file")
load("@os_info//:os_info.bzl", "is_windows")
da_scala_binary(
name = "codegen-main",
@ -36,3 +39,16 @@ pom_file(
target = ":shaded_binary",
visibility = ["//visibility:public"],
)
# Create empty Scaladoc JAR for uploading to Maven Central
scaladoc_jar(
name = "shaded_binary_scaladoc",
srcs = [],
deps = [],
) if is_windows == False else None
# Create empty Sources JAR for uploading to Maven Central
scala_source_jar(
name = "shaded_binary_src",
srcs = [],
)


@ -6,6 +6,8 @@ load(
"da_scala_binary",
"da_scala_library",
"da_scala_test",
"scala_source_jar",
"scaladoc_jar",
)
load(
"//rules_daml:daml.bzl",
@ -21,6 +23,7 @@ load(
"jar_jar",
)
load("//bazel_tools:pom_file.bzl", "pom_file")
load("@os_info//:os_info.bzl", "is_windows")
da_scala_binary(
name = "codegen",
@ -105,6 +108,19 @@ pom_file(
visibility = ["//visibility:public"],
)
# Create empty Scaladoc JAR for uploading to Maven Central
scaladoc_jar(
name = "shaded_binary_scaladoc",
srcs = [],
deps = [],
) if is_windows == False else None
# Create empty Sources JAR for uploading to Maven Central
scala_source_jar(
name = "shaded_binary_src",
srcs = [],
)
daml_lf_target_versions = [
"1.0",
"1.1",


@ -6,6 +6,12 @@ load("//bazel_tools:pkg.bzl", "pkg_tar")
load("//bazel_tools:proto.bzl", "proto_gen")
load("//bazel_tools:pom_file.bzl", "pom_file")
load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library")
load(
"//bazel_tools:scala.bzl",
"scala_source_jar",
"scaladoc_jar",
)
load("@os_info//:os_info.bzl", "is_windows")
ledger_api_proto_source_root = "ledger-api/grpc-definitions"
@ -259,6 +265,19 @@ pom_file(
target = ":ledger-api-scalapb",
)
# Create empty Scaladoc JAR for uploading to Maven Central
scaladoc_jar(
name = "ledger-api-scalapb_scaladoc",
srcs = [],
deps = [],
) if is_windows == False else None
# Create empty Sources JAR for uploading to Maven Central
scala_source_jar(
name = "ledger-api-scalapb_src",
srcs = [],
)
proto_gen(
name = "ledger-api-docs",
srcs = [":protos"],


@ -229,6 +229,10 @@ in rec {
base64 = pkgs.coreutils;
sha1sum = pkgs.coreutils;
xmlstarlet = pkgs.xmlstarlet;
# Cryptography tooling
gnupg = pkgs.gnupg;
gpg = gnupg;
# Packaging tools
patchelf = bazel_dependencies.patchelf;


@ -8,17 +8,25 @@ da_haskell_binary(
srcs = glob(["src/**/*.hs"]),
hazel_deps = [
"aeson",
"async",
"ansi-terminal",
"base",
"base64-bytestring",
"bytestring",
"conduit",
"conduit-extra",
"containers",
"connection",
"cryptohash",
"directory",
"exceptions",
"extra",
"fast-logger",
"filepath",
"http-client",
"http-client-tls",
"http-conduit",
"http-types",
"lifted-async",
"lifted-base",
"monad-control",
@ -27,9 +35,12 @@ da_haskell_binary(
"path",
"path-io",
"process",
"retry",
"safe",
"safe-exceptions",
"time",
"text",
"temporary",
"transformers",
"unliftio-core",
"unordered-containers",


@ -56,15 +56,19 @@
type: jar-scala
- target: //ledger-api/grpc-definitions:ledger-api-protos-tarball
type: targz
mavenUpload: True
location:
groupId: com.digitalasset
artifactId: ledger-api-protos
- target: //ledger-api/rs-grpc-bridge:rs-grpc-bridge
type: jar-lib
mavenUpload: True
- target: //language-support/java/bindings:bindings-java
type: jar-lib
mavenUpload: True
- target: //language-support/java/bindings-rxjava:bindings-rxjava
type: jar-lib
mavenUpload: True
- target: //docs:quickstart-java
type: targz
location:
@ -78,11 +82,13 @@
- target: //extractor:extractor-binary
type: jar-deploy
- target: //ledger-api/grpc-definitions:ledger-api-scalapb
type: jar
type: jar-scala
mavenUpload: True
- target: //ledger-api/testing-utils:testing-utils
type: jar-scala
- target: //language-support/scala/bindings:bindings
type: jar-scala
mavenUpload: True
- target: //ledger-api/rs-grpc-akka:rs-grpc-akka
type: jar-scala
- target: //ledger/ledger-api-akka:ledger-api-akka
@ -107,14 +113,17 @@
type: jar
- target: //language-support/scala/bindings-akka:bindings-akka
type: jar-scala
mavenUpload: True
- target: //language-support/java/codegen:shaded_binary
type: jar
type: jar-scala
mavenUpload: True
- target: //navigator/backend:navigator-binary
type: jar-deploy
- target: //language-support/codegen-common:codegen-common
type: jar
type: jar-scala
- target: //language-support/codegen-main:shaded_binary
type: jar
type: jar-scala
mavenUpload: True
- target: //ledger/participant-state:participant-state-v1
type: jar-scala
- target: //ledger/participant-state:participant-state


@ -5,9 +5,9 @@
{-# LANGUAGE TemplateHaskell #-}
module Main (main) where
import Control.Monad.Extra
import Control.Monad.IO.Class
import Control.Monad.Logger
import Data.Traversable
import Data.Yaml
import Path
import Path.IO
@ -18,6 +18,7 @@ import System.Process
import Options
import Types
import Upload
import Util
main :: IO ()
@ -54,10 +55,19 @@ main = do
then do
$logInfo "Make release"
releaseToBintray upload releaseDir (map (\(a, (_, outp)) -> (a, outp)) files)
-- Uploading to Maven Central
mavenUploadConfig <- mavenConfigFromEnv
let mavenUploadArtifacts = filter (\a -> getMavenUpload $ artMavenUpload a) artifacts
uploadArtifacts <- concatMapM (artifactCoords optsAllArtifacts) mavenUploadArtifacts
uploadToMavenCentral mavenUploadConfig releaseDir uploadArtifacts
-- set variables for next steps in Azure pipelines
liftIO . putStrLn $ "##vso[task.setvariable variable=has_released;isOutput=true]true"
liftIO . putStrLn . T.unpack $ "##vso[task.setvariable variable=release_tag]" # renderVersion sdkVersion
else $logInfo "Make dry run of release"
where
runLog Options{..} m0 = do
let m = filterLogger (\_ ll -> ll >= optsLogLevel) m0


@ -30,7 +30,7 @@ data Options = Options
optsParser :: Parser Options
optsParser = Options
<$> strOption (long "artifacts" <> help "Path to yaml file listing the artifacts to be released")
<*> (PerformUpload <$> switch (long "upload" <> help "upload artifacts to bintray. If false, we don't upload artifacts to artifactory or bintray even when the last commit is a release commit."))
<*> (PerformUpload <$> switch (long "upload" <> help "upload artifacts to bintray and Maven Central. If false, we don't upload artifacts to Maven Central or bintray even when the last commit is a release commit."))
<*> option str (long "release-dir" <> help "specify full path to release directory")
<*> option (Just <$> str) (long "slack-release-message" <> help "if present will write out what to write in slack. if there are no releases the file will be empty" <> value Nothing)
<*> switch (long "full-logging" <> help "full logging detail")


@ -6,11 +6,16 @@
module Types (
AllArtifacts(..),
ArtifactId,
ArtifactType,
CIException(..),
Classifier,
BintrayPackage(..),
GitRev,
GroupId,
MavenAllowUnsecureTls(..),
MavenCoords(..),
MavenUpload(..),
MavenUploadConfig(..),
MonadCI,
OS(..),
PerformUpload(..),
@ -35,7 +40,8 @@ import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.IO.Unlift (MonadUnliftIO)
import Control.Monad.Logger
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
@ -62,10 +68,23 @@ type TextVersion = Text
type GroupId = [Text]
type ArtifactId = Text
type Classifier = Text
type ArtifactType = Text
-- Fully qualified coordinates for a Maven artifact.
data MavenCoords = MavenCoords
{ groupId :: !GroupId
, artifactId :: !ArtifactId
, version :: !TextVersion
, classifier :: Maybe ArtifactType
, artifactType :: !ArtifactType
} deriving Show
newtype PlatformDependent = PlatformDependent{getPlatformDependent :: Bool}
deriving (Eq, Show, FromJSON)
newtype MavenUpload = MavenUpload { getMavenUpload :: Bool }
deriving (Eq, Show, FromJSON)
-- | If this is True, we produce all artifacts, even platform-independent ones, on MacOS.
-- This is useful for testing purposes.
newtype AllArtifacts = AllArtifacts Bool
@ -147,3 +166,24 @@ parseVersion (T.strip -> txt) = do
renderVersion :: Version -> Text
renderVersion (Version maj min_ patch) = T.intercalate "." [tshow maj, tshow min_, tshow patch]
newtype MavenAllowUnsecureTls = MavenAllowUnsecureTls { getAllowUnsecureTls :: Bool }
deriving (Eq, Show, FromJSON)
data MavenUploadConfig = MavenUploadConfig
{ mucUrl :: !Text
, mucUser :: !Text
, mucPassword :: !Text
, mucAllowUnsecureTls :: !MavenAllowUnsecureTls
-- ^^ For testing with an Artifactory (or similar) instance using a self-signed SSL certificate.
-- This flag should NEVER be set in production.
, mucSigningKey :: String
} deriving (Eq, Show)
instance FromJSON MavenUploadConfig where
parseJSON = withObject "MavenUploadConfig" $ \o -> MavenUploadConfig
<$> o .: "url"
<*> o .: "user"
<*> o .: "password"
<*> (fromMaybe (MavenAllowUnsecureTls False) <$> o .:? "allowUnsecureTls")
<*> o .: "signingKey"
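For reference, a hedged sketch (not part of this change; field values are placeholders) of a JSON value that this FromJSON instance would accept:
-- Illustrative only; "allowUnsecureTls" may be omitted and defaults to False:
--   { "url": "https://oss.sonatype.org"
--   , "user": "ossrh-user"
--   , "password": "ossrh-password"
--   , "signingKey": "<base64-encoded ASCII-armoured GPG key>"
--   }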

release/src/Upload.hs (new file, 494 lines)

@ -0,0 +1,494 @@
-- Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
-- SPDX-License-Identifier: Apache-2.0
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Upload (
uploadToMavenCentral,
mavenConfigFromEnv,
) where
import qualified Control.Concurrent.Async.Lifted.Safe as Async
import qualified Control.Exception.Safe as E
import Control.Monad (when)
import Control.Monad.Logger
import Control.Monad.IO.Class
import "cryptohash" Crypto.Hash (Digest, MD5(..), SHA1(..), digestToHexByteString, hash)
import Control.Retry
import Data.Aeson
import Data.Foldable (for_)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as C8
import qualified Data.List as List
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
import Network.Connection (TLSSettings(..))
import Network.HTTP.Client
import Network.HTTP.Client.TLS (mkManagerSettings, tlsManagerSettings)
import Network.HTTP.Simple (setRequestBasicAuth, setRequestBodyFile, setRequestBodyLBS, setRequestHeader, setRequestMethod, setRequestPath)
import Network.HTTP.Types.Status
import Path
import System.Environment
import System.IO.Temp
import Types
import Util
--
-- Upload the artifacts to Maven Central
--
-- The artifacts are first uploaded to a staging repository on the Sonatype Open Source Repository Hosting platform
-- where the repository contents is verified to conform to the Maven Central standards before being released to
-- the public repository.
--
-- Digital Asset has been assigned the 'com.daml' and 'com.digitalasset' namespaces (group IDs for Maven repos), and artifacts
-- need to be uploaded to the staging repository corresponding to their group ID. The staging repository for each group ID
-- is handled separately, hence there are several 'duplicated' REST calls.
--
-- Further information:
--
-- Staging requirements: https://central.sonatype.org/pages/requirements.html
-- Staging REST API: https://oss.sonatype.org/nexus-staging-plugin/default/docs/index.html
--
uploadToMavenCentral :: (MonadCI m) => MavenUploadConfig -> Path Abs Dir -> [(MavenCoords, Path Rel File)] -> m ()
uploadToMavenCentral MavenUploadConfig{..} releaseDir artifacts = do
-- Note: TLS verification settings switchable by MavenUpload settings
let managerSettings = if getAllowUnsecureTls mucAllowUnsecureTls then noVerifyTlsManagerSettings else tlsManagerSettings
-- Create HTTP Connection manager with 2min response timeout as the OSSRH can be slow...
manager <- liftIO $ newManager managerSettings { managerResponseTimeout = responseTimeoutMicro (120 * 1000 * 1000) }
parsedUrlRequest <- parseUrlThrow $ T.unpack mucUrl -- Note: Will throw exception on non-2XX responses
let baseRequest = setRequestMethod "PUT" $ setRequestBasicAuth (encodeUtf8 mucUser) (encodeUtf8 mucPassword) parsedUrlRequest
decodedSigningKey <- decodeSigningKey mucSigningKey
-- Security Note: Using the withSystemTempDirectory function to always cleanup the private key data from the filesystems.
withSystemTempDirectory "gnupg" $ \gnupgTempDir -> do
-- Write the secret key used for signing into a temporary file and use 'gpg' command line tool to import into
-- GPG's internal file tree.
secretKeyImportFile <- liftIO $ emptyTempFile gnupgTempDir "gpg-private-key.asc"
_ <- liftIO $ BS.writeFile secretKeyImportFile decodedSigningKey
loggedProcess_ "gpg" [ "--homedir", T.pack gnupgTempDir, "--no-tty", "--quiet", "--import", T.pack secretKeyImportFile ]
--
-- Prepare the remote staging repositories
--
(comDamlStagingRepoId, comDigitalAssetRepoId) <- prepareStagingRepo baseRequest manager
--
-- Upload the artifacts; each with:
-- 1. PGP signature
-- 2. SHA1 checksum
-- 3. MD5 checksum
for_ artifacts $ \(coords@MavenCoords{..}, file) -> do
let absFile = releaseDir </> file -- (T.intercalate "/" (groupId <> [artifactId]))
sigTempFile <- liftIO $ emptySystemTempFile $ T.unpack $ artifactId <> maybe "" ("-" <>) classifier <> "-" <> artifactType <> ".asc"
-- The "--batch" and "--yes" flags are used to prevent gpg waiting on stdin.
loggedProcess_ "gpg" [ "--homedir", T.pack gnupgTempDir, "-ab", "-o", T.pack sigTempFile, "--batch", "--yes", T.pack (fromAbsFile absFile) ]
let artUploadPath = uploadPath coords comDamlStagingRepoId comDigitalAssetRepoId
(md5Hash, sha1Hash) <- chksumFileContents absFile
$logInfo ("(Uploading " <> artUploadPath <> " from " <> tshow absFile <> ")")
let request
= setRequestHeader "Content-Type" [ encodeUtf8 $ getContentType artifactType ]
$ setRequestPath (encodeUtf8 artUploadPath)
$ setRequestBodyFile (fromAbsFile absFile) baseRequest
let pgpSigRequest
= setRequestHeader "Content-Type" [ "text/plain" ]
$ setRequestPath (encodeUtf8 $ artUploadPath <> ".asc")
$ setRequestBodyFile sigTempFile baseRequest
let sha1CksumRequest
= setRequestHeader "Content-Type" [ "text/plain" ]
$ setRequestPath (encodeUtf8 $ artUploadPath <> ".sha1")
$ setRequestBodyLBS sha1Hash baseRequest
let md5CksumRequest
= setRequestHeader "Content-Type" [ "text/plain" ]
$ setRequestPath (encodeUtf8 $ artUploadPath <> ".md5")
$ setRequestBodyLBS md5Hash baseRequest
(_, _, _, _) <- Async.runConcurrently $ (,,,)
<$> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody request manager))
<*> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody pgpSigRequest manager))
<*> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody sha1CksumRequest manager))
<*> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody md5CksumRequest manager))
pure ()
$logInfo "Finished uploading artifacts"
-- Now 'finish' the staging and release to Maven Central
publishStagingRepo baseRequest manager comDamlStagingRepoId comDigitalAssetRepoId
prepareStagingRepo :: (MonadCI m) => Request -> Manager -> m (Text, Text)
prepareStagingRepo baseRequest manager = do
--
-- Note on Profile IDs
--
-- Currently the profile IDs are hardcoded. The IDs are fixed to the "namespaces" ('com.daml' and 'com.digitalasset')
-- attached to the Digital Asset accounts on the Sonatype OSSRH.
--
--
-- Open the staging repository profile for uploads.
--
-- Opening the staging repositories explicitly instead of implicitly (by simply uploading the artifacts)
-- allows for better manageability (i.e. independent of the current state of the remote repositories, which
-- could be still open due to failures).
--
let startComDamlStagingRepoRequest
= setRequestMethod "POST"
$ setRequestPath "/service/local/staging/profiles/b6148ff96bfaaa/start" -- Profile key could be requested
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestHeader "accept" [ "application/json" ]
$ setRequestBodyLBS (BSL.fromStrict (encodeUtf8 "{\"data\":{\"description\":\"\"}}")) baseRequest
let startComDigitalassetStagingRepoRequest
= setRequestMethod "POST"
$ setRequestPath "/service/local/staging/profiles/b614bfdbd6b51f/start" -- Profile key could be requested
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestHeader "accept" [ "application/json" ]
$ setRequestBodyLBS (BSL.fromStrict (encodeUtf8 "{\"data\":{\"description\":\"\"}}")) baseRequest
(startComDamlStagingReosResponse, startComDigitalassetStagingRepoResponse) <- Async.runConcurrently $ (,)
<$> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpLbs startComDamlStagingRepoRequest manager))
<*> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpLbs startComDigitalassetStagingRepoRequest manager))
comDamlStagingRepoInfo <- decodeStagingPromoteResponse startComDamlStagingReosResponse
comDigitalassetStagingRepoInfo <- decodeStagingPromoteResponse startComDigitalassetStagingRepoResponse
return (stagedRepositoryId $ _data comDamlStagingRepoInfo, stagedRepositoryId $ _data comDigitalassetStagingRepoInfo)
publishStagingRepo :: (MonadCI m) => Request -> Manager -> Text -> Text -> m ()
publishStagingRepo baseRequest manager comDamlRepoId comDigitalassetRepoId = do
--
-- "Close" the staging profiles which initiates the running of the rules that check the uploaded artifacts
-- for compliance with the Maven Central requirements.
-- If all the rules pass then the status of the staging repository and profile will become "closed"; if anything fails
-- then the status will remain set to "open".
--
let finishComDamlStagingRepoRequest
= setRequestMethod "POST"
$ setRequestPath "/service/local/staging/profiles/b6148ff96bfaaa/finish" -- Profile key could be requested
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestBodyLBS (textToLazyByteString $ "{\"data\":{\"stagedRepositoryId\":\"" <> comDamlRepoId <> "\",\"description\":\"\"}}") baseRequest
let finishComDigitalassetStagingRepoRequest
= setRequestMethod "POST"
$ setRequestPath "/service/local/staging/profiles/b614bfdbd6b51f/finish" -- Profile key could be requested
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestBodyLBS (textToLazyByteString $ "{\"data\":{\"stagedRepositoryId\":\"" <> comDigitalassetRepoId <> "\",\"description\":\"\"}}") baseRequest
(_, _) <- Async.runConcurrently $ (,)
<$> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody finishComDamlStagingRepoRequest manager))
<*> Async.Concurrently (recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody finishComDigitalassetStagingRepoRequest manager))
let comDamlStatusReposRequest
= setRequestMethod "GET"
$ setRequestPath (encodeUtf8 ("/service/local/staging/repository/" <> comDamlRepoId))
$ setRequestHeader "accept" [ "application/json" ] baseRequest
let comDigitalassetStatusReposRequest
= setRequestMethod "GET"
$ setRequestPath (encodeUtf8 ("/service/local/staging/repository/" <> comDigitalassetRepoId))
$ setRequestHeader "accept" [ "application/json" ] baseRequest
--
-- Poll until the staging repositories are closed or the staging repositories cease to be "transitioning" to a new state
--
(comDamlNotClosed, comDigitalassetNotClosed) <- Async.runConcurrently $ (,)
<$> Async.Concurrently (recovering checkStatusRetryPolicy [ httpResponseHandler, checkRepoStatusHandler ] (\_ -> handleStatusRequest comDamlStatusReposRequest manager))
<*> Async.Concurrently (recovering checkStatusRetryPolicy [ httpResponseHandler, checkRepoStatusHandler ] (\_ -> handleStatusRequest comDigitalassetStatusReposRequest manager))
--
-- Drop" (delete) both staging repositories if one or more fails the checks (and are not in the "closed" state)
--
when (comDamlNotClosed || comDigitalassetNotClosed) $ do
when comDamlNotClosed $ do logStagingRepositoryActivity baseRequest manager comDamlRepoId
when comDigitalassetNotClosed $ do logStagingRepositoryActivity baseRequest manager comDigitalassetRepoId
dropStagingRepositories baseRequest manager [ comDamlRepoId, comDigitalassetRepoId ]
throwIO $ RepoFailedToClose $ [ comDamlRepoId | comDamlNotClosed ] <> [ comDigitalassetRepoId | comDigitalassetNotClosed ]
--
-- Now the final step of releasing the staged artifacts into the wild...
--
let releaseStagingReposRequest
= setRequestMethod "POST"
$ setRequestPath "/staging/bulk/promote"
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestBodyLBS (textToLazyByteString $ "{\"data\":{\"stagedRepositoryIds\":[\"" <> comDamlRepoId <> "\",\"" <> comDigitalassetRepoId <> "\"],\"description\":\"\",\"autoDropAfterRelease\":true}}") baseRequest
_ <- recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody releaseStagingReposRequest manager)
$logWarn "Published to Maven Central"
pure ()
-- Print out a log of the repository activity which includes details of which verification rule failed.
-- The output is not prettified as it should only be printed in rare(ish) error cases.
logStagingRepositoryActivity :: (MonadCI m) => Request -> Manager -> Text -> m ()
logStagingRepositoryActivity baseRequest manager repoId = do
let repoActivityRequest
= setRequestMethod "GET"
$ setRequestPath (encodeUtf8 ("/service/local/staging/repository/" <> repoId <> "/activity"))
$ setRequestHeader "accept" [ "application/json" ] baseRequest
activityResponse <- recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpLbs repoActivityRequest manager)
repoActivity <- decodeRepoActivityResponse activityResponse
$logWarn ("Failed to process staging repository \"" <> repoId <> "\". \n" <> (T.intercalate "\n " $ map tshow repoActivity))
return ()
dropStagingRepositories :: (MonadCI m) => Request -> Manager -> [Text] -> m ()
dropStagingRepositories baseRequest manager repoIdList = do
--
-- Note: This is a "Bulk Drop" request used by the Nexus UI and not part of the Staging REST API.
--
let dropReposJson = "{\"data\":{\"description\":\"\",\"stagedRepositoryIds\":" <> tshow repoIdList <> "}}"
let dropReposRequest
= setRequestMethod "POST"
$ setRequestPath "/service/local/staging/bulk/drop"
$ setRequestHeader "content-type" [ "application/json" ]
$ setRequestHeader "accept" [ "application/json" ]
$ setRequestBodyLBS (BSL.fromStrict (encodeUtf8 dropReposJson)) baseRequest
_ <- recovering uploadRetryPolicy [ httpResponseHandler ] (\_ -> liftIO $ httpNoBody dropReposRequest manager)
return ()
decodeSigningKey :: (MonadCI m) => String -> m BS.ByteString
decodeSigningKey signingKey = case Base64.decode $ C8.pack signingKey of
Left err -> throwIO $ CannotDecodeSigningKey err
Right decodedData -> return decodedData
-- Note: Upload path is NOT documented in the REST API Guide.
uploadPath :: MavenCoords -> Text -> Text -> Text
uploadPath MavenCoords{..} comDamlStagingRepoId comDigitalassetRepoId = do
let stagingRepoId = if ["com", "daml"] `List.isPrefixOf` groupId then comDamlStagingRepoId else comDigitalassetRepoId
T.intercalate "/" ("/service/local/staging/deployByRepositoryId" : [stagingRepoId] <> groupId <> [artifactId, version, artifactId]) <> "-" <> version <> maybe "" ("-" <>) classifier <> "." <> artifactType
getContentType :: ArtifactType -> Text
getContentType t =
case t of
"jar" -> "application/java-archive"
"pom" -> "application/xml"
_ -> "application/octet-stream"
noVerifyTlsSettings :: TLSSettings
noVerifyTlsSettings = TLSSettingsSimple
{ settingDisableCertificateValidation = True
, settingDisableSession = True
, settingUseServerName = False
}
noVerifyTlsManagerSettings :: ManagerSettings
noVerifyTlsManagerSettings = mkManagerSettings noVerifyTlsSettings Nothing
chksumFileContents :: (MonadIO m) => Path Abs File -> m (BSL.ByteString, BSL.ByteString)
chksumFileContents file = do
contents <- liftIO $ BS.readFile $ fromAbsFile file
return (BSL.fromStrict (digestToHexByteString (hash contents :: Digest MD5)), BSL.fromStrict (digestToHexByteString (hash contents :: Digest SHA1)))
mavenConfigFromEnv :: (MonadIO m, E.MonadThrow m) => m MavenUploadConfig
mavenConfigFromEnv = do
url <- liftIO $ getEnv "MAVEN_URL"
user <- liftIO $ getEnv "MAVEN_USER"
password <- liftIO $ getEnv "MAVEN_PASSWORD"
mbAllowUnsecureTls <- liftIO $ lookupEnv "MAVEN_UNSECURE_TLS"
signingKey <- liftIO $ getEnv "GPG_KEY"
pure MavenUploadConfig
{ mucUrl = T.pack url
, mucUser = T.pack user
, mucPassword = T.pack password
, mucAllowUnsecureTls = MavenAllowUnsecureTls $ mbAllowUnsecureTls == Just "True"
, mucSigningKey = signingKey
}
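The pipeline is expected to provide these variables (see the env block in the Azure Pipelines step above). As a minimal sketch for local testing, assuming only the standard System.Environment API, with invented function name and placeholder values:
-- Illustrative sketch only, not part of Upload.hs.
import System.Environment (setEnv)

prepareMavenEnvForLocalTesting :: IO ()
prepareMavenEnvForLocalTesting = do
    setEnv "MAVEN_URL" "https://oss.sonatype.org"
    setEnv "MAVEN_USER" "ossrh-user"
    setEnv "MAVEN_PASSWORD" "ossrh-password"
    setEnv "GPG_KEY" "<base64-encoded ASCII-armoured signing key>"
    -- MAVEN_UNSECURE_TLS is optional; only the literal string "True" enables it.
    setEnv "MAVEN_UNSECURE_TLS" "False"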
textToLazyByteString :: Text -> BSL.ByteString
textToLazyByteString text = BSL.fromStrict $ encodeUtf8 text
--
-- HTTP Response Handlers
--
httpResponseHandler :: (MonadIO m, MonadLogger m) => RetryStatus -> E.Handler m Bool
httpResponseHandler status = logRetries shouldRetry logRetry status
checkRepoStatusHandler :: (MonadIO m, MonadLogger m) => RetryStatus -> E.Handler m Bool
checkRepoStatusHandler status = logRetries shouldStatusRetry logStatusRetry status
shouldRetry :: (MonadIO m) => HttpException -> m Bool
shouldRetry e = case e of
HttpExceptionRequest _ ConnectionTimeout -> return True
-- Don't retry POST requests if the response times out, as the request might have been processed
HttpExceptionRequest request ResponseTimeout -> return (method request == "POST")
HttpExceptionRequest _ (StatusCodeException rsp _) ->
case statusCode (responseStatus rsp) of
408 {- requestTimeout -} -> return True
502 {- badGateway -} -> return True
503 {- serviceUnavailable -} -> return True
_ -> return False
_ -> return False
shouldStatusRetry :: (MonadIO m) => RepoNotClosed -> m Bool
shouldStatusRetry _ = return True
-- | For use with 'logRetries'.
logRetry :: (MonadIO m, MonadLogger m, E.Exception e) => Bool -> e -> RetryStatus -> m ()
logRetry shouldRetry err status = do
$logWarn (tshow err <> ". " <> " " <> tshow status <> " - " <> nextMsg)
return ()
where
nextMsg = if shouldRetry then "Retrying." else "Aborting after " <> (tshow $ rsCumulativeDelay status) <> "µs total delay."
logStatusRetry :: (MonadIO m, MonadLogger m, E.Exception e) => Bool -> e -> RetryStatus -> m ()
logStatusRetry shouldRetry _ status =
if shouldRetry
then
$logDebug ("Staging repository is still processing close request. Checked after " <> tshow (rsCumulativeDelay status) <> "µs")
else
$logDebug ("Aborting staging repository check after " <> (tshow $ rsCumulativeDelay status) <> "µs.")
uploadRetryPolicy :: RetryPolicy
uploadRetryPolicy = limitRetriesByCumulativeDelay (60 * 1000 * 1000) (exponentialBackoff (200 * 100))
-- The staging repository can take a number of minutes to change its
-- status to closed.
checkStatusRetryPolicy :: RetryPolicy
checkStatusRetryPolicy = limitRetriesByCumulativeDelay (5 * 60 * 1000 * 1000) (constantDelay (15 * 1000 * 1000))
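Read together, and assuming the retry combinators behave as their names suggest, these two policies amount to roughly the following schedule (a sketch, not part of the file):
-- uploadRetryPolicy: exponential backoff starting at 20,000 µs (20 ms) per retry,
--   giving up once the cumulative delay exceeds 60 s.
-- checkStatusRetryPolicy: one poll every 15 s, giving up once the cumulative delay
--   exceeds 5 min, i.e. on the order of 20 status checks.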
handleStatusRequest :: (MonadIO m) => Request -> Manager -> m Bool
handleStatusRequest request manager = do
statusResponse <- liftIO $ httpLbs request manager
repoStatus <- liftIO $ decodeRepoStatus $ responseBody statusResponse
if transitioning repoStatus
then
throwIO RepoNotClosed
else
return $ status repoStatus == "open"
--
-- Data Transfer Objects for the Nexus Staging REST API.
-- Note that fields from the REST response that are not used do not need
-- to be defined as Aeson will simply ignore them.
-- See https://oss.sonatype.org/nexus-staging-plugin/default/docs/index.html
--
data RepoStatusResponse = RepoStatusResponse
{ repositoryId :: Text
, status :: Text
, transitioning :: Bool
} deriving Show
data StagingPromote = StagingPromote { stagedRepositoryId :: Text }
data StagingPromoteResponse = StagingPromoteResponse { _data :: StagingPromote }
data NameValue = NameValue
{ name :: Text
, value :: Text
}
instance Show NameValue where
show NameValue{..} = T.unpack $ " " <> name <> ": " <> value
data RepoActivityEvent = RepoActivityEvent
{ name :: Text
, properties :: [NameValue]
}
instance Show RepoActivityEvent where
show RepoActivityEvent{..} = do
T.unpack $ name <> intercalatedValues
where
intercalatedValues = T.intercalate "\n " ([""] <> map tshow properties <> [""])
data RepoActivityDetails = RepoActivityDetails
{ name :: Text
, events :: [RepoActivityEvent]
}
instance Show RepoActivityDetails where
show RepoActivityDetails{..} = do
T.unpack $ name <> intercalatedValues
where
intercalatedValues = T.intercalate "\n " ([""] <> map tshow events <> [""])
-- 'Manual' parsing of required fields as the API uses the Haskell reserved keyword 'type'
instance FromJSON RepoStatusResponse where
parseJSON (Object o) = RepoStatusResponse <$> o .: "repositoryId" <*> o .: "type" <*> o .: "transitioning"
parseJSON _ = fail "Expected an Object"
instance FromJSON StagingPromote where
parseJSON = withObject "StagingPromote" $ \o -> StagingPromote
<$> o .: "stagedRepositoryId"
-- 'Manual' parsing of required fields as the API uses the Haskell reserved keyword 'data'
instance FromJSON StagingPromoteResponse where
parseJSON (Object o) = StagingPromoteResponse <$> o .: "data"
parseJSON _ = fail "Expected an Object"
instance FromJSON RepoActivityDetails where
parseJSON = withObject "RepoActivityDetails" $ \o -> RepoActivityDetails
<$> o .: "name"
<*> o .: "events"
instance FromJSON RepoActivityEvent where
parseJSON = withObject "RepoActivityEvent" $ \o -> RepoActivityEvent
<$> o .: "name"
<*> o .: "properties"
instance FromJSON NameValue where
parseJSON = withObject "NameValue" $ \o -> NameValue
<$> o .: "name"
<*> o .: "value"
decodeRepoStatus :: (MonadIO m) => BSL.ByteString -> m RepoStatusResponse
decodeRepoStatus jsonString = case (eitherDecode jsonString :: Either String RepoStatusResponse) of
Left err -> throwIO $ ParseJsonException err
Right r -> return r
decodeStagingPromoteResponse :: (MonadIO m) => Response BSL.ByteString -> m StagingPromoteResponse
decodeStagingPromoteResponse response = case (eitherDecode $ responseBody response :: Either String StagingPromoteResponse) of
Left err -> throwIO $ ParseJsonException err
Right r -> return r
decodeRepoActivityResponse :: (MonadIO m) => Response BSL.ByteString -> m [RepoActivityDetails]
decodeRepoActivityResponse response = case (eitherDecode $ responseBody response :: Either String [RepoActivityDetails]) of
Left err -> throwIO $ ParseJsonException err
Right r -> return r
--
-- Error definitions
--
data UploadFailure
= ParseJsonException String
| CannotDecodeSigningKey String
| RepoFailedToClose [Text]
instance E.Exception UploadFailure
instance Show UploadFailure where
show (ParseJsonException msg) = "Cannot parse JSON data: " <> msg
show (CannotDecodeSigningKey msg) = "Cannot Base64 decode signing key: " <> msg
show (RepoFailedToClose repoIds) = "The staging repositories " <> show repoIds <> " failed to close"
data RepoNotClosed
= RepoNotClosed
deriving Show
instance E.Exception RepoNotClosed


@ -16,13 +16,16 @@ module Util (
Artifact(..),
ArtifactLocation(..),
BazelLocations(..),
BazelTarget(..),
artifactFiles,
artifactCoords,
copyToReleaseDir,
buildTargets,
getBazelLocations,
resolvePomData
resolvePomData,
loggedProcess_,
) where
@ -66,14 +69,14 @@ data JarType
= Plain
-- ^ Plain java or scala library, without source jar.
| Lib
-- ^ A java library jar, with a source jar.
-- ^ A java library jar, with source and javadoc jars.
| Deploy
-- ^ Deploy jar, e.g. a fat jar containing transitive deps.
| Proto
-- ^ A java protobuf library (*-speed.jar).
| Scala
-- ^ A scala library jar, with a source jar. Use when source jar
-- is desired, otherwise use 'Plain'.
-- ^ A scala library jar, with source and scaladoc jars. Use when
-- source or scaladoc is desired, otherwise use 'Plain'.
deriving (Eq, Show)
instance FromJSON ReleaseType where
@ -94,6 +97,8 @@ data Artifact c = Artifact
, artPlatformDependent :: !PlatformDependent
, artBintrayPackage :: !BintrayPackage
-- ^ Defaults to sdk-components if not specified
, artMavenUpload :: MavenUpload
-- ^ Defaults to False if not specified
, artMetadata :: !c
} deriving Show
@ -103,6 +108,7 @@ instance FromJSON (Artifact (Maybe ArtifactLocation)) where
<*> o .: "type"
<*> (fromMaybe (PlatformDependent False) <$> o .:? "platformDependent")
<*> (fromMaybe PkgSdkComponents <$> o .:? "bintrayPackage")
<*> (fromMaybe (MavenUpload False) <$> o .:? "mavenUpload")
<*> o .:? "location"
data ArtifactLocation = ArtifactLocation
@ -129,7 +135,8 @@ buildTargets art@Artifact{..} =
in [jarTarget, pomTar] <>
[BazelTarget ("//" <> directory <> ":" <> srcJar) | Just srcJar <- pure (sourceJarName art)] <>
[BazelTarget ("//" <> directory <> ":" <> srcJar) | Just srcJar <- pure (scalaSourceJarName art)] <>
[BazelTarget ("//" <> directory <> ":" <> javadocJar) | Just javadocJar <- pure (javadocJarName art)]
[BazelTarget ("//" <> directory <> ":" <> javadocJar) | Just javadocJar <- pure (javadocJarName art)] <>
[BazelTarget ("//" <> directory <> ":" <> scaladocJar) | Just scaladocJar <- pure (scaladocJarName art)]
Zip -> [artTarget]
TarGz -> [artTarget]
@ -191,9 +198,9 @@ splitBazelTarget (BazelTarget t) =
_ -> error ("Malformed bazel target: " <> show t)
mainExt :: ReleaseType -> Text
mainExt Zip = ".zip"
mainExt TarGz = ".tar.gz"
mainExt Jar{} = ".jar"
mainExt Zip = "zip"
mainExt TarGz = "tar.gz"
mainExt Jar{} = "jar"
mainFileName :: ReleaseType -> Text -> Text
mainFileName releaseType name =
@ -217,6 +224,11 @@ scalaSourceJarName Artifact{..}
| Jar Scala <- artReleaseType = Just $ snd (splitBazelTarget artTarget) <> "_src.jar"
| otherwise = Nothing
scaladocJarName :: Artifact a -> Maybe Text
scaladocJarName Artifact{..}
| Jar Scala <- artReleaseType = Just $ snd (splitBazelTarget artTarget) <> "_scaladoc.jar"
| otherwise = Nothing
javadocJarName :: Artifact a -> Maybe Text
javadocJarName Artifact{..}
| Jar Lib <- artReleaseType = Just $ snd (splitBazelTarget artTarget) <> "_javadoc.jar"
@ -233,32 +245,61 @@ artifactFiles allArtifacts art@Artifact{..} = do
directory <- parseRelDir $ unpack directory
mainArtifactIn <- parseRelFile $ unpack $ mainFileName artReleaseType name
mainArtifactOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # mainExt artReleaseType))
mainArtifactOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "." # mainExt artReleaseType))
pomFileIn <- parseRelFile (unpack (name <> "_pom.xml"))
pomFileOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion #".pom"))
mbSourceJarIn <- traverse (parseRelFile . unpack) (sourceJarName art)
sourceJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-sources" # mainExt artReleaseType))
sourceJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-sources" # "." # mainExt artReleaseType))
mbScalaSourceJarIn <- traverse (parseRelFile . unpack) (scalaSourceJarName art)
scalaSourceJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-sources" # mainExt artReleaseType))
scalaSourceJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-sources" # "." # mainExt artReleaseType))
mbJavadocJarIn <- traverse (parseRelFile . unpack) (javadocJarName art)
javadocJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-javadoc" # mainExt artReleaseType))
javadocJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-javadoc" # "." # mainExt artReleaseType))
mbScaladocJarIn <- traverse (parseRelFile . unpack) (scaladocJarName art)
scaladocJarOut <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "-javadoc" # "." # mainExt artReleaseType))
let shouldReleasePlatInd = shouldRelease allArtifacts (PlatformDependent False)
pure $
[(directory </> mainArtifactIn, outDir </> mainArtifactOut) | shouldRelease allArtifacts artPlatformDependent] <>
[(directory </> pomFileIn, outDir </> pomFileOut) | isJar artReleaseType, shouldRelease allArtifacts (PlatformDependent False)] <>
[(directory </> sourceJarIn, outDir </> sourceJarOut) | shouldRelease allArtifacts (PlatformDependent False), Just sourceJarIn <- pure mbSourceJarIn] <>
[(directory </> scalaSourceJarIn, outDir </> scalaSourceJarOut) | shouldRelease allArtifacts (PlatformDependent False), Just scalaSourceJarIn <- pure mbScalaSourceJarIn] <>
[(directory </> javadocJarIn, outDir </> javadocJarOut) | shouldRelease allArtifacts (PlatformDependent False), Just javadocJarIn <- pure mbJavadocJarIn]
[(directory </> pomFileIn, outDir </> pomFileOut) | isJar artReleaseType, shouldReleasePlatInd] <>
[(directory </> sourceJarIn, outDir </> sourceJarOut) | shouldReleasePlatInd, Just sourceJarIn <- pure mbSourceJarIn] <>
[(directory </> scalaSourceJarIn, outDir </> scalaSourceJarOut) | shouldReleasePlatInd, Just scalaSourceJarIn <- pure mbScalaSourceJarIn] <>
[(directory </> javadocJarIn, outDir </> javadocJarOut) | shouldReleasePlatInd, Just javadocJarIn <- pure mbJavadocJarIn] <>
[(directory </> scaladocJarIn, outDir </> scaladocJarOut) | shouldReleasePlatInd, Just scaladocJarIn <- pure mbScaladocJarIn]
-- ^ Note that the Scaladoc is specified with the "javadoc" classifier.
-- | Given an artifact, produce a list of pairs of an input file and the Maven coordinates
artifactCoords :: E.MonadThrow m => AllArtifacts -> Artifact PomData -> m [(MavenCoords, Path Rel File)]
artifactCoords allArtifacts Artifact{..} = do
let PomData{..} = artMetadata
let jarClassifier = if getPlatformDependent artPlatformDependent then Just osName else Nothing
outDir <- parseRelDir $ unpack $
T.intercalate "/" pomGroupId #"/"# pomArtifactId #"/"# pomVersion #"/"
let ostxt = if getPlatformDependent artPlatformDependent then "-" <> osName else ""
mainArtifactFile <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion # ostxt # "." # mainExt artReleaseType))
pomFile <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion #".pom"))
sourcesFile <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion #"-sources.jar"))
javadocFile <- parseRelFile (unpack (pomArtifactId #"-"# pomVersion #"-javadoc.jar"))
let mavenCoords classifier artifactType =
MavenCoords { groupId = pomGroupId, artifactId = pomArtifactId, version = pomVersion, classifier, artifactType }
let shouldReleasePlatInd = shouldRelease allArtifacts (PlatformDependent False)
pure $ [ (mavenCoords jarClassifier $ mainExt artReleaseType, outDir </> mainArtifactFile) | shouldReleasePlatInd] <>
[ (mavenCoords Nothing "pom", outDir </> pomFile) | isJar artReleaseType, shouldReleasePlatInd] <>
[ (mavenCoords (Just "sources") "jar", outDir </> sourcesFile) | isJar artReleaseType, shouldReleasePlatInd] <>
[ (mavenCoords (Just "javadoc") "jar", outDir </> javadocFile) | isJar artReleaseType, shouldReleasePlatInd]
shouldRelease :: AllArtifacts -> PlatformDependent -> Bool
shouldRelease (AllArtifacts allArtifacts) (PlatformDependent platformDependent) =
allArtifacts || platformDependent || osName == "linux"
copyToReleaseDir :: (MonadLogger m, MonadIO m) => BazelLocations -> Path Abs Dir -> Path Rel File -> Path Rel File -> m ()
copyToReleaseDir BazelLocations{..} releaseDir inp out = do
binExists <- doesFileExist (bazelBin </> inp)