Merge pull request #126162 from fabaff/bump-boto

python3Packages.mypy-boto3-builder: 4.12.0 -> 4.14.1
Fabian Affolter, 2021-06-10 13:45:19 +02:00 (committed by GitHub)
commit 3f7c764f0e
9 changed files with 152 additions and 82 deletions

Changed file: boto3 (file paths are not shown in this view)

@@ -13,11 +13,11 @@
 buildPythonPackage rec {
   pname = "boto3";
-  version = "1.17.52"; # N.B: if you change this, change botocore and awscli to a matching version
+  version = "1.17.88"; # N.B: if you change this, change botocore and awscli to a matching version

   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-7WQMF8l68om+RpN0DBy/laRW6cSV45c6Htb1GjloRtI=";
+    sha256 = "sha256-pxXKbERX1W6j4+/em9yL5BwpsvKpBPvRK+/bnLXiieQ=";
   };

   propagatedBuildInputs = [ botocore jmespath s3transfer ] ++ lib.optionals (!isPy3k) [ futures ];
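The N.B. comments are the operative constraint in this PR: boto3, botocore, s3transfer, and awscli release in lockstep, so all four are bumped together (note the shared .88 patch level: boto3 1.17.88, botocore 1.20.88, awscli 1.19.88). The same rule applies to an out-of-tree bump; a minimal overlay sketch, using the stock overridePythonAttrs and fetchPypi helpers and the hash from this diff (the matching botocore and s3transfer overrides are elided):

  final: prev: {
    python3 = prev.python3.override {
      packageOverrides = pyFinal: pyPrev: {
        # Bump boto3; botocore and s3transfer need matching overrides.
        boto3 = pyPrev.boto3.overridePythonAttrs (old: rec {
          version = "1.17.88";
          src = pyFinal.fetchPypi {
            inherit (old) pname;
            inherit version;
            sha256 = "sha256-pxXKbERX1W6j4+/em9yL5BwpsvKpBPvRK+/bnLXiieQ=";
          };
        });
      };
    };
  }

Overriding inside packageOverrides keeps the pinned trio consistent for every downstream Python package built from that set.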

Changed file: botocore

@@ -13,11 +13,11 @@
 buildPythonPackage rec {
   pname = "botocore";
-  version = "1.20.52"; # N.B: if you change this, change boto3 and awscli to a matching version
+  version = "1.20.88"; # N.B: if you change this, change boto3 and awscli to a matching version

   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-3V9YCOxIqZm5Y0s4etarehojuh+XEqh1Bm0jSAj4qmI=";
+    sha256 = "sha256-vJie2rUtR4iq3Y0a/5JfXGp8vGiQC/2443mWWurBcxc=";
   };

   propagatedBuildInputs = [

Changed file: itemloaders

@@ -1,7 +1,7 @@
 { lib
 , buildPythonPackage
 , fetchFromGitHub
-, isPy27
+, pythonOlder
 , w3lib
 , parsel
 , jmespath
@@ -12,8 +12,7 @@
 buildPythonPackage rec {
   pname = "itemloaders";
   version = "1.0.4";
-
-  disabled = isPy27;
+  disabled = pythonOlder "3.6";

   # Tests not included in PyPI tarball
   src = fetchFromGitHub {
@@ -27,6 +26,14 @@ buildPythonPackage rec {
   checkInputs = [ pytestCheckHook ];

+  disabledTests = [
+    # Tests are failing (AssertionError: Lists differ: ...)
+    "test_nested_css"
+    "test_nested_xpath"
+  ];
+
+  pythonImportsCheck = [ "itemloaders" ];
+
   meta = with lib; {
     description = "Base library for scrapy's ItemLoader";
     homepage = "https://github.com/scrapy/itemloaders";

Changed file: mypy-boto3-builder

@@ -14,14 +14,14 @@
 buildPythonPackage rec {
   pname = "mypy-boto3-builder";
-  version = "4.12.0";
+  version = "4.14.1";

   disabled = pythonOlder "3.6";

   src = fetchFromGitHub {
     owner = "vemel";
     repo = "mypy_boto3_builder";
     rev = version;
-    sha256 = "09kbmrpnph5kbxlqqavpxg3l85dmk3vzmwawa4s09y9gknxxkcv9";
+    sha256 = "sha256-y55bPi70ldd528Olr2atXHm5JHiLNBZ396D9qwbBmkc=";
   };

   propagatedBuildInputs = [
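Like several hashes in this commit, the hash above moves from bare base32 to SRI notation; both spell the same sha256 digest, and the fetchers accept either. (Conversion is presumably `nix to-sri --type sha256 <hash>`, or `nix hash to-sri` on Nix >= 2.4 — command spelling assumed, check your Nix version.) As a valid-Nix illustration of the two spellings of the digest above:

  let
    # One digest, two accepted spellings: base32 (old form) and SRI
    # (new form), which prefixes the algorithm and uses base64.
    base32 = "09kbmrpnph5kbxlqqavpxg3l85dmk3vzmwawa4s09y9gknxxkcv9";
    sri = "sha256-y55bPi70ldd528Olr2atXHm5JHiLNBZ396D9qwbBmkc=";
  in sri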

Changed file: mypy-boto3-s3

@@ -8,12 +8,12 @@
 buildPythonPackage rec {
   pname = "mypy-boto3-s3";
-  version = "1.17.71";
+  version = "1.17.88";

   disabled = pythonOlder "3.6";

   src = fetchPypi {
     inherit pname version;
-    sha256 = "0zgx3f41j80xy203jwms7j72svxy10ry5v9w3ql817ai4lcrspnn";
+    sha256 = "sha256-ba7ohsScaag3tDnUbs1eWYO/YSWcgJlo9VpkdU40x5o=";
   };

   propagatedBuildInputs = [

Changed file: parsel

@@ -1,14 +1,13 @@
 { lib
 , buildPythonPackage
+, cssselect
 , fetchPypi
-, pytest
-, pytestrunner
 , functools32
+, isPy27
+, lxml
+, pytestCheckHook
 , six
 , w3lib
-, lxml
-, cssselect
-, isPy27
 }:

 buildPythonPackage rec {
@@ -20,17 +19,47 @@ buildPythonPackage rec {
     sha256 = "0yawf9r3r863lwxj0n89i7h3n8xjbsl5b7n6xg76r68scl5yzvvh";
   };

-  checkInputs = [ pytest pytestrunner ];
-  propagatedBuildInputs = [ six w3lib lxml cssselect ] ++ lib.optionals isPy27 [ functools32 ];
+  propagatedBuildInputs = [
+    cssselect
+    lxml
+    six
+    w3lib
+  ] ++ lib.optionals isPy27 [
+    functools32
+  ];

-  checkPhase = ''
-    py.test
-  '';
+  checkInputs = [
+    pytestCheckHook
+  ];
+
+  postPatch = ''
+    substituteInPlace setup.py \
+      --replace "'pytest-runner'," ""
+  '';
+
+  disabledTests = [
+    # Tests are outdated and failing (AssertionError: Lists differ: ...)
+    # https://github.com/scrapy/parsel/pull/174
+    "test_differences_parsing_xml_vs_html"
+    "test_nested_selectors"
+    "test_re"
+    "test_replacement_null_char_from_body"
+    "test_select_on_text_nodes"
+    "test_selector_get_alias"
+    "test_selector_getall_alias"
+    "test_selector_over_text"
+    "test_selectorlist_get_alias"
+    "test_selectorlist_getall_alias"
+    "test_slicing"
+    "test_text_pseudo_element"
+  ];
+
+  pythonImportsCheck = [ "parsel" ];

   meta = with lib; {
     homepage = "https://github.com/scrapy/parsel";
-    description = "Parsel is a library to extract data from HTML and XML using XPath and CSS selectors";
+    description = "Python library to extract data from HTML and XML using XPath and CSS selectors";
     license = licenses.bsd3;
     maintainers = with maintainers; [ fab ];
   };
 }
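The postPatch above is a recurring nixpkgs pattern: pytest-runner only serves the deprecated `setup.py test` entry point, so it is stripped from setup_requires now that pytestCheckHook drives the tests. The substitution matches the literal quoting used in parsel's setup.py; a slightly more defensive spelling (hypothetical, not what this commit does) could cover both quote styles, since substituteInPlace does not fail on an unmatched --replace pattern:

  postPatch = ''
    # Drop the setup_requires on pytest-runner; tests run via pytestCheckHook.
    substituteInPlace setup.py \
      --replace "'pytest-runner'," "" \
      --replace '"pytest-runner",' ""
  '';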

Changed file: s3transfer

@@ -14,11 +14,11 @@
 buildPythonPackage rec {
   pname = "s3transfer";
-  version = "0.3.6";
+  version = "0.4.2";

   src = fetchPypi {
     inherit pname version;
-    sha256 = "c5dadf598762899d8cfaecf68eba649cd25b0ce93b6c954b156aaa3eed160547";
+    sha256 = "sha256-ywIvSxZVHt67sxo3fT8JYA262nNj2MXbeXbn9Hcy4bI=";
   };

   propagatedBuildInputs =

Changed file: scrapy

@@ -1,64 +1,89 @@
 { lib
 , stdenv
-, buildPythonPackage
-, isPy27
-, fetchPypi
-, glibcLocales
-, pytestCheckHook
-, testfixtures
-, pillow
-, twisted
-, cryptography
-, w3lib
-, lxml
-, queuelib
-, pyopenssl
-, service-identity
-, parsel
-, pydispatcher
-, cssselect
-, zope_interface
-, protego
-, jmespath
-, sybil
-, pytest-twisted
 , botocore
+, buildPythonPackage
+, cryptography
+, cssselect
+, fetchFromGitHub
+, fetchpatch
+, glibcLocales
+, installShellFiles
+, itemadapter
+, itemloaders
+, jmespath
+, lxml
+, parsel
+, pillow
+, protego
+, pydispatcher
+, pyopenssl
+, pytest-twisted
+, pytestCheckHook
+, pythonOlder
+, queuelib
+, service-identity
+, sybil
+, testfixtures
+, twisted
+, w3lib
+, zope_interface
 }:

 buildPythonPackage rec {
-  version = "2.4.1";
-  pname = "Scrapy";
+  pname = "scrapy";
+  version = "2.5.0";

-  disabled = isPy27;
+  disabled = pythonOlder "3.6";
+
+  src = fetchFromGitHub {
+    owner = pname;
+    repo = pname;
+    rev = version;
+    sha256 = "09lxnjz1cw37i9bgk8sci2xxknj20gi2lq8l7i0b3xw7q8bxzp7h";
+  };
+
+  nativeBuildInputs = [
+    installShellFiles
+  ];
+
+  propagatedBuildInputs = [
+    cryptography
+    cssselect
+    itemadapter
+    itemloaders
+    lxml
+    parsel
+    protego
+    pydispatcher
+    pyopenssl
+    queuelib
+    service-identity
+    twisted
+    w3lib
+    zope_interface
+  ];

   checkInputs = [
+    botocore
     glibcLocales
     jmespath
     pytestCheckHook
     sybil
     testfixtures
     pillow
     pytest-twisted
-    botocore
   ];

-  propagatedBuildInputs = [
-    twisted
-    cryptography
-    cssselect
-    lxml
-    parsel
-    pydispatcher
-    pyopenssl
-    queuelib
-    service-identity
-    w3lib
-    zope_interface
-    protego
-    itemadapter
-    itemloaders
-  ];
+  patches = [
+    # Require setuptools, https://github.com/scrapy/scrapy/pull/5122
+    (fetchpatch {
+      name = "add-setuptools.patch";
+      url = "https://github.com/scrapy/scrapy/commit/4f500342c8ad4674b191e1fab0d1b2ac944d7d3e.patch";
+      sha256 = "14030sfv1cf7dy4yww02b49mg39cfcg4bv7ys1iwycfqag3xcjda";
+    })
+    # Make Twisted[http2] installation optional, https://github.com/scrapy/scrapy/pull/5113
+    (fetchpatch {
+      name = "remove-h2.patch";
+      url = "https://github.com/scrapy/scrapy/commit/c5b1ee810167266fcd259f263dbfc0fe0204761a.patch";
+      sha256 = "1gw28wg8qcb0al59rz214hm17smspi6j5kg62nr1r850pykyrsqk";
+    })
+  ];

   LC_ALL = "en_US.UTF-8";
@@ -68,39 +93,48 @@ buildPythonPackage rec {
     substituteInPlace pytest.ini --replace "--doctest-modules" ""
   '';

-  pytestFlagsArray = [
-    "--ignore=tests/test_proxy_connect.py"
-    "--ignore=tests/test_utils_display.py"
-    "--ignore=tests/test_command_check.py"
+  disabledTestPaths = [
+    "tests/test_proxy_connect.py"
+    "tests/test_utils_display.py"
+    "tests/test_command_check.py"
+    # Don't test the documentation
+    "docs"
   ];

   disabledTests = [
     # It's unclear if the failures are related to libxml2, https://github.com/NixOS/nixpkgs/pull/123890
     "test_nested_css"
     "test_nested_xpath"
     "test_flavor_detection"
     # Requires network access
     "FTPFeedStorageTest"
     "test_noconnect"
     "test_retry_dns_error"
+    "FeedExportTest"
+    "test_custom_asyncio_loop_enabled_true"
+    "test_custom_loop_asyncio"
+    "test_custom_loop_asyncio_deferred_signal"
+    "FileFeedStoragePreFeedOptionsTest" # https://github.com/scrapy/scrapy/issues/5157
   ] ++ lib.optionals stdenv.isDarwin [
     "test_xmliter_encoding"
     "test_download"
   ];

-  src = fetchPypi {
-    inherit pname version;
-    sha256 = "68c48f01a58636bdf0f6fcd5035a19ecf277b58af24bd70c36dc6e556df3e005";
-  };
-
   postInstall = ''
-    install -m 644 -D extras/scrapy.1 $out/share/man/man1/scrapy.1
+    installManPage extras/scrapy.1
     install -m 644 -D extras/scrapy_bash_completion $out/share/bash-completion/completions/scrapy
     install -m 644 -D extras/scrapy_zsh_completion $out/share/zsh/site-functions/_scrapy
   '';

+  pythonImportsCheck = [ "scrapy" ];
+
   __darwinAllowLocalNetworking = true;

   meta = with lib; {
-    description = "A fast high-level web crawling and web scraping framework, used to crawl websites and extract structured data from their pages";
+    description = "High-level web crawling and web scraping framework";
+    longDescription = ''
+      Scrapy is a fast high-level web crawling and web scraping framework, used to crawl
+      websites and extract structured data from their pages. It can be used for a wide
+      range of purposes, from data mining to monitoring and automated testing.
+    '';
     homepage = "https://scrapy.org/";
     license = licenses.bsd3;
     maintainers = with maintainers; [ drewkett marsam ];
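The pytestFlagsArray-to-disabledTestPaths switch above is mostly declarative sugar: pytestCheckHook expands each disabledTestPaths entry back into an `--ignore=<path>` flag, and the hook can also sanity-check that each path exists, which a raw --ignore flag would silently tolerate (behavior assumed from the hook's usual implementation). The two spellings side by side, as a sketch; only the second form is what the commit keeps:

  {
    # Old spelling: pass raw flags through to pytest.
    pytestFlagsArray = [ "--ignore=tests/test_proxy_connect.py" ];
  }

  {
    # New spelling: the hook expands each entry to --ignore=<path>.
    disabledTestPaths = [ "tests/test_proxy_connect.py" ];
  }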

Changed file: awscli

@@ -21,11 +21,11 @@ let
 in
 with py.pkgs; buildPythonApplication rec {
   pname = "awscli";
-  version = "1.19.52"; # N.B: if you change this, change botocore and boto3 to a matching version too
+  version = "1.19.88"; # N.B: if you change this, change botocore and boto3 to a matching version too

   src = fetchPypi {
     inherit pname version;
-    sha256 = "sha256-keKyuNeDC/90pn89KjoDTO8AGsmI8nqfDNSeGyM6iHQ=";
+    sha256 = "sha256-LfWSE3dDTJ0BHcaaY49Nd9RAZgj5b++NFeYhkIfwQX0=";
   };

   # https://github.com/aws/aws-cli/issues/4837
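The `let ... in with py.pkgs;` framing visible above means awscli builds against a private, overridden Python package set (the `py` binding from the elided let-block) rather than the global python3Packages — which is how it can hold tightly-pinned dependencies without affecting other packages. A minimal, evaluable sketch of that pattern, with a hypothetical override standing in for whatever the real let-block pins:

  let
    pkgs = import <nixpkgs> { };
    py = pkgs.python3.override {
      packageOverrides = self: super: {
        # Hypothetical pin: adjust a package inside this private set
        # without touching the global python3Packages.
        six = super.six.overridePythonAttrs (old: { doCheck = false; });
      };
    };
  in
  py.pkgs.six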