Merge pull request #327626 from emilazy/push-yvluzttstnzz

python3Packages.av: fix build
This commit is contained in:
Martin Weinelt 2024-07-17 00:16:36 +02:00 committed by GitHub
commit db8e14c479
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 85 additions and 61 deletions

View File

@ -4,6 +4,8 @@
buildPythonPackage,
cython,
fetchFromGitHub,
fetchurl,
linkFarm,
ffmpeg_6-headless,
numpy,
pillow,
@ -35,10 +37,18 @@ buildPythonPackage rec {
buildInputs = [ ffmpeg_6-headless ];
preCheck = ''
# ensure we import the built version
rm -r av
'';
preCheck =
let
# Update with `./update-test-samples.bash` if necessary.
testSamples = linkFarm "pyav-test-samples" (
lib.mapAttrs (_: fetchurl) (lib.importTOML ./test-samples.toml)
);
in
''
# ensure we import the built version
rm -r av
ln -s ${testSamples} tests/assets
'';
nativeCheckInputs = [
numpy
@ -46,61 +56,13 @@ buildPythonPackage rec {
pytestCheckHook
];
disabledTests =
[
# urlopen fails during DNS resolution
"test_writing_to_custom_io"
"test_decode_close_then_use"
# Tests that want to download FATE data, https://github.com/PyAV-Org/PyAV/issues/955
"test_vobsub"
"test_transcode"
"test_stream_tuples"
"test_stream_seek"
"test_stream_probing"
"test_seek_start"
"test_seek_middle"
"test_seek_int64"
"test_seek_float"
"test_seek_end"
"test_roundtrip"
"test_reading_from_write_readonl"
"test_reading_from_pipe_readonly"
"test_reading_from_file"
"test_reading_from_buffer"
"test_reading_from_buffer_no_see"
"test_parse"
"test_movtext"
"test_encoding_xvid"
"test_encoding_tiff"
"test_encoding_png"
"test_encoding_pcm_s24le"
"test_encoding_mpeg4"
"test_encoding_mpeg1video"
"test_encoding_mp2"
"test_encoding_mjpeg"
"test_encoding_h264"
"test_encoding_dvvideo"
"test_encoding_dnxhd"
"test_encoding_aac"
"test_decoded_video_frame_count"
"test_decoded_time_base"
"test_decoded_motion_vectors"
"test_decode_half"
"test_decode_audio_sample_count"
"test_data"
"test_container_probing"
"test_codec_tag"
"test_selection"
]
++ lib.optionals (stdenv.isDarwin) [
# Segmentation Faults
"test_encoding_with_pts"
"test_bayer_write"
];
disabledTests = [
# av.error.InvalidDataError: [Errno 1094995529] Invalid data found when processing input: 'custom_io_output.mpd'
"test_writing_to_custom_io_dash"
];
disabledTestPaths = [
# urlopen fails during DNS resolution
"tests/test_doctests.py"
# `__darwinAllowLocalNetworking` doesn't work for these; not sure why.
disabledTestPaths = lib.optionals stdenv.isDarwin [
"tests/test_timeout.py"
];

View File

@ -0,0 +1,17 @@
"fate-suite/aac/latm_stereo_to_51.ts" = { url = "http://fate.ffmpeg.org/fate-suite/aac/latm_stereo_to_51.ts", hash = "sha256-lVz0iby2IEUVdwKYamv4HVm8EUGHJS/cWY+QFBMaCBY=" }
"fate-suite/amv/MTV_high_res_320x240_sample_Penguin_Joke_MTV_from_WMV.amv" = { url = "http://fate.ffmpeg.org/fate-suite/amv/MTV_high_res_320x240_sample_Penguin_Joke_MTV_from_WMV.amv", hash = "sha256-O9YMj0+0bM4YyZNGgkZJL8E2aG+Y3lq8/c+DVht0McI=" }
"fate-suite/audio-reference/chorusnoise_2ch_44kHz_s16.wav" = { url = "http://fate.ffmpeg.org/fate-suite/audio-reference/chorusnoise_2ch_44kHz_s16.wav", hash = "sha256-KodB5hQkBFtfkI+L7hnkSonPM+IuOCNrTV3Vsy1bvhs=" }
"fate-suite/h264/interlaced_crop.mp4" = { url = "http://fate.ffmpeg.org/fate-suite/h264/interlaced_crop.mp4", hash = "sha256-SVWWaOcfOp718dvgkpgOWCYoV9Ylomv8MBYzbRqvbBE=" }
"fate-suite/hap/HAPQA_NoSnappy_127x1.mov" = { url = "http://fate.ffmpeg.org/fate-suite/hap/HAPQA_NoSnappy_127x1.mov", hash = "sha256-WMUqg9o84ki2AIIsGhY8P10KBc3qgCsmljqJXXRHbs8=" }
"fate-suite/mkv/codec_delay_opus.mkv" = { url = "http://fate.ffmpeg.org/fate-suite/mkv/codec_delay_opus.mkv", hash = "sha256-GanpfRyGKN36NLAa7pZehcM1F2VDCW3g6hhO26vFg1I=" }
"fate-suite/mov/displaymatrix.mov" = { url = "http://fate.ffmpeg.org/fate-suite/mov/displaymatrix.mov", hash = "sha256-Aq0/zcKKHI2dgThIKfYMXCjNI6WoVwy7VtD5Bke6krQ=" }
"fate-suite/mov/mov-1elist-ends-last-bframe.mov" = { url = "http://fate.ffmpeg.org/fate-suite/mov/mov-1elist-ends-last-bframe.mov", hash = "sha256-1g488WPvvzzeHXoQg7xZnrVCoGCz7sOUSpzZj6qWnhI=" }
"fate-suite/mov/white_zombie_scrunch-part.mov" = { url = "http://fate.ffmpeg.org/fate-suite/mov/white_zombie_scrunch-part.mov", hash = "sha256-apoPuBxsjqkjCaSdtgTJhpYFXMp5LbtZQz+lo3o9jx8=" }
"fate-suite/mpeg2/mpeg2_field_encoding.ts" = { url = "http://fate.ffmpeg.org/fate-suite/mpeg2/mpeg2_field_encoding.ts", hash = "sha256-logzOhRbniwhObnKts1JZqzRl4j9YgtLMdtcKJFUfLg=" }
"fate-suite/mxf/track_01_v02.mxf" = { url = "http://fate.ffmpeg.org/fate-suite/mxf/track_01_v02.mxf", hash = "sha256-AQ+UxFVfBgQwbEtoMautd02BL5kC6pAbsXD3SVRd9xE=" }
"fate-suite/png1/55c99e750a5fd6_50314226.png" = { url = "http://fate.ffmpeg.org/fate-suite/png1/55c99e750a5fd6_50314226.png", hash = "sha256-yADmkBgeMB5wv140gwnaDMjvErRTXZhXdBz02HZSMBc=" }
"fate-suite/qtrle/aletrek-rle.mov" = { url = "http://fate.ffmpeg.org/fate-suite/qtrle/aletrek-rle.mov", hash = "sha256-uXUvVkwuPbfs/rzT896ty3RZfvGoSPj3su+sjLPU09g=" }
"fate-suite/sub/MovText_capability_tester.mp4" = { url = "http://fate.ffmpeg.org/fate-suite/sub/MovText_capability_tester.mp4", hash = "sha256-Y2uhvfGrZaPebD6ZsJemzpOk+XHX6ukBceVauEit9h8=" }
"fate-suite/sub/vobsub.sub" = { url = "http://fate.ffmpeg.org/fate-suite/sub/vobsub.sub", hash = "sha256-X2rEMyTlo1xuUlqgx2uvqd2WWhfOCID9fraeGbaFPIs=" }
"pyav-curated/pexels/time-lapse-video-of-night-sky-857195.mp4" = { url = "https://pyav.org/datasets/pexels/time-lapse-video-of-night-sky-857195.mp4", hash = "sha256-6307VwepfoVNKeGm8WEMZtfowJZ27Hv2fwgp6J/Q0oE=" }
"pyav-curated/pexels/time-lapse-video-of-sunset-by-the-sea-854400.mp4" = { url = "https://pyav.org/datasets/pexels/time-lapse-video-of-sunset-by-the-sea-854400.mp4", hash = "sha256-2RWphhz5KWPSnJh8ARrC7aPjBa77DJO1Fv0/I4kWxYg=" }

View File

@ -0,0 +1,44 @@
#!/usr/bin/env bash
# Regenerate test-samples.toml by prefetching every test sample referenced
# from a PyAV source checkout. The TOML output is written next to this
# script and is consumed by the Nix expression via `lib.importTOML`.
set -o errexit
set -o nounset
# Defensive: fail a pipeline if any stage fails, not just the last one.
set -o pipefail
# Require exactly one argument: the path of the PyAV source tree to scan.
if test "$#" != 1; then
    printf >&2 'usage: update-test-samples.bash /path/to/PyAV/source\n'
    exit 2
fi
pyav_source=$1
# Redirect all further stdout into test-samples.toml, resolved relative to
# this script's real location so the script works from any working directory.
exec > "$(dirname "$(readlink -f "$0")")/test-samples.toml"
# fetch PATH URL — prefetch URL into the Nix store and print one TOML line
# mapping PATH to { url, hash } using the SRI hash reported by Nix.
fetch() {
    # Declare locals so the helper does not clobber global variables.
    local path=$1
    local url=$2
    # Declare and assign separately: `local x=$(cmd)` would mask cmd's exit
    # status and defeat `set -o errexit`.
    local prefetch_json sri_hash
    prefetch_json=$(nix store prefetch-file --json "${url}")
    sri_hash=$(jq -r .hash <<< "${prefetch_json}")
    printf '"%s" = { url = "%s", hash = "%s" }\n' "${path}" "${url}" "${sri_hash}"
}
# fetch_all FUNCTION BASE_PATH BASE_URL — scan the PyAV sources for calls of
# the form FUNCTION("relative/sample/path") or FUNCTION('…'), then prefetch
# each unique sample from BASE_URL and emit a TOML entry keyed under
# BASE_PATH via fetch().
fetch_all() {
    # Declare locals so repeated invocations do not leak state globally.
    local function=$1
    local base_path=$2
    local base_url=$3
    local samples unique_samples sample
    # Extract the quoted argument of every FUNCTION(...) call.
    # NOTE(review): if nothing matches, rg exits non-zero and `errexit`
    # aborts the whole script — presumably intentional, to flag a stale
    # pattern; confirm before relying on it.
    samples=$(
        rg \
            --only-matching \
            --no-filename \
            "\\b${function}\\([\"']([^\"']+)[\"']\\)" \
            --replace '$1' \
            "${pyav_source}"
    )
    unique_samples=$(sort -u <<< "${samples}")
    while IFS= read -r sample; do
        fetch "${base_path}/${sample}" "${base_url}/${sample}"
    done <<< "${unique_samples}"
}
# Scan for fate_suite("…") calls (FFmpeg FATE sample collection) and
# curated("…") calls (PyAV's own dataset mirror) and emit TOML entries.
fetch_all fate_suite fate-suite "http://fate.ffmpeg.org/fate-suite"
fetch_all curated pyav-curated "https://pyav.org/datasets"

View File

@ -770,6 +770,7 @@
aiodhcpwatcher
aiodiscover
async-upnp-client
av
bleak
bleak-retry-connector
bluetooth-adapters
@ -778,7 +779,6 @@
cached-ipaddress
dbus-fast
fnv-hash-fast
ha-av
ha-ffmpeg
habluetooth
hass-nabucasa
@ -1483,7 +1483,7 @@
aio-georss-gdacs
];
"generic" = ps: with ps; [
ha-av
av
pillow
];
"generic_hygrostat" = ps: with ps; [
@ -4300,7 +4300,7 @@
"stookwijzer" = ps: with ps; [
]; # missing inputs: stookwijzer
"stream" = ps: with ps; [
ha-av
av
numpy_1
pyturbojpeg
];

View File

@ -41,6 +41,7 @@ PKG_SET = "home-assistant.python.pkgs"
PKG_PREFERENCES = {
"fiblary3": "fiblary3-fork", # https://github.com/home-assistant/core/issues/66466
"HAP-python": "hap-python",
"ha-av": "av",
"numpy": "numpy_1",
"ollama-hass": "ollama",
"paho-mqtt": "paho-mqtt_1",