Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions pkgs/build-support/fetchurl/builder.sh
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi

tryDownload() {
local url="$1"
local target="$2"
echo
echo "trying $url"
local curlexit=18;
Expand All @@ -44,7 +45,7 @@ tryDownload() {
# if we get error code 18, resume partial download
while [ $curlexit -eq 18 ]; do
# keep this inside an if statement, since on failure it doesn't abort the script
if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
if "${curl[@]}" -C - --fail "$url" --output "$target"; then
success=1
break
else
Expand Down Expand Up @@ -81,7 +82,9 @@ tryHashedMirrors() {
if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
--fail --silent --show-error --head "$url" \
--write-out "%{http_code}" --output /dev/null > code 2> log; then
tryDownload "$url"
# Directly download to $out, because postFetch doesn't need to run,
# since hashed mirrors provide pre-built derivation outputs.
tryDownload "$url" "$out"

# We skip postFetch here, because hashed-mirrors are
# already content addressed. So if $outputHash is in the
Expand Down Expand Up @@ -156,7 +159,7 @@ for url in $urls; do
;;
esac
fi
tryDownload "$url"
tryDownload "$url" "$downloadedFile"
if test -n "$success"; then finish; fi
done

Expand Down
20 changes: 20 additions & 0 deletions pkgs/build-support/fetchurl/tests.nix
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,24 @@
${jq}/bin/jq -r '.headers.Hello' $out | ${moreutils}/bin/sponge $out
'';
};
# Tests that downloadToTemp works with hashedMirrors
no-skipPostFetch = testers.invalidateFetcherByDrvHash fetchurl {
# Make sure that we can only download from hashed mirrors
url = "http://broken";
# A file with this hash is definitely on tarballs.nixos.org
sha256 = "1j1y3cq6ys30m734axc0brdm2q9n2as4h32jws15r7w5fwr991km";

# No chance
curlOptsList = [
"--retry"
"0"
];

downloadToTemp = true;
# Usually postFetch is needed with downloadToTemp to populate $out from
# $downloadedFile, but here we know that because the URL is broken, it will
# have to fall back to fetching the previously-built derivation from
# tarballs.nixos.org, which provides pre-built derivation outputs.

};
}
Loading