fetchers: uniformly support the hash attribute (#342173)

commit d590835329
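The pattern repeated across every hunk below is the same: each fetcher is wrapped in lib.fetchers.withNormalizedHash, and its hand-rolled sha256 plumbing is replaced by outputHash/outputHashAlgo arguments, so callers can uniformly pass the modern hash attribute (an SRI string such as "sha256-..."). The sketch below is an illustrative approximation of what such a wrapper has to do, not the actual lib.fetchers implementation; all names and details in it are assumptions for illustration only.

# Illustrative only: a simplified stand-in for lib.fetchers.withNormalizedHash,
# not the real implementation. It accepts either `hash` (an SRI string whose
# prefix names the algorithm) or exactly one legacy named argument such as
# `sha256`, and hands the wrapped fetcher the outputHash/outputHashAlgo pair.
{ hashTypes ? [ "sha256" ] }:
fetcher:
args:
let
  legacy  = builtins.filter (t: builtins.hasAttr t args) hashTypes;
  given   = (if args ? hash then 1 else 0) + builtins.length legacy;
  cleaned = builtins.removeAttrs args ([ "hash" ] ++ hashTypes);
in
if given != 1 then
  throw "exactly one of `hash` or one of: ${toString hashTypes} must be given"
else if args ? hash then
  # An SRI hash carries its algorithm, so outputHashAlgo can stay null.
  fetcher (cleaned // { outputHash = args.hash; outputHashAlgo = null; })
else
  fetcher (cleaned // {
    outputHash = args.${builtins.head legacy};
    outputHashAlgo = builtins.head legacy;
  })

The real helper presumably also keeps the resulting fetcher overridable, which would explain why the explicit lib.makeOverridable wrappers disappear from fetchcvs and fetchdarcs below; the sketch leaves that part out.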
fetchbzr/default.nix
@@ -1,15 +1,16 @@
-{ stdenvNoCC, breezy }:
+{ lib, stdenvNoCC, breezy }:
 
-{ url, rev, sha256 }:
+lib.fetchers.withNormalizedHash { } (
+  { url, rev, outputHash, outputHashAlgo }:
 
   stdenvNoCC.mkDerivation {
     name = "bzr-export";
 
     builder = ./builder.sh;
     nativeBuildInputs = [ breezy ];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     inherit url rev;
   }
+)

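At call sites the bzr fetcher is now invoked with the hash attribute (compare the gweled hunk near the end of this diff). A sketch; only the attribute names come from the diff, the values are placeholders:

{ fetchbzr }:

fetchbzr {
  url  = "lp:some-project";   # placeholder Launchpad branch
  rev  = "123";               # placeholder revision
  hash = "sha256-...";        # placeholder SRI hash
}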
fetchbzr (prefetch script)
@@ -9,7 +9,7 @@ if test -z "$hashType"; then
     hashType=sha256
 fi
 if test -z "$hashFormat"; then
-    hashFormat=--base32
+    hashFormat=--sri
 fi
 
 if test -z "$url"; then

fetchcvs/default.nix
@@ -6,17 +6,18 @@
 {stdenvNoCC, cvs, openssh, lib}:
 
-lib.makeOverridable (
-{cvsRoot, module, tag ? null, date ? null, sha256}:
+lib.fetchers.withNormalizedHash { } (
+  {cvsRoot, module, tag ? null, date ? null, outputHash, outputHashAlgo}:
 
   stdenvNoCC.mkDerivation {
     name = "cvs-export";
     builder = ./builder.sh;
     nativeBuildInputs = [cvs openssh];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
-    inherit cvsRoot module sha256 tag date;
+    inherit cvsRoot module tag date;
   }
 )

fetchcvs (prefetch script)
@@ -59,7 +59,7 @@ if test -z "$finalPath"; then
     (cd "$tmpPath" && cvs -f -z0 -d $cvsRoot export "${args[*]}" -d cvs-export $module >&2)
 
     # Compute the hash.
-    hash=$(nix-hash --type $hashType $hashFormat $tmpFile)
+    hash=$(nix-hash --type $hashType ${hashFormat:-"--sri"} $tmpFile)
     if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
 
     # Add the downloaded file to the Nix store.

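The shell change above makes the helper report the computed hash in SRI form, so the value it prints can be pasted straight into the fetcher's hash attribute. A hypothetical fetchcvs call, with placeholder values:

{ fetchcvs }:

fetchcvs {
  cvsRoot = ":pserver:anonymous@cvs.example.org:/cvsroot";  # placeholder
  module  = "src";
  tag     = "some-release-tag";                             # placeholder
  hash    = "sha256-...";                                   # placeholder SRI hash, as printed by the prefetch helper
}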
fetchdarcs/default.nix
@@ -1,21 +1,23 @@
 {stdenvNoCC, darcs, cacert, lib}:
 
-lib.makeOverridable (
+lib.fetchers.withNormalizedHash { } (
   { url
   , rev ? null
   , context ? null
-  , sha256 ? ""
+  , outputHash ? lib.fakeHash
+  , outputHashAlgo ? null
   , name ? "fetchdarcs"
   }:
 
   stdenvNoCC.mkDerivation {
     builder = ./builder.sh;
     nativeBuildInputs = [cacert darcs];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     inherit url rev context name;
   }
 )

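The outputHash ? lib.fakeHash default replaces the old sha256 ? "" default, which suggests the usual trust-on-first-use workflow still applies: leave the hash out, let the first build fail with a hash mismatch, and copy the reported SRI hash into the expression. A sketch with placeholder values, assuming the wrapper tolerates a missing hash the way the default implies:

{ fetchdarcs }:

fetchdarcs {
  url = "https://hub.darcs.net/example/project";  # placeholder
  # No hash yet: the lib.fakeHash default makes the build fail with a
  # hash-mismatch error that prints the real "sha256-..." value to use here.
}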
fetchfossil/default.nix
@@ -1,33 +1,26 @@
 {stdenv, lib, fossil, cacert}:
 
+lib.fetchers.withNormalizedHash { } (
   { name ? null
   , url
   , rev
-  , sha256 ? ""
-  , hash ? ""
+  , outputHash ? lib.fakeHash
+  , outputHashAlgo ? null
   }:
 
-if hash != "" && sha256 != "" then
-  throw "Only one of sha256 or hash can be set"
-else
   stdenv.mkDerivation {
     name = "fossil-archive" + (lib.optionalString (name != null) "-${name}");
     builder = ./builder.sh;
     nativeBuildInputs = [fossil cacert];
 
     # Envvar docs are hard to find. A link for the future:
     # https://www.fossil-scm.org/index.html/doc/trunk/www/env-opts.md
     impureEnvVars = [ "http_proxy" ];
 
-    outputHashAlgo = if hash != "" then null else "sha256";
-    outputHashMode = "recursive";
-    outputHash = if hash != "" then
-      hash
-    else if sha256 != "" then
-      sha256
-    else
-      lib.fakeSha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     inherit url rev;
     preferLocalBuild = true;
   }
+)

fetchgx/default.nix
@@ -1,30 +1,31 @@
-{ stdenvNoCC, gx, gx-go, go, cacert }:
+{ lib, stdenvNoCC, gx, gx-go, go, cacert }:
 
-{ name, src, sha256 }:
+lib.fetchers.withNormalizedHash { } (
+  { name, src, outputHash, outputHashAlgo }:
 
   stdenvNoCC.mkDerivation {
     name = "${name}-gxdeps";
     inherit src;
 
     nativeBuildInputs = [ cacert go gx gx-go ];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     dontConfigure = true;
     doCheck = false;
     doInstallCheck = false;
 
     buildPhase = ''
       export GOPATH=$(pwd)/vendor
       mkdir -p vendor
       gx install
     '';
 
     installPhase = ''
       mv vendor $out
     '';
 
     preferLocalBuild = true;
   }
+)

fetchipfs/default.nix
@@ -1,50 +1,36 @@
-{ stdenv
+{ lib
+, stdenv
 , curl
 }:
 
+lib.fetchers.withNormalizedHash { hashTypes = [ "sha1" "sha256" "sha512" ]; } (
   { ipfs
   , url ? ""
   , curlOpts ? ""
-  , outputHash ? ""
-  , outputHashAlgo ? ""
-  , md5 ? ""
-  , sha1 ? ""
-  , sha256 ? ""
-  , sha512 ? ""
+  , outputHash
+  , outputHashAlgo
   , meta ? {}
   , port ? "8080"
   , postFetch ? ""
   , preferLocalBuild ? true
   }:
 
-let
-
-  hasHash = (outputHash != "" && outputHashAlgo != "")
-    || md5 != "" || sha1 != "" || sha256 != "" || sha512 != "";
-
-in
-
-if (!hasHash) then throw "Specify sha for fetchipfs fixed-output derivation" else stdenv.mkDerivation {
+  stdenv.mkDerivation {
     name = ipfs;
     builder = ./builder.sh;
     nativeBuildInputs = [ curl ];
 
     # New-style output content requirements.
-    outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
-        if sha512 != "" then "sha512" else if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
-    outputHash = if outputHash != "" then outputHash else
-        if sha512 != "" then sha512 else if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
-
+    inherit outputHash outputHashAlgo;
     outputHashMode = "recursive";
 
     inherit curlOpts
             postFetch
             ipfs
             url
             port
             meta;
 
     # Doing the download on a remote machine just duplicates network
     # traffic, so don't do that.
     inherit preferLocalBuild;
   }
+)

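fetchipfs is the one fetcher here that passes a non-default hashTypes list, replacing its hand-rolled md5/sha1/sha256/sha512 arguments (md5 is dropped). A hypothetical call, using an SRI hash whose prefix selects the algorithm; both values are placeholders:

{ fetchipfs }:

fetchipfs {
  ipfs = "QmExampleCidExampleCidExampleCidExampleCid";  # placeholder IPFS CID
  hash = "sha512-...";  # placeholder; sha1/sha256/sha512 per the hashTypes above
}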
fetchmtn/default.nix
@@ -1,25 +1,24 @@
 # You can specify some extra mirrors and a cache DB via options
 {lib, stdenvNoCC, monotone, defaultDBMirrors ? [], cacheDB ? "./mtn-checkout.db"}:
-# dbs is a list of strings
-# each is an url for sync
 
-# selector is mtn selector, like h:org.example.branch
-#
-{name ? "mtn-checkout", dbs ? [], sha256
-, selector ? "h:" + branch, branch}:
+lib.fetchers.withNormalizedHash { } (
+  # dbs is a list of strings, each is an url for sync
+  # selector is mtn selector, like h:org.example.branch
+  {name ? "mtn-checkout", dbs ? []
+  , outputHash, outputHashAlgo
+  , selector ? "h:" + branch, branch}:
 
   stdenvNoCC.mkDerivation {
     builder = ./builder.sh;
     nativeBuildInputs = [monotone];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     dbs = defaultDBMirrors ++ dbs;
     inherit branch cacheDB name selector;
 
     impureEnvVars = lib.fetchers.proxyImpureEnvVars;
 
   }
+)

fetchpijul/default.nix
@@ -45,12 +45,12 @@ else
     runHook postFixup
   '';
 
-  outputHashAlgo = if hash != "" then null else "sha256";
+  outputHashAlgo = null;
   outputHashMode = "recursive";
   outputHash = if hash != "" then
     hash
   else
-    lib.fakeSha256;
+    lib.fakeHash;
 
   inherit url change state channel;

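The switch from lib.fakeSha256 to lib.fakeHash goes together with outputHashAlgo = null: an SRI hash names its own algorithm in the prefix, so the fixed-output derivation no longer needs a separate algo attribute. A minimal illustration of the two attribute styles; the hash values are placeholders:

{
  # Old style: bare base32 digest plus an explicit algorithm.
  oldStyle = {
    outputHashAlgo = "sha256";
    outputHash = "0f0f0f...";   # placeholder base32 digest
  };
  # New style: SRI string; the "sha256-" prefix already names the algorithm.
  newStyle = {
    outputHashAlgo = null;
    outputHash = "sha256-...";  # placeholder SRI digest
  };
}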
fetchs3/default.nix
@@ -1,36 +1,37 @@
 { lib, runCommand, awscli }:
 
+lib.fetchers.withNormalizedHash { } (
   { s3url
   , name ? builtins.baseNameOf s3url
-  , sha256
+  , outputHash
+  , outputHashAlgo
   , region ? "us-east-1"
   , credentials ? null # Default to looking at local EC2 metadata service
   , recursiveHash ? false
   , postFetch ? null
   }:
 
   let
     mkCredentials = { access_key_id, secret_access_key, session_token ? null }: {
       AWS_ACCESS_KEY_ID = access_key_id;
       AWS_SECRET_ACCESS_KEY = secret_access_key;
       AWS_SESSION_TOKEN = session_token;
     };
 
     credentialAttrs = lib.optionalAttrs (credentials != null) (mkCredentials credentials);
   in runCommand name ({
     nativeBuildInputs = [ awscli ];
 
-    outputHashAlgo = "sha256";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
     outputHashMode = if recursiveHash then "recursive" else "flat";
 
     preferLocalBuild = true;
 
     AWS_DEFAULT_REGION = region;
   } // credentialAttrs) (if postFetch != null then ''
     downloadedFile="$(mktemp)"
     aws s3 cp ${s3url} $downloadedFile
     ${postFetch}
   '' else ''
     aws s3 cp ${s3url} $out
   '')
+)

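A hypothetical call to the updated fetchs3; the credentials attribute names come from the mkCredentials helper in the hunk, everything else is a placeholder:

{ fetchs3 }:

fetchs3 {
  s3url  = "s3://example-bucket/path/to/object.tar.gz";  # placeholder
  region = "eu-west-1";                                  # placeholder
  credentials = {
    access_key_id     = "EXAMPLEKEYID";                  # placeholder
    secret_access_key = "examplesecret";                 # placeholder
  };
  hash = "sha256-...";                                   # placeholder SRI hash
}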
fetchsvnssh/default.nix
@@ -1,17 +1,19 @@
-{stdenvNoCC, subversion, sshSupport ? true, openssh ? null, expect}:
-{username, password, url, rev ? "HEAD", sha256 ? ""}:
+{lib, stdenvNoCC, subversion, sshSupport ? true, openssh ? null, expect}:
+
+lib.fetchers.withNormalizedHash { } (
+  {username, password
+  , url, rev ? "HEAD"
+  , outputHash ? lib.fakeHash, outputHashAlgo ? null}:
 
   stdenvNoCC.mkDerivation {
     name = "svn-export-ssh";
     builder = ./builder.sh;
     nativeBuildInputs = [subversion expect];
 
-    outputHashAlgo = "sha256";
-    outputHashMode = "recursive";
-    outputHash = sha256;
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";
 
     sshSubversion = ./sshsubversion.exp;
 
     inherit username password url rev sshSupport openssh;
   }
+)

gweled (fetchbzr call site)
@@ -9,7 +9,7 @@ stdenv.mkDerivation rec {
   src = fetchbzr {
     url = "lp:gweled";
     rev = "108";
-    sha256 = "sha256-rM4dgbYfSrVqZwi+xzKuEtmtjK3HVvqeutmni1vleLo=";
+    hash = "sha256-rM4dgbYfSrVqZwi+xzKuEtmtjK3HVvqeutmni1vleLo=";
   };
 
   doCheck = false;

NetBSD src (fetchcvs call site)
@@ -8,5 +8,5 @@ fetchcvs {
   cvsRoot = ":pserver:anoncvs@anoncvs.NetBSD.org:/cvsroot";
   module = "src";
   tag = "netbsd-${lib.replaceStrings [ "." ] [ "-" ] version}-RELEASE";
-  sha256 = "sha256-+onT/ajWayaKALucaZBqoiEkvBBI400Fs2OCtMf/bYU=";
+  hash = "sha256-+onT/ajWayaKALucaZBqoiEkvBBI400Fs2OCtMf/bYU=";
 }

althttpd (fetchfossil call site)
@@ -7,7 +7,7 @@ stdenv.mkDerivation rec {
   src = fetchfossil {
     url = "https://sqlite.org/althttpd/";
     rev = "c0bdc68e6c56ef25";
-    sha256 = "sha256-VoDR5MlVlvar9wYA0kUhvDQVjxDwsZlqrNR3u4Tqw5c=";
+    hash = "sha256-VoDR5MlVlvar9wYA0kUhvDQVjxDwsZlqrNR3u4Tqw5c=";
   };
 
   buildInputs = [ openssl ];

all-packages.nix
@@ -1133,9 +1133,7 @@ with pkgs;
 
   fetchRepoProject = callPackage ../build-support/fetchrepoproject { };
 
-  fetchipfs = import ../build-support/fetchipfs {
-    inherit curl stdenv;
-  };
+  fetchipfs = callPackage ../build-support/fetchipfs { };
 
   fetchit = callPackage ../applications/networking/cluster/fetchit { };

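Because the fetcher now also takes lib as an input, the hand-written import with an explicit argument list is replaced by callPackage, which supplies any argument it can find in the package set automatically and keeps doing so as the fetcher's inputs change. A self-contained toy model of that behaviour, with dummy names standing in for the real package set:

# Toy callPackage: pass every function argument found in the set, let the
# caller override the rest. Evaluates to "hello via curl-dummy".
let
  pkgs = {
    lib = { greeting = "hello"; };
    curl = "curl-dummy";
    callPackage = fn: overrides:
      let
        f = if builtins.isFunction fn then fn else import fn;
        autoArgs = builtins.intersectAttrs (builtins.functionArgs f) pkgs;
      in f (autoArgs // overrides);
  };

  # Stand-in for fetchipfs/default.nix, which now also wants `lib`.
  fetcher = { lib, curl }: "${lib.greeting} via ${curl}";
in
pkgs.callPackage fetcher { }

nixpkgs' real callPackage additionally makes the result overridable; the toy version skips that.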