Merge master into staging-next

github-actions[bot] 2021-01-11 18:42:29 +00:00 committed by GitHub
commit 757ad34b82
70 changed files with 1147 additions and 413 deletions


@@ -6397,6 +6397,12 @@
     githubId = 364510;
     name = "Tobias Geerinckx-Rice";
   };
+  ndl = {
+    email = "ndl@endl.ch";
+    github = "ndl";
+    githubId = 137805;
+    name = "Alexander Tsvyashchenko";
+  };
   neeasade = {
     email = "nathanisom27@gmail.com";
     github = "neeasade";


@@ -402,6 +402,18 @@ http://some.json-exporter.host:7979/probe?target=https://example.com/some/json/e
    SDK licenses if your project requires it. See the androidenv documentation for more details.
    </para>
   </listitem>
+  <listitem>
+   <para>
+    The Searx module has been updated with the ability to configure the
+    service declaratively and uWSGI integration.
+    The option <literal>services.searx.configFile</literal> has been renamed
+    to <xref linkend="opt-services.searx.settingsFile"/> for consistency with
+    the new <xref linkend="opt-services.searx.settings"/>. In addition, the
+    <literal>searx</literal> uid and gid reservations have been removed
+    since they were not necessary: the service is now running with a
+    dynamically allocated uid.
+   </para>
+  </listitem>
  </itemizedlist>
 </section>
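As an illustration of the new declarative style described in the note above, a minimal sketch (the port, the environment-file path and the secret reference are placeholders; the full option set is defined in the Searx module further below):

  services.searx = {
    enable = true;
    # keeps the secret out of the world-readable Nix store; referenced below as @SEARX_SECRET_KEY@
    environmentFile = "/run/keys/searx.env";
    settings = {
      server.port = 8888;
      server.secret_key = "@SEARX_SECRET_KEY@";
    };
  };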


@@ -143,7 +143,7 @@ in
     nix-ssh = 104;
     dictd = 105;
     couchdb = 106;
-    searx = 107;
+    #searx = 107; # dynamically allocated as of 2020-10-27
     kippo = 108;
     jenkins = 109;
     systemd-journal-gateway = 110;
@@ -457,7 +457,7 @@ in
     #nix-ssh = 104; # unused
     dictd = 105;
     couchdb = 106;
-    searx = 107;
+    #searx = 107; # dynamically allocated as of 2020-10-27
     kippo = 108;
     jenkins = 109;
     systemd-journal-gateway = 110;


@@ -3,32 +3,133 @@
 with lib;

 let
+  runDir = "/run/searx";
   cfg = config.services.searx;
-  configFile = cfg.configFile;
+
+  hasEngines =
+    builtins.hasAttr "engines" cfg.settings &&
+    cfg.settings.engines != { };
+
+  # Script to merge NixOS settings with
+  # the default settings.yml bundled in searx.
+  mergeConfig = ''
+    cd ${runDir}
+
+    # find the default settings.yml
+    default=$(find '${cfg.package}/' -name settings.yml)
+
+    # write NixOS settings as JSON
+    cat <<'EOF' > settings.json
+    ${builtins.toJSON cfg.settings}
+    EOF
+
+    ${optionalString hasEngines ''
+      # extract and convert the default engines array to an object
+      ${pkgs.yq-go}/bin/yq r "$default" engines -j | \
+        ${pkgs.jq}/bin/jq 'reduce .[] as $e ({}; .[$e.name] = $e)' \
+        > engines.json
+
+      # merge and update the NixOS engines with the newly created object
+      cp settings.json temp.json
+      ${pkgs.jq}/bin/jq -s '. as [$s, $e] | $s | .engines |=
+        ($e * . | to_entries | map (.value))' \
+        temp.json engines.json > settings.json
+
+      # clean up temporary files
+      rm {engines,temp}.json
+    ''}
+
+    # merge the default and NixOS settings
+    ${pkgs.yq-go}/bin/yq m -P settings.json "$default" > settings.yml
+    rm settings.json
+
+    # substitute environment variables
+    env -0 | while IFS='=' read -r -d ''' n v; do
+      sed "s#@$n@#$v#g" -i settings.yml
+    done
+
+    # set strict permissions
+    chmod 400 settings.yml
+  '';
 in
 {
+  imports = [
+    (mkRenamedOptionModule
+      [ "services" "searx" "configFile" ]
+      [ "services" "searx" "settingsFile" ])
+  ];
+
   ###### interface

   options = {

     services.searx = {

-      enable = mkEnableOption
-        "the searx server. See https://github.com/asciimoo/searx";
+      enable = mkOption {
+        type = types.bool;
+        default = false;
+        relatedPackages = [ "searx" ];
+        description = "Whether to enable Searx, the meta search engine.";
+      };

-      configFile = mkOption {
+      environmentFile = mkOption {
         type = types.nullOr types.path;
         default = null;
-        description = "
-          The path of the Searx server configuration file. If no file
-          is specified, a default file is used (default config file has
-          debug mode enabled).
-        ";
+        description = ''
+          Environment file (see <literal>systemd.exec(5)</literal>
+          "EnvironmentFile=" section for the syntax) to define variables for
+          Searx. This option can be used to safely include secret keys into the
+          Searx configuration.
+        '';
+      };
+
+      settings = mkOption {
+        type = types.attrs;
+        default = { };
+        example = literalExample ''
+          { server.port = 8080;
+            server.bind_address = "0.0.0.0";
+            server.secret_key = "@SEARX_SECRET_KEY@";
+
+            engines.wolframalpha =
+              { shortcut = "wa";
+                api_key = "@WOLFRAM_API_KEY@";
+                engine = "wolframalpha_api";
+              };
+          }
+        '';
+        description = ''
+          Searx settings. These will be merged with (taking precedence over)
+          the default configuration. It's also possible to refer to
+          environment variables
+          (defined in <xref linkend="opt-services.searx.environmentFile"/>)
+          using the syntax <literal>@VARIABLE_NAME@</literal>.
+          <note>
+            <para>
+              For available settings, see the Searx
+              <link xlink:href="https://searx.github.io/searx/admin/settings.html">docs</link>.
+            </para>
+          </note>
+        '';
+      };
+
+      settingsFile = mkOption {
+        type = types.path;
+        default = "${runDir}/settings.yml";
+        description = ''
+          The path of the Searx server settings.yml file. If no file is
+          specified, a default file is used (default config file has debug mode
+          enabled). Note: setting this option overrides
+          <xref linkend="opt-services.searx.settings"/>.
+          <warning>
+            <para>
+              This file, along with any secret key it contains, will be copied
+              into the world-readable Nix store.
+            </para>
+          </warning>
+        '';
       };

       package = mkOption {
@@ -38,6 +139,38 @@ in
         description = "searx package to use.";
       };

+      runInUwsgi = mkOption {
+        type = types.bool;
+        default = false;
+        description = ''
+          Whether to run searx in uWSGI as a "vassal", instead of using its
+          built-in HTTP server. This is the recommended mode for public or
+          large instances, but is unnecessary for LAN or local-only use.
+          <warning>
+            <para>
+              The built-in HTTP server logs all queries by default.
+            </para>
+          </warning>
+        '';
+      };
+
+      uwsgiConfig = mkOption {
+        type = types.attrs;
+        default = { http = ":8080"; };
+        example = lib.literalExample ''
+          {
+            disable-logging = true;
+            http = ":8080";                   # serve via HTTP...
+            socket = "/run/searx/searx.sock"; # ...or UNIX socket
+          }
+        '';
+        description = ''
+          Additional configuration of the uWSGI vassal running searx. It
+          should notably specify on which interfaces and ports the vassal
+          should listen.
+        '';
+      };
+
     };
   };
@@ -45,33 +178,66 @@ in
   ###### implementation

-  config = mkIf config.services.searx.enable {
+  config = mkIf cfg.enable {
+    environment.systemPackages = [ cfg.package ];
+
     users.users.searx =
-      { uid = config.ids.uids.searx;
-        description = "Searx user";
-        createHome = true;
-        home = "/var/lib/searx";
+      { description = "Searx daemon user";
+        group = "searx";
+        isSystemUser = true;
       };

-    users.groups.searx =
-      { gid = config.ids.gids.searx;
+    users.groups.searx = { };
+
+    systemd.services.searx-init = {
+      description = "Initialise Searx settings";
+      serviceConfig = {
+        Type = "oneshot";
+        RemainAfterExit = true;
+        User = "searx";
+        RuntimeDirectory = "searx";
+        RuntimeDirectoryMode = "750";
+      } // optionalAttrs (cfg.environmentFile != null)
+        { EnvironmentFile = builtins.toPath cfg.environmentFile; };
+      script = mergeConfig;
+    };
+
+    systemd.services.searx = mkIf (!cfg.runInUwsgi) {
+      description = "Searx server, the meta search engine.";
+      wantedBy = [ "network.target" "multi-user.target" ];
+      requires = [ "searx-init.service" ];
+      after = [ "searx-init.service" ];
+      serviceConfig = {
+        User = "searx";
+        Group = "searx";
+        ExecStart = "${cfg.package}/bin/searx-run";
+      } // optionalAttrs (cfg.environmentFile != null)
+        { EnvironmentFile = builtins.toPath cfg.environmentFile; };
+      environment.SEARX_SETTINGS_PATH = cfg.settingsFile;
+    };
+
+    systemd.services.uwsgi = mkIf (cfg.runInUwsgi)
+      { requires = [ "searx-init.service" ];
+        after = [ "searx-init.service" ];
       };

-    systemd.services.searx =
-      {
-        description = "Searx server, the meta search engine.";
-        after = [ "network.target" ];
-        wantedBy = [ "multi-user.target" ];
-        serviceConfig = {
-          User = "searx";
-          ExecStart = "${cfg.package}/bin/searx-run";
-        };
-      } // (optionalAttrs (configFile != null) {
-        environment.SEARX_SETTINGS_PATH = configFile;
-      });
+    services.uwsgi = mkIf (cfg.runInUwsgi) {
+      enable = true;
+      plugins = [ "python3" ];

-    environment.systemPackages = [ cfg.package ];
+      instance.type = "emperor";
+      instance.vassals.searx = {
+        type = "normal";
+        strict = true;
+        immediate-uid = "searx";
+        immediate-gid = "searx";
+        lazy-apps = true;
+        enable-threads = true;
+        module = "searx.webapp";
+        env = [ "SEARX_SETTINGS_PATH=${cfg.settingsFile}" ];
+        pythonPackages = self: [ cfg.package ];
+      } // cfg.uwsgiConfig;
+    };
   };
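As a sketch of how the engine merge performed by mergeConfig behaves (assuming the settings.yml bundled with the searx package defines an engine whose name is "wolframalpha"), a fragment such as

  services.searx.settings.engines.wolframalpha = {
    api_key = "@WOLFRAM_API_KEY@";
    engine = "wolframalpha_api";
  };

only overrides those two keys of the matching default entry: the other engines, and the remaining keys of that entry, are kept from the bundled defaults, and @WOLFRAM_API_KEY@ is substituted from the environment file when searx-init runs.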


@@ -329,24 +329,24 @@ let self = {
   "20.03".ap-east-1.hvm-ebs = "ami-0d18fdd309cdefa86";
   "20.03".sa-east-1.hvm-ebs = "ami-09859378158ae971d";

-  # 20.09.1632.a6a3a368dda
-  "20.09".eu-west-1.hvm-ebs = "ami-01a79d5ce435f4db3";
-  "20.09".eu-west-2.hvm-ebs = "ami-0cbe14f32904e6331";
-  "20.09".eu-west-3.hvm-ebs = "ami-07f493412d6213de6";
-  "20.09".eu-central-1.hvm-ebs = "ami-01d4a0c2248cbfe38";
-  "20.09".eu-north-1.hvm-ebs = "ami-0003f54dd99d68e0f";
-  "20.09".us-east-1.hvm-ebs = "ami-068a62d478710462d";
-  "20.09".us-east-2.hvm-ebs = "ami-01ac677ff61399caa";
-  "20.09".us-west-1.hvm-ebs = "ami-04befdb203b4b17f6";
-  "20.09".us-west-2.hvm-ebs = "ami-0fb7bd4a43261c6b2";
-  "20.09".ca-central-1.hvm-ebs = "ami-06d5ee429f153f856";
-  "20.09".ap-southeast-1.hvm-ebs = "ami-0db0304e23c535b2a";
-  "20.09".ap-southeast-2.hvm-ebs = "ami-045983c4db7e36447";
-  "20.09".ap-northeast-1.hvm-ebs = "ami-0beb18d632cf64e5a";
-  "20.09".ap-northeast-2.hvm-ebs = "ami-0dd0316af578862db";
-  "20.09".ap-south-1.hvm-ebs = "ami-008d15ced81c88aed";
-  "20.09".ap-east-1.hvm-ebs = "ami-071f49713f86ea965";
-  "20.09".sa-east-1.hvm-ebs = "ami-05ded1ae35209b5a8";
+  # 20.09.2016.19db3e5ea27
+  "20.09".eu-west-1.hvm-ebs = "ami-0057cb7d614329fa2";
+  "20.09".eu-west-2.hvm-ebs = "ami-0d46f16e0bb0ec8fd";
+  "20.09".eu-west-3.hvm-ebs = "ami-0e8985c3ea42f87fe";
+  "20.09".eu-central-1.hvm-ebs = "ami-0eed77c38432886d2";
+  "20.09".eu-north-1.hvm-ebs = "ami-0be5bcadd632bea14";
+  "20.09".us-east-1.hvm-ebs = "ami-0a2cce52b42daccc8";
+  "20.09".us-east-2.hvm-ebs = "ami-09378bf487b07a4d8";
+  "20.09".us-west-1.hvm-ebs = "ami-09b4337b2a9e77485";
+  "20.09".us-west-2.hvm-ebs = "ami-081d3bb5fbee0a1ac";
+  "20.09".ca-central-1.hvm-ebs = "ami-020c24c6c607e7ac7";
+  "20.09".ap-southeast-1.hvm-ebs = "ami-08f648d5db009e67d";
+  "20.09".ap-southeast-2.hvm-ebs = "ami-0be390efaccbd40f9";
+  "20.09".ap-northeast-1.hvm-ebs = "ami-0c3311601cbe8f927";
+  "20.09".ap-northeast-2.hvm-ebs = "ami-0020146701f4d56cf";
+  "20.09".ap-south-1.hvm-ebs = "ami-0117e2bd876bb40d1";
+  "20.09".ap-east-1.hvm-ebs = "ami-0c42f97e5b1fda92f";
+  "20.09".sa-east-1.hvm-ebs = "ami-021637976b094959d";

   latest = self."20.09";
 }; in self


@@ -342,6 +342,7 @@ in
   sbt-extras = handleTest ./sbt-extras.nix {};
   scala = handleTest ./scala.nix {};
   sddm = handleTest ./sddm.nix {};
+  searx = handleTest ./searx.nix {};
   service-runner = handleTest ./service-runner.nix {};
   shadow = handleTest ./shadow.nix {};
   shadowsocks = handleTest ./shadowsocks {};
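With the registration above, the new test can be built on its own; a sketch, assuming the working directory is a nixpkgs checkout containing this commit (the expression can be handed to nix-build):

  # evaluates to the searx VM test derivation
  (import ./. { }).nixosTests.searx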

nixos/tests/searx.nix (new file, 109 lines)

@@ -0,0 +1,109 @@
import ./make-test-python.nix ({ pkgs, ...} :
{
name = "searx";
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = [ rnhmjoj ];
};
# basic setup: searx running the built-in webserver
nodes.base = { ... }: {
imports = [ ../modules/profiles/minimal.nix ];
services.searx = {
enable = true;
environmentFile = pkgs.writeText "secrets" ''
WOLFRAM_API_KEY = sometoken
SEARX_SECRET_KEY = somesecret
'';
settings.server =
{ port = "8080";
bind_address = "0.0.0.0";
secret_key = "@SEARX_SECRET_KEY@";
};
settings.engines = {
wolframalpha =
{ api_key = "@WOLFRAM_API_KEY@";
engine = "wolframalpha_api";
};
startpage.shortcut = "start";
};
};
};
# fancy setup: run in uWSGI and use nginx as proxy
nodes.fancy = { ... }: {
imports = [ ../modules/profiles/minimal.nix ];
services.searx = {
enable = true;
runInUwsgi = true;
uwsgiConfig = {
# serve using the uwsgi protocol
socket = "/run/searx/uwsgi.sock";
chmod-socket = "660";
# use /searx as url "mountpoint"
mount = "/searx=searx.webapp:application";
module = "";
manage-script-name = true;
};
};
# use nginx as reverse proxy
services.nginx.enable = true;
services.nginx.virtualHosts.localhost = {
locations."/searx".extraConfig =
''
include ${pkgs.nginx}/conf/uwsgi_params;
uwsgi_pass unix:/run/searx/uwsgi.sock;
'';
locations."/searx/static/".alias = "${pkgs.searx}/share/static/";
};
# allow nginx access to the searx socket
users.users.nginx.extraGroups = [ "searx" ];
};
testScript =
''
base.start()
with subtest("Settings have been merged"):
base.wait_for_unit("searx-init")
base.wait_for_file("/run/searx/settings.yml")
output = base.succeed(
"${pkgs.yq-go}/bin/yq r /run/searx/settings.yml"
" 'engines.(name==startpage).shortcut'"
).strip()
assert output == "start", "Settings not merged"
with subtest("Environment variables have been substituted"):
base.succeed("grep -q somesecret /run/searx/settings.yml")
base.succeed("grep -q sometoken /run/searx/settings.yml")
base.copy_from_vm("/run/searx/settings.yml")
with subtest("Basic setup is working"):
base.wait_for_open_port(8080)
base.wait_for_unit("searx")
base.succeed(
"${pkgs.curl}/bin/curl --fail http://localhost:8080"
)
base.shutdown()
with subtest("Nginx+uWSGI setup is working"):
fancy.start()
fancy.wait_for_open_port(80)
fancy.wait_for_unit("uwsgi")
fancy.succeed(
"${pkgs.curl}/bin/curl --fail http://localhost/searx >&2"
)
fancy.succeed(
"${pkgs.curl}/bin/curl --fail http://localhost/searx/static/js/bootstrap.min.js >&2"
)
'';
})


@@ -8,7 +8,7 @@
 , monero, miniupnpc, unbound, readline
 , boost, libunwind, libsodium, pcsclite
 , randomx, zeromq, libgcrypt, libgpgerror
-, hidapi, rapidjson
+, hidapi, rapidjson, quirc
 , trezorSupport ? true
 , libusb1 ? null
 , protobuf ? null
@@ -28,13 +28,13 @@ in
 stdenv.mkDerivation rec {
   pname = "monero-gui";
-  version = "0.17.1.8";
+  version = "0.17.1.9";

   src = fetchFromGitHub {
     owner = "monero-project";
     repo = "monero-gui";
     rev = "v${version}";
-    sha256 = "13cjrfdkr7c2ff8j2rg8hvhlc00af38vcs67wlx2109i2baq4pp3";
+    sha256 = "0143mmxk0jfb5pmjlx6v0knvf8v49kmkpjxlp6rw8lwnlf71xadn";
   };

   nativeBuildInputs = [
@@ -49,7 +49,7 @@ stdenv.mkDerivation rec {
     monero miniupnpc unbound readline
     randomx libgcrypt libgpgerror
     boost libunwind libsodium pcsclite
-    zeromq hidapi rapidjson
+    zeromq hidapi rapidjson quirc
   ] ++ optionals trezorSupport [ libusb1 protobuf python3 ]
     ++ optionals stdenv.isDarwin [ qtmacextras ];
@@ -75,6 +75,10 @@ stdenv.mkDerivation rec {
     substituteInPlace CMakeLists.txt \
       --replace 'add_subdirectory(monero)' \
                 'add_subdirectory(monero EXCLUDE_FROM_ALL)'
+
+    # use nixpkgs quirc
+    substituteInPlace CMakeLists.txt \
+      --replace 'add_subdirectory(external)' ""
   '';

   cmakeFlags = [ "-DARCH=${arch}" ];


@@ -17,13 +17,13 @@ assert trezorSupport -> all (x: x!=null) [ libusb1 protobuf python3 ];
 stdenv.mkDerivation rec {
   pname = "monero";
-  version = "0.17.1.8";
+  version = "0.17.1.9";

   src = fetchFromGitHub {
     owner = "monero-project";
     repo = "monero";
     rev = "v${version}";
-    sha256 = "10blazbk1602slx3wrmw4jfgkdry55iclrhm5drdficc5v3h735g";
+    sha256 = "0jqss4csvkcrhrmaa3vrnyv6yiwqpbfw7037clx9xcfm4qrrfiwy";
     fetchSubmodules = true;
   };


@@ -4,12 +4,13 @@ assert stdenv.lib.asserts.assertOneOf "fzf" fzf.pname [ "fzf" "skim" ];
 stdenv.mkDerivation {
   name = "kak-fzf";
-  version = "2020-05-24";
+  version = "2020-07-26";

   src = fetchFromGitHub {
     owner = "andreyorst";
     repo = "fzf.kak";
-    rev = "b2aeb26473962ab0bf3b51ba5c81c50c1d8253d3";
-    sha256 = "0bg845i814xh4y688p2zx726rsg0pd6nb4a7qv2fckmk639f4wzc";
+    rev = "f23daa698ad95493fbd675ae153e3cac13ef34e9";
+    hash = "sha256-BfXHTJ371ThOizMI/4BAbdJoaltGSP586hz4HqX1KWA=";
   };

   configurePhase = ''


@@ -74,7 +74,7 @@ with python3.pkgs; buildPythonApplication rec {
     description = "Private cmdline bookmark manager";
     homepage = "https://github.com/jarun/Buku";
     license = licenses.gpl3;
-    platforms = platforms.linux;
+    platforms = platforms.unix;
     maintainers = with maintainers; [ matthiasbeyer infinisil ];
   };
 }


@@ -13,13 +13,13 @@
 mkDerivation rec {
   pname = "mediaelch";
-  version = "2.8.2";
+  version = "2.8.4";

   src = fetchFromGitHub {
     owner = "Komet";
     repo = "MediaElch";
     rev = "v${version}";
-    sha256 = "0y26vfgrdym461lzmm5x3z5ai9ky09vlk3cy4sq6hwlj7mzcz0k7";
+    sha256 = "00jwmpdwbn6rgaha0iimcbwg9pwb8ilpjgxhv0p13j2c6dcisjzh";
     fetchSubmodules = true;
   };


@@ -0,0 +1,36 @@
{ stdenv
, fetchFromGitHub
, rustPlatform
, openssl
, pkg-config
, Security
}:
rustPlatform.buildRustPackage rec {
version = "0.2.2";
pname = "reddsaver";
src = fetchFromGitHub {
owner = "manojkarthick";
repo = "reddsaver";
rev = "v${version}";
sha256 = "0802jz503jhyz5q6mg1fj2bvkl4nggvs8y03zddd298ymplx5dbx";
};
cargoSha256 = "0z8q187331j3rxj8hzym25pwrikxbd0r829v29y8w6v5n0hb47fs";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl ]
++ stdenv.lib.optional stdenv.isDarwin Security;
# package does not contain tests as of v0.2.2
doCheck = false;
meta = with stdenv.lib; {
description = "CLI tool to download saved images from Reddit";
homepage = "https://github.com/manojkarthick/reddsaver";
license = with licenses; [ mit /* or */ asl20 ];
maintainers = [ maintainers.manojkarthick ];
};
}


@@ -19,18 +19,22 @@ python3Packages.buildPythonApplication rec {
     setuptools
   ];

+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "lieer" ];
+
   meta = with lib; {
     description = "Fast email-fetching and two-way tag synchronization between notmuch and GMail";
     longDescription = ''
       This program can pull email and labels (and changes to labels)
       from your GMail account and store them locally in a maildir with
       the labels synchronized with a notmuch database. The changes to
       tags in the notmuch database may be pushed back remotely to your
       GMail account.
     '';
     homepage = "https://lieer.gaute.vetsj.com/";
     repositories.git = "https://github.com/gauteh/lieer.git";
     license = licenses.gpl3Plus;
     maintainers = with maintainers; [ flokli kaiha ];
   };
 }


@@ -24,11 +24,11 @@ let
 in
 stdenv.mkDerivation rec {
   pname = "PortfolioPerformance";
-  version = "0.49.3";
+  version = "0.50.0";

   src = fetchurl {
     url = "https://github.com/buchen/portfolio/releases/download/${version}/PortfolioPerformance-${version}-linux.gtk.x86_64.tar.gz";
-    sha256 = "1j8d3bih2hs1c1a6pjqpmdlh2hbj76s00srl0f850d06jhldg3p6";
+    sha256 = "1jq4if5hx3fwag1dz38sj87av2na1kv4c36hai1gyz9w5qhjv7j8";
   };

   nativeBuildInputs = [


@@ -1,6 +1,7 @@
 { lib, stdenv
 , fetchFromGitHub
 , pkgconfig
+, fetchpatch
 , python3
 , meson
 , ninja
@@ -31,6 +32,16 @@ stdenv.mkDerivation rec {
     fetchSubmodules = true;
   };

+  patches = [
+    # fix compilation with gcc10
+    (fetchpatch {
+      url = "https://github.com/parnold-x/libqalculate/commit/4fa8f2cceada128ef19f82407226b2c230b780d5.patch";
+      extraPrefix = "subprojects/libqalculate/";
+      stripLen = "1";
+      sha256 = "0kbff623zl0s6yx5avx068f2apwzxzvihjahja4qhlkqkhhzj9dm";
+    })
+  ];
+
   nativeBuildInputs = [
     glib # post_install.py
     gtk3 # post_install.py


@@ -20,6 +20,9 @@ buildPythonApplication rec {
     gappsWrapperArgs+=(--prefix PATH : ${lib.makeBinPath [ ffmpeg_3 ]})
   '';

+  # no tests
+  doCheck = false;
+
   meta = with lib; {
     description = "A native Linux GUI for Chromecasting local files";
     homepage = "https://github.com/keredson/gnomecast";


@@ -8,6 +8,7 @@ header "exporting $url (rev $rev) into $out"
 $SHELL $fetcher --builder --url "$url" --out "$out" --rev "$rev" \
   ${leaveDotGit:+--leave-dotGit} \
+  ${fetchLFS:+--fetch-lfs} \
   ${deepClone:+--deepClone} \
   ${fetchSubmodules:+--fetch-submodules} \
   ${branchName:+--branch-name "$branchName"}


@@ -1,4 +1,4 @@
-{stdenvNoCC, git, cacert}: let
+{stdenvNoCC, git, git-lfs, cacert}: let
   urlToName = url: rev: let
     inherit (stdenvNoCC.lib) removeSuffix splitString last;
     base = last (splitString ":" (baseNameOf (removeSuffix "/" url)));
@@ -20,6 +20,7 @@ in
   # successfully. This can do things like check or transform the file.
   postFetch ? ""
 , preferLocalBuild ? true
+, fetchLFS ? false
 }:

 /* NOTE:
@@ -53,13 +54,15 @@ stdenvNoCC.mkDerivation {
   inherit name;
   builder = ./builder.sh;
   fetcher = ./nix-prefetch-git;  # This must be a string to ensure it's called with bash.
-  nativeBuildInputs = [git];
+
+  nativeBuildInputs = [ git ]
+    ++ stdenvNoCC.lib.optionals fetchLFS [ git-lfs ];

   outputHashAlgo = "sha256";
   outputHashMode = "recursive";
   outputHash = sha256;

-  inherit url rev leaveDotGit fetchSubmodules deepClone branchName postFetch;
+  inherit url rev leaveDotGit fetchLFS fetchSubmodules deepClone branchName postFetch;

   GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
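Downstream fetches opt in to the new argument per call; a minimal sketch (the URL, rev and sha256 below are placeholders):

  src = fetchgit {
    url = "https://example.org/big-assets.git";
    rev = "refs/tags/v1.0";
    sha256 = "0000000000000000000000000000000000000000000000000000";
    # also download objects tracked by Git LFS during the fetch
    fetchLFS = true;
  };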


@@ -9,6 +9,7 @@ hashType=$NIX_HASH_ALGO
 deepClone=$NIX_PREFETCH_GIT_DEEP_CLONE
 leaveDotGit=$NIX_PREFETCH_GIT_LEAVE_DOT_GIT
 fetchSubmodules=
+fetchLFS=
 builder=
 branchName=$NIX_PREFETCH_GIT_BRANCH_NAME
@@ -72,6 +73,7 @@ for arg; do
         --quiet) QUIET=true;;
         --no-deepClone) deepClone=;;
         --leave-dotGit) leaveDotGit=true;;
+        --fetch-lfs) fetchLFS=true;;
         --fetch-submodules) fetchSubmodules=true;;
         --builder) builder=true;;
         -h|--help) usage; exit;;
@@ -283,6 +285,11 @@ clone_user_rev() {
     local url="$2"
     local rev="${3:-HEAD}"

+    if [ -n "$fetchLFS" ]; then
+        HOME=$TMPDIR
+        git lfs install
+    fi
+
     # Perform the checkout.
     case "$rev" in
         HEAD|refs/*)


@@ -8,13 +8,13 @@
 stdenv.mkDerivation rec {
   pname = "marwaita-peppermint";
-  version = "0.5";
+  version = "0.6";

   src = fetchFromGitHub {
     owner = "darkomarko42";
     repo = pname;
     rev = version;
-    sha256 = "04j210nw9w4m8n49cd8y3l0qp60rn00i8wdr6kvc7lhkvqwhpnlg";
+    sha256 = "0mhkkx2qa66z4b2h5iynhy63flwdf6b2phd21r1j8kp4m08dynms";
   };

   buildInputs = [


@@ -8,13 +8,13 @@
 stdenv.mkDerivation rec {
   pname = "marwaita-pop_os";
-  version = "0.9";
+  version = "1.1";

   src = fetchFromGitHub {
     owner = "darkomarko42";
     repo = pname;
     rev = version;
-    sha256 = "1fpzsch9rpq7dmg01ny7jc2vd6dks0fqxxp2rb9jcs0vx5d2fdc6";
+    sha256 = "1nwfyy3jnfsdlqgj7ig9gbawazdm76g02b0hrfsll17j5498d59y";
   };

   buildInputs = [


@@ -8,13 +8,13 @@
 stdenv.mkDerivation rec {
   pname = "marwaita-ubuntu";
-  version = "1.5";
+  version = "1.7";

   src = fetchFromGitHub {
     owner = "darkomarko42";
     repo = pname;
     rev = version;
-    sha256 = "0mld78s6gl5kfsdaqa7xs5mvfng9600pd2d9sp2b2q5axx7wjay5";
+    sha256 = "024b0817jilvi5ilq9sbwg19ql5i2x3jfgc1awk7zzv6sqii639x";
   };

   buildInputs = [


@@ -33,7 +33,7 @@ in
 , profilingDetail ? "exported-functions"
 # TODO enable shared libs for cross-compiling
 , enableSharedExecutables ? false
-, enableSharedLibraries ? (ghc.enableShared or false)
+, enableSharedLibraries ? !stdenv.hostPlatform.isStatic && (ghc.enableShared or false)
 , enableDeadCodeElimination ? (!stdenv.isDarwin)  # TODO: use -dead_strip for darwin
 , enableStaticLibraries ? !(stdenv.hostPlatform.isWindows or stdenv.hostPlatform.isWasm)
 , enableHsc2hsViaAsm ? stdenv.hostPlatform.isWindows && stdenv.lib.versionAtLeast ghc.version "8.4"
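If shared libraries are still wanted for a single package despite a static host platform, the new default can be overridden per derivation; a sketch, assuming the usual haskell.lib.overrideCabal helper and a placeholder package name:

  haskell.lib.overrideCabal haskellPackages.somePackage (drv: {
    enableSharedLibraries = true;
  })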


@@ -7,12 +7,11 @@
 , six
 , stevedore
 , pyyaml
-, unicodecsv
 , cmd2
-, pytest
-, mock
+, pytestCheckHook
 , testtools
 , fixtures
+, which
 }:

 buildPythonPackage rec {
@@ -32,20 +31,21 @@ buildPythonPackage rec {
     stevedore
     pyyaml
     cmd2
-    unicodecsv
   ];

+  # remove version constraints
   postPatch = ''
-    sed -i '/cmd2/c\cmd2' requirements.txt
+    sed -i -e '/cmd2/c\cmd2' -e '/PrettyTable/c\PrettyTable' requirements.txt
   '';

-  checkInputs = [ fixtures mock pytest testtools ];
+  checkInputs = [ fixtures pytestCheckHook testtools which ];

   # add some tests
-  checkPhase = ''
-    pytest cliff/tests/test_{utils,app,command,help,lister}.py \
-      -k 'not interactive_mode'
-  '';
+  pytestFlagsArray = [
+    "cliff/tests/test_utils.py"
+    "cliff/tests/test_app.py"
+    "cliff/tests/test_command.py"
+    "cliff/tests/test_help.py"
+    "cliff/tests/test_lister.py"
+  ];

   meta = with lib; {
     description = "Command Line Interface Formulation Framework";


@@ -8,6 +8,10 @@ buildPythonPackage rec {
   sourceRoot = "source/python";

+  # flatbuffers needs VERSION environment variable for setting the correct
+  # version, otherwise it uses the current date.
+  VERSION = "${version}";
+
   pythonImportsCheck = [ "flatbuffers" ];

   meta = flatbuffers.meta // {


@@ -29,10 +29,6 @@ buildPythonPackage rec {
     })
   ];

-  checkInputs = [ pytestCheckHook Rtree ];
-  disabledTests = [ "web" ];
-  pytestFlagsArray = [ "geopandas" ];
-
   propagatedBuildInputs = [
     pandas
     shapely
@@ -41,6 +37,11 @@ buildPythonPackage rec {
     pyproj
   ];

+  doCheck = !stdenv.isDarwin;
+  checkInputs = [ pytestCheckHook Rtree ];
+  disabledTests = [ "web" ];
+  pytestFlagsArray = [ "geopandas" ];
+
   meta = with lib; {
     description = "Python geospatial data analysis framework";
     homepage = "https://geopandas.org";


@@ -11,6 +11,10 @@ buildPythonPackage rec {
   propagatedBuildInputs = [ libguestfs qemu ];

+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "guestfs" ];
+
   meta = with lib; {
     homepage = "https://libguestfs.org/guestfs-python.3.html";
     description = "Use libguestfs from Python";


@@ -0,0 +1,26 @@
{ lib, fetchPypi, buildPythonPackage
, six
}:
buildPythonPackage rec {
pname = "gviz_api";
version = "1.9.0";
format = "wheel";
src = fetchPypi {
inherit pname version;
format = "wheel";
sha256 = "1yag559lpmwfdxpxn679a6ajifcbpgljr5n6k5b7rrj38k2xq7jg";
};
propagatedBuildInputs = [
six
];
meta = with lib; {
description = "Python API for Google Visualization";
homepage = "https://developers.google.com/chart/interactive/docs/dev/gviz_api_lib";
license = licenses.asl20;
maintainers = with maintainers; [ ndl ];
};
}


@@ -24,6 +24,7 @@ buildPythonPackage rec {
   ];

   # no tests
+  doCheck = false;
   pythonImportsCheck = [ "keep" ];

   meta = with lib; {


@@ -0,0 +1,25 @@
{ lib
, buildPythonPackage
, fetchPypi
}:
buildPythonPackage rec {
pname = "mulpyplexer";
version = "0.08";
src = fetchPypi {
inherit pname version;
sha256 = "1zn5d1vyhfjp8x9z5mr9gv8m8gmi3s3jv3kqb790xzi1kqi0p4ya";
};
# Project has no tests
doCheck = false;
pythonImportsCheck = [ "mulpyplexer" ];
meta = with lib; {
description = "Multiplex interactions with lists of Python objects";
homepage = "https://github.com/zardus/mulpyplexer";
license = with licenses; [ bsd2 ];
maintainers = with maintainers; [ fab ];
};
}


@@ -1,4 +1,5 @@
-{ lib, stdenv
+{ lib
+, stdenv
 , buildPythonPackage
 , notmuch
 , python
@@ -13,6 +14,10 @@ buildPythonPackage {
   buildInputs = [ python notmuch cffi ];

+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "notmuch2" ];
+
   meta = with lib; {
     description = "Pythonic bindings for the notmuch mail database using CFFI";
     homepage = "https://notmuchmail.org/";


@@ -1,4 +1,5 @@
-{ lib, stdenv
+{ lib
+, stdenv
 , buildPythonPackage
 , notmuch
 , python
@@ -16,6 +17,10 @@ buildPythonPackage {
     notmuch/globals.py
   '';

+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "notmuch" ];
+
   meta = with lib; {
     description = "A Python wrapper around notmuch";
     homepage = "https://notmuchmail.org/";


@@ -1,24 +1,16 @@
 { lib
 , buildPythonPackage
 , fetchPypi
-, isPy3k
-# propagatedBuildInputs
+, pythonOlder
 , aiohttp
 , async-timeout
 , semantic-version
-# buildInputs
 , pytestrunner
-# checkInputs
-, pytest
-, pytest-asyncio
-, aresponses
 }:

 buildPythonPackage rec {
   pname = "pyhaversion";
   version = "3.4.2";
+  disabled = pythonOlder "3.8";

-  # needs aiohttp which is py3k-only
-  disabled = !isPy3k;

   src = fetchPypi {
     inherit pname version;
@@ -35,15 +27,14 @@ buildPythonPackage rec {
     pytestrunner
   ];

-  checkInputs = [
-    pytest
-    pytest-asyncio
-    aresponses
-  ];
+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "pyhaversion" ];

   meta = with lib; {
     description = "A python module to the newest version number of Home Assistant";
     homepage = "https://github.com/ludeeus/pyhaversion";
+    license = with licenses; [ mit ];
     maintainers = [ maintainers.makefu ];
   };
 }


@@ -0,0 +1,24 @@
{ lib, buildPythonPackage, fetchPypi, python-language-server }:
buildPythonPackage rec {
pname = "pyls-spyder";
version = "0.3.0";
src = fetchPypi {
inherit pname version;
sha256 = "07apxh12b8ybkx5izr7pg8kbg5g5wgzw7vh5iy2n8dhiqarzp7s1";
};
propagatedBuildInputs = [ python-language-server ];
# no tests
doCheck = false;
pythonImportsCheck = [ "pyls_spyder" ];
meta = with lib; {
description = "Spyder extensions for the python-language-server";
homepage = "https://github.com/spyder-ide/pyls-spyder";
license = licenses.mit;
maintainers = with maintainers; [ SuperSandro2000 ];
};
}


@@ -1,43 +1,25 @@
-{ lib, stdenv, buildPythonPackage, fetchFromGitHub, pythonOlder
-, pytest, mock, pytestcov, coverage
+{ stdenv, lib, buildPythonPackage, fetchFromGitHub, pythonOlder
+, pytestCheckHook, mock, pytestcov, coverage
 , future, futures, ujson, isPy38
-, fetchpatch
 }:

 buildPythonPackage rec {
   pname = "python-jsonrpc-server";
-  version = "0.3.4";
+  version = "0.4.0";

   src = fetchFromGitHub {
     owner = "palantir";
     repo = "python-jsonrpc-server";
     rev = version;
-    sha256 = "027sx5pv4i9a192kr00bjjcxxprh2xyr8q5372q8ghff3xryk9dd";
+    sha256 = "0pcf50qvcxqnz3db58whqd8z89cdph19pfs1whgfm0zmwbwk0lw6";
   };

   postPatch = ''
     sed -i "s/version=versioneer.get_version(),/version=\"$version\",/g" setup.py
+    # https://github.com/palantir/python-jsonrpc-server/issues/36
+    sed -iEe "s!'ujson.*\$!'ujson',!" setup.py
   '';

   checkInputs = [
-    pytest mock pytestcov coverage
-  ];
-
-  checkPhase = ''
-    pytest
-  '';
-
-  patches = [
-    (fetchpatch {
-      url = "https://github.com/palantir/python-jsonrpc-server/commit/0a04cc4e9d44233b1038b12d63cd3bd437c2374e.patch";
-      sha256 = "177zdnp1808r2pg189bvzab44l8i2alsgv04kmrlhhnv40h66qyg";
-    })
-    (fetchpatch {
-      url = "https://github.com/palantir/python-jsonrpc-server/commit/5af6e43d0c1fb9a6a29b96d38cfd6dbeec85d0ea.patch";
-      sha256 = "1gx7lc1jxar1ngqqfkdn21s46y1mfnjf7ky2886ydk53nkaba91m";
-    })
+    pytestCheckHook mock pytestcov coverage
   ];

   propagatedBuildInputs = [ future ujson ]


@@ -1,6 +1,6 @@
-{ lib, stdenv, buildPythonPackage, fetchFromGitHub, fetchpatch, pythonOlder, isPy27
+{ stdenv, lib, buildPythonPackage, fetchFromGitHub, pythonOlder, isPy27
 , backports_functools_lru_cache, configparser, futures, future, jedi, pluggy, python-jsonrpc-server, flake8
-, pytestCheckHook, mock, pytestcov, coverage, setuptools, ujson
+, pytestCheckHook, mock, pytestcov, coverage, setuptools, ujson, flaky
 , # Allow building a limited set of providers, e.g. ["pycodestyle"].
   providers ? ["*"]
   # The following packages are optional and
@@ -21,33 +21,33 @@ in
 buildPythonPackage rec {
   pname = "python-language-server";
-  version = "0.34.1";
+  version = "0.36.2";

   src = fetchFromGitHub {
     owner = "palantir";
     repo = "python-language-server";
     rev = version;
-    sha256 = "sha256-/tVzaoyUO6+7DSvnf3JxpcTY0rU+hHBu5qlru/ZTpxU=";
+    sha256 = "07x6jr4z20jxn03bxblwc8vk0ywha492cgwfhj7q97nb5cm7kx0q";
   };

-  patches = [
-    # https://github.com/palantir/python-language-server/pull/851
-    (fetchpatch {
-      url = "https://github.com/palantir/python-language-server/commit/f513f3297132492dd41e001d943980e6c4f40809.patch";
-      sha256 = "04c9hrb3dzlfchjk4625ipazyfcbq6qq2kj2hg3zf2xsny2jcvi5";
-    })
-  ];
+  propagatedBuildInputs = [ setuptools jedi pluggy future python-jsonrpc-server flake8 ujson ]
+    ++ stdenv.lib.optional (withProvider "autopep8") autopep8
+    ++ stdenv.lib.optional (withProvider "mccabe") mccabe
+    ++ stdenv.lib.optional (withProvider "pycodestyle") pycodestyle
+    ++ stdenv.lib.optional (withProvider "pydocstyle") pydocstyle
+    ++ stdenv.lib.optional (withProvider "pyflakes") pyflakes
+    ++ stdenv.lib.optional (withProvider "pylint") pylint
+    ++ stdenv.lib.optional (withProvider "rope") rope
+    ++ stdenv.lib.optional (withProvider "yapf") yapf
+    ++ stdenv.lib.optional isPy27 configparser
+    ++ stdenv.lib.optionals (pythonOlder "3.2") [ backports_functools_lru_cache futures ];
+
+  postPatch = ''
+    # https://github.com/palantir/python-jsonrpc-server/issues/36
+    sed -i -e 's!ujson<=!ujson>=!' setup.py
+  '';

   # The tests require all the providers, disable otherwise.
   doCheck = providers == ["*"];

   checkInputs = [
-    pytestCheckHook mock pytestcov coverage
+    pytestCheckHook mock pytestcov coverage flaky
     # rope is technically a dependency, but we don't add it by default since we
     # already have jedi, which is the preferred option
     rope
@@ -67,20 +67,9 @@ buildPythonPackage rec {
     "test_matplotlib_completions"
     "test_snippet_parsing"
     "test_numpy_hover"
+    "test_symbols"
   ] ++ stdenv.lib.optional isPy27 "test_flake8_lint";

-  propagatedBuildInputs = [ setuptools jedi pluggy future python-jsonrpc-server flake8 ujson ]
-    ++ stdenv.lib.optional (withProvider "autopep8") autopep8
-    ++ stdenv.lib.optional (withProvider "mccabe") mccabe
-    ++ stdenv.lib.optional (withProvider "pycodestyle") pycodestyle
-    ++ stdenv.lib.optional (withProvider "pydocstyle") pydocstyle
-    ++ stdenv.lib.optional (withProvider "pyflakes") pyflakes
-    ++ stdenv.lib.optional (withProvider "pylint") pylint
-    ++ stdenv.lib.optional (withProvider "rope") rope
-    ++ stdenv.lib.optional (withProvider "yapf") yapf
-    ++ stdenv.lib.optional isPy27 configparser
-    ++ stdenv.lib.optionals (pythonOlder "3.2") [ backports_functools_lru_cache futures ];
-
   meta = with lib; {
     homepage = "https://github.com/palantir/python-language-server";
     description = "An implementation of the Language Server Protocol for Python";


@@ -43,6 +43,8 @@ buildPythonPackage rec {
   pytestFlagsArray = [ "--ignore examples/" ];
   pythonImportsCheck = [ "pytile" ];

+  __darwinAllowLocalNetworking = true;
+
   meta = with lib; {
     description = " Python API for Tile Bluetooth trackers";
     longDescription = ''


@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchPypi, buildPythonPackage, pytest, scrapy, bsddb3 }:
+{ stdenv, lib, fetchPypi, buildPythonPackage, scrapy, bsddb3 }:

 buildPythonPackage rec {
   pname = "scrapy-deltafetch";
@@ -11,7 +11,9 @@ buildPythonPackage rec {
   propagatedBuildInputs = [ bsddb3 scrapy ];

-  checkInputs = [ pytest ];
+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "scrapy_deltafetch" ];

   meta = with lib; {
     description = "Scrapy spider middleware to ignore requests to pages containing items seen in previous crawls";


@@ -1,4 +1,4 @@
-{ lib, stdenv, fetchPypi, buildPythonPackage, pytest, hypothesis, scrapy }:
+{ stdenv, lib, fetchPypi, buildPythonPackage, scrapy, six }:

 buildPythonPackage rec {
   pname = "scrapy-splash";
@@ -9,7 +9,11 @@ buildPythonPackage rec {
     sha256 = "1dg7csdza2hzqskd9b9gx0v3saqsch4f0fwdp0a3p0822aqqi488";
   };

-  checkInputs = [ pytest hypothesis scrapy ];
+  propagatedBuildInputs = [ scrapy six ];
+
+  # no tests
+  doCheck = false;
+  pythonImportsCheck = [ "scrapy_splash" ];

   meta = with lib; {
     description = "Scrapy+Splash for JavaScript integration";
description = "Scrapy+Splash for JavaScript integration"; description = "Scrapy+Splash for JavaScript integration";


@@ -1,4 +1,5 @@
-{ lib, stdenv
+{ lib
+, stdenv
 , buildPythonPackage
 , isPy27
 , fetchPypi
@@ -79,7 +80,10 @@ buildPythonPackage rec {
     "test_retry_dns_error"
     "test_custom_asyncio_loop_enabled_true"
     "test_custom_loop_asyncio"
-  ] ++ stdenv.lib.optionals stdenv.isDarwin [ "test_xmliter_encoding" ];
+  ] ++ stdenv.lib.optionals stdenv.isDarwin [
+    "test_xmliter_encoding"
+    "test_download"
+  ];

   src = fetchPypi {
     inherit pname version;
@@ -92,6 +96,8 @@ buildPythonPackage rec {
     install -m 644 -D extras/scrapy_zsh_completion $out/share/zsh/site-functions/_scrapy
   '';

+  __darwinAllowLocalNetworking = true;
+
   meta = with lib; {
     description = "A fast high-level web crawling and web scraping framework, used to crawl websites and extract structured data from their pages";
     homepage = "https://scrapy.org/";


@@ -2,7 +2,7 @@
   psutil, pyflakes, rope, numpy, scipy, matplotlib, pylint, keyring, numpydoc,
   qtconsole, qtawesome, nbconvert, mccabe, pyopengl, cloudpickle, pygments,
   spyder-kernels, qtpy, pyzmq, chardet, qdarkstyle, watchdog, python-language-server
-, pyqtwebengine, atomicwrites, pyxdg, diff-match-patch
+, pyqtwebengine, atomicwrites, pyxdg, diff-match-patch, three-merge, pyls-black, pyls-spyder
 }:

 buildPythonPackage rec {
@@ -22,7 +22,7 @@ buildPythonPackage rec {
     intervaltree jedi pycodestyle psutil pyflakes rope numpy scipy matplotlib pylint keyring
     numpydoc qtconsole qtawesome nbconvert mccabe pyopengl cloudpickle spyder-kernels
     pygments qtpy pyzmq chardet pyqtwebengine qdarkstyle watchdog python-language-server
-    atomicwrites pyxdg diff-match-patch
+    atomicwrites pyxdg diff-match-patch three-merge pyls-black pyls-spyder
   ];

   # There is no test for spyder
@@ -51,7 +51,7 @@ buildPythonPackage rec {
   postInstall = ''
     # add Python libs to env so Spyder subprocesses
     # created to run compute kernels don't fail with ImportErrors
-    wrapProgram $out/bin/spyder3 --prefix PYTHONPATH : "$PYTHONPATH"
+    wrapProgram $out/bin/spyder --prefix PYTHONPATH : "$PYTHONPATH"

     # Create desktop item
     mkdir -p $out/share/icons


@@ -0,0 +1,26 @@
{ lib
, buildPythonPackage
, fetchPypi
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "stdiomask";
version = "0.0.6";
src = fetchPypi {
inherit pname version;
sha256 = "19m3p6i7fj7nmkbsjhiha3f2l7d05j9gf9ha2pd0pqfrx9lp1r61";
};
# tests are not published: https://github.com/asweigart/stdiomask/issues/5
doCheck = false;
pythonImportsCheck = [ "stdiomask" ];
meta = with lib; {
description = "Python module for masking passwords";
homepage = "https://github.com/asweigart/stdiomask";
license = with licenses; [ gpl3Plus ];
maintainers = with maintainers; [ fab ];
};
}


@@ -0,0 +1,44 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, aiohttp
, asynctest
, stdiomask
, cryptography
, pytestcov
, pytest-asyncio
, pytestCheckHook
}:
buildPythonPackage rec {
pname = "subarulink";
version = "0.3.11";
src = fetchFromGitHub {
owner = "G-Two";
repo = pname;
rev = "subaru-v${version}";
sha256 = "1ink9bhph6blidnfsqwq01grhp7ghacmkd4vzgb9hnhl9l52s1jq";
};
propagatedBuildInputs = [ aiohttp stdiomask ];
checkInputs = [
asynctest
cryptography
pytest-asyncio
pytestcov
pytestCheckHook
];
__darwinAllowLocalNetworking = true;
pythonImportsCheck = [ "subarulink" ];
meta = with lib; {
description = "Python module for interacting with STARLINK-enabled vehicle";
homepage = "https://github.com/G-Two/subarulink";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}


@@ -18,7 +18,8 @@ buildPythonPackage rec {
   preConfigure = ''
     substituteInPlace setup.py \
-      --replace 'requests>=2.24.0' 'requests~=2.23'
+      --replace 'requests>=2.24.0' 'requests~=2.23' \
+      --replace 'flask-cors==3.0.8' 'flask-cors'
   '';

   checkPhase = ''
@@ -27,6 +28,8 @@ buildPythonPackage rec {
   checkInputs = [ pytestCheckHook mock ];

+  pythonImportsCheck = [ "SwSpotify" ];
+
   meta = with lib; {
     homepage = "https://github.com/SwagLyrics/SwSpotify";
     description = "Library to get the currently playing song and artist from Spotify";


@@ -0,0 +1,31 @@
{ lib, fetchPypi, buildPythonPackage
, gviz-api
, protobuf
, werkzeug
}:
buildPythonPackage rec {
pname = "tensorboard_plugin_profile";
version = "2.4.0";
format = "wheel";
src = fetchPypi {
inherit pname version;
format = "wheel";
python = "py3";
sha256 = "0z6dcjvkk3pzmmmjxi2ybawnfshz5qa3ga92kqj69ld1g9k3i9bj";
};
propagatedBuildInputs = [
gviz-api
protobuf
werkzeug
];
meta = with lib; {
description = "Profile Tensorboard Plugin.";
homepage = "http://tensorflow.org";
license = licenses.asl20;
maintainers = with maintainers; [ ndl ];
};
}


@@ -0,0 +1,22 @@
{ lib, fetchPypi, buildPythonPackage
}:
buildPythonPackage rec {
pname = "tensorboard_plugin_wit";
version = "1.7.0";
format = "wheel";
src = fetchPypi {
inherit pname version;
format = "wheel";
python = "py3";
sha256 = "0nv855qm2fav70lndsrv810pqgg41sbmd70fk86wk18ih825yxzf";
};
meta = with lib; {
description = "What-If Tool TensorBoard plugin.";
homepage = "http://tensorflow.org";
license = licenses.asl20;
maintainers = with maintainers; [ ndl ];
};
}


@@ -6,13 +6,13 @@
 buildPythonPackage rec {
   pname = "tensorflow-estimator";
-  version = "2.3.0";
+  version = "2.4.0";
   format = "wheel";

   src = fetchPypi {
     pname = "tensorflow_estimator";
     inherit version format;
-    sha256 = "11n4sl9wfr00fv1i837b7a36ink86ggmlsgj7i06kcfc011h6pmp";
+    sha256 = "1w0pkcslm6934qqd6m5gxyjdlnb4pbl47k6s99wsh6dyvvr7nysv";
   };

   propagatedBuildInputs = [ mock numpy absl-py ];


@@ -5,9 +5,10 @@
 , protobuf
 , grpcio
 , markdown
-, futures
 , absl-py
 , google-auth-oauthlib
+, tensorboard-plugin-wit
+, tensorboard-plugin-profile
 }:

 # tensorflow/tensorboard is built from a downloaded wheel, because
@@ -16,19 +17,16 @@
 buildPythonPackage rec {
   pname = "tensorflow-tensorboard";
-  version = "2.1.0";
+  version = "2.4.0";
   format = "wheel";
-  disabled = !isPy3k;

-  src = fetchPypi ({
-    pname = "tensorboard";
-    inherit version format;
-  } // (if isPy3k then {
-    python = "py3";
-    sha256 = "1wpjdzhjpcdkyaahzd4bl71k4l30z5c55280ndiwj32hw70lxrp6";
-  } else {
-    python = "py2";
-    sha256 = "1f805839xa36wxb7xac9fyxzaww92vw4d50vs6g61wnlr4byp00w";
-  }));
+  src = fetchPypi {
+    pname = "tensorboard";
+    inherit version format;
+    python = "py3";
+    sha256 = "0f17h6i398n8maam0r3rssqvdqnqbwjyf96nnhf482anm1iwdq6d";
+  };

   propagatedBuildInputs = [
     numpy
@@ -38,10 +36,12 @@ buildPythonPackage rec {
     grpcio
     absl-py
     google-auth-oauthlib
+    tensorboard-plugin-profile
+    tensorboard-plugin-wit
     # not declared in install_requires, but used at runtime
     # https://github.com/NixOS/nixpkgs/issues/73840
     wheel
-  ] ++ lib.optional (!isPy3k) futures;
+  ];

   # in the absence of a real test suite, run cli and imports
   checkPhase = ''


@@ -1,24 +1,25 @@
-{ stdenv, pkgs, bazel_3, buildBazelPackage, lib, fetchFromGitHub, fetchpatch, symlinkJoin
+{ stdenv, bazel_3, buildBazelPackage, isPy3k, lib, fetchFromGitHub, symlinkJoin
 , addOpenGLRunpath
 # Python deps
-, buildPythonPackage, isPy3k, isPy27, pythonOlder, pythonAtLeast, python
+, buildPythonPackage, pythonOlder, pythonAtLeast, python
 # Python libraries
-, numpy, tensorflow-tensorboard_2, backports_weakref, mock, enum34, absl-py
-, future, setuptools, wheel, keras-preprocessing, keras-applications, google-pasta
-, functools32
+, numpy, tensorflow-tensorboard_2, absl-py
+, future, setuptools, wheel, keras-preprocessing, google-pasta
 , opt-einsum, astunparse, h5py
 , termcolor, grpcio, six, wrapt, protobuf, tensorflow-estimator_2
+, dill, flatbuffers-python, tblib, typing-extensions
 # Common deps
-, git, swig, which, binutils, glibcLocales, cython
+, git, pybind11, which, binutils, glibcLocales, cython, perl
 # Common libraries
-, jemalloc, openmpi, astor, gast, grpc, sqlite, openssl, jsoncpp, re2
-, curl, snappy, flatbuffers, icu, double-conversion, libpng, libjpeg, giflib
+, jemalloc, openmpi, gast, grpc, sqlite, boringssl, jsoncpp
+, curl, snappy, flatbuffers-core, lmdb-core, icu, double-conversion, libpng, libjpeg_turbo, giflib
 # Upstream by default includes cuda support since tensorflow 1.15. We could do
 # that in nix as well. It would make some things easier and less confusing, but
 # it would also make the default tensorflow package unfree. See
 # https://groups.google.com/a/tensorflow.org/forum/#!topic/developers/iRCt5m4qUz0
 , cudaSupport ? false, cudatoolkit ? null, cudnn ? null, nccl ? null
 , mklSupport ? false, mkl ? null
+, tensorboardSupport ? true
 # XLA without CUDA is broken
 , xlaSupport ? cudaSupport
 # Default from ./configure script
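Both feature flags introduced in this argument list can be flipped by consumers; a sketch, assuming the package is reached through the usual python3Packages attribute and that the resulting unfree CUDA build is acceptable:

  python3Packages.tensorflow.override {
    tensorboardSupport = false;  # drop the TensorBoard dependency closure
    cudaSupport = true;          # makes the package unfree; CUDA libraries come from the usual callPackage arguments
  }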
@@ -39,7 +40,7 @@ assert ! (stdenv.isDarwin && cudaSupport);
 assert mklSupport -> mkl != null;

 let
-  withTensorboard = pythonOlder "3.6";
+  withTensorboard = (pythonOlder "3.6") || tensorboardSupport;

   cudatoolkit_joined = symlinkJoin {
     name = "${cudatoolkit.name}-merged";
@@ -65,34 +66,40 @@ let
   includes_joined = symlinkJoin {
     name = "tensorflow-deps-merged";
     paths = [
-      pkgs.protobuf
       jsoncpp
     ];
   };

   tfFeature = x: if x then "1" else "0";

-  version = "2.3.2";
+  version = "2.4.0";
   variant = if cudaSupport then "-gpu" else "";
   pname = "tensorflow${variant}";

   pythonEnv = python.withPackages (_:
     [ # python deps needed during wheel build time (not runtime, see the buildPythonPackage part for that)
-      numpy
-      keras-preprocessing
-      protobuf
-      wrapt
-      gast
-      astor
+      # This list can likely be shortened, but each trial takes multiple hours so won't bother for now.
       absl-py
-      termcolor
-      keras-applications
+      astunparse
+      dill
+      flatbuffers-python
+      gast
+      google-pasta
+      grpcio
+      h5py
+      keras-preprocessing
+      numpy
+      opt-einsum
+      protobuf
       setuptools
+      six
+      tblib
+      tensorflow-estimator_2
+      tensorflow-tensorboard_2
+      termcolor
+      typing-extensions
       wheel
-  ] ++ lib.optionals (!isPy3k)
-  [ future
-    functools32
-    mock
+      wrapt
   ]);

   bazel-build = buildBazelPackage {
@@ -103,27 +110,21 @@ let
       owner = "tensorflow";
       repo = "tensorflow";
       rev = "v${version}";
-      sha256 = "sha256-ncwIkqLDqrB33pB9/FTlBklsIJUEvnDUmyAeUfufCFs=";
+      sha256 = "0yl06aypfxrcs35828xf04mkidz1x0j89v0q5h4d2xps1cb5rv3f";
     };

     patches = [
-      # Fixes for NixOS jsoncpp
-      ./system-jsoncpp.patch
+      # Relax too strict Python packages versions dependencies.
       ./relax-dependencies.patch
       # Add missing `io_bazel_rules_docker` dependency.
-      # see https://github.com/tensorflow/tensorflow/issues/40688
-      (fetchpatch {
-        url = "https://github.com/tensorflow/tensorflow/commit/75ea0b31477d6ba9e990e296bbbd8ca4e7eebadf.patch";
-        sha256 = "1xp1icacig0xm0nmb05sbrf4nw4xbln9fhc308birrv8286zx7wv";
-      })
+      ./workspace.patch
     ];

     # On update, it can be useful to steal the changes from gentoo
     # https://gitweb.gentoo.org/repo/gentoo.git/tree/sci-libs/tensorflow

     nativeBuildInputs = [
-      swig which pythonEnv
+      which pythonEnv cython perl
     ] ++ lib.optional cudaSupport addOpenGLRunpath;

     buildInputs = [
@ -135,19 +136,18 @@ let
# libs taken from system through the TF_SYS_LIBS mechanism # libs taken from system through the TF_SYS_LIBS mechanism
grpc grpc
sqlite sqlite
openssl boringssl
jsoncpp jsoncpp
pkgs.protobuf
curl curl
pybind11
snappy snappy
flatbuffers flatbuffers-core
icu icu
double-conversion double-conversion
libpng libpng
libjpeg libjpeg_turbo
giflib giflib
re2 lmdb-core
pkgs.lmdb
] ++ lib.optionals cudaSupport [ ] ++ lib.optionals cudaSupport [
cudatoolkit cudatoolkit
cudnn cudnn
@ -173,10 +173,17 @@ let
# "com_github_googleapis_googleapis" # "com_github_googleapis_googleapis"
# "com_github_googlecloudplatform_google_cloud_cpp" # "com_github_googlecloudplatform_google_cloud_cpp"
"com_github_grpc_grpc" "com_github_grpc_grpc"
"com_google_protobuf" # Multiple issues with custom protobuf.
"com_googlesource_code_re2" # First `com_github_googleapis` fails to configure. Can be worked around by disabling `com_github_googleapis`
# and related functionality, but then the next error is about "dangling symbolic link", and in general
# looks like that's only the beginning: see
# https://stackoverflow.com/questions/55578884/how-to-build-tensorflow-1-13-1-with-custom-protobuf
# "com_google_protobuf"
# Fails with the error: external/org_tensorflow/tensorflow/core/profiler/utils/tf_op_utils.cc:46:49: error: no matching function for call to 're2::RE2::FullMatch(absl::lts_2020_02_25::string_view&, re2::RE2&)'
# "com_googlesource_code_re2"
"curl" "curl"
"cython" "cython"
"dill_archive"
"double_conversion" "double_conversion"
"enum34_archive" "enum34_archive"
"flatbuffers" "flatbuffers"
@ -198,8 +205,9 @@ let
"pybind11" "pybind11"
"six_archive" "six_archive"
"snappy" "snappy"
"swig" "tblib_archive"
"termcolor_archive" "termcolor_archive"
"typing_extensions_archive"
"wrapt" "wrapt"
"zlib" "zlib"
]; ];
@ -224,16 +232,13 @@ let
TF_CUDA_COMPUTE_CAPABILITIES = lib.concatStringsSep "," cudaCapabilities; TF_CUDA_COMPUTE_CAPABILITIES = lib.concatStringsSep "," cudaCapabilities;
postPatch = '' postPatch = ''
# bazel 3.3 should work just as well as bazel 3.1
rm -f .bazelversion
'' + lib.optionalString (!withTensorboard) ''
# Tensorboard pulls in a bunch of dependencies, some of which may # Tensorboard pulls in a bunch of dependencies, some of which may
# include security vulnerabilities. So we make it optional. # include security vulnerabilities. So we make it optional.
# https://github.com/tensorflow/tensorflow/issues/20280#issuecomment-400230560 # https://github.com/tensorflow/tensorflow/issues/20280#issuecomment-400230560
sed -i '/tensorboard >=/d' tensorflow/tools/pip_package/setup.py sed -i '/tensorboard ~=/d' tensorflow/tools/pip_package/setup.py
# numpy 1.19 added in https://github.com/tensorflow/tensorflow/commit/75ea0b31477d6ba9e990e296bbbd8ca4e7eebadf.patch
sed -i 's/numpy >= 1.16.0, < 1.19.0/numpy >= 1.16.0/' tensorflow/tools/pip_package/setup.py
# bazel 3.3 should work just as well as bazel 3.1
rm -f .bazelversion
''; '';
# https://github.com/tensorflow/tensorflow/pull/39470 # https://github.com/tensorflow/tensorflow/pull/39470
@ -277,16 +282,15 @@ let
bazelTarget = "//tensorflow/tools/pip_package:build_pip_package //tensorflow/tools/lib_package:libtensorflow"; bazelTarget = "//tensorflow/tools/pip_package:build_pip_package //tensorflow/tools/lib_package:libtensorflow";
removeRulesCC = false; removeRulesCC = false;
# Without this Bazel complaints about sandbox violations.
dontAddBazelOpts = true;
fetchAttrs = { fetchAttrs = {
# So that checksums don't depend on these.
TF_SYSTEM_LIBS = null;
# cudaSupport causes fetch of ncclArchive, resulting in different hashes # cudaSupport causes fetch of ncclArchive, resulting in different hashes
sha256 = if cudaSupport then sha256 = if cudaSupport then
"sha256-lEdPA9vhYO6vd5FgPMbFp2PkRvDBurPidYsxtJLXcbQ=" "0vyy1hv0jy5pqwvnc8pxb9isgnbw07c4a4d4wn61db00np114crz"
else else
"sha256-ZEY/bWo5M3Juw1x3CwhXYXZHD4q5LzWDlhgXnh4P95U="; "0vczv5f9s4dxgwdkmf1y9b9ybh5d3y1nllqhb5q8aj9kq73izyn9";
}; };
buildAttrs = { buildAttrs = {
@ -329,15 +333,13 @@ let
license = licenses.asl20; license = licenses.asl20;
maintainers = with maintainers; [ jyp abbradar ]; maintainers = with maintainers; [ jyp abbradar ];
platforms = with platforms; linux ++ darwin; platforms = with platforms; linux ++ darwin;
# The py2 build fails due to some issue importing protobuf. Possibly related to the fix in broken = !(xlaSupport -> cudaSupport);
# https://github.com/akesandgren/easybuild-easyblocks/commit/1f2e517ddfd1b00a342c6abb55aef3fd93671a2b
broken = !(xlaSupport -> cudaSupport) || !isPy3k;
}; };
}; };
in buildPythonPackage { in buildPythonPackage {
inherit version pname; inherit version pname;
disabled = isPy27; disabled = !isPy3k;
src = bazel-build.python; src = bazel-build.python;
@ -354,27 +356,23 @@ in buildPythonPackage {
# tensorflow/tools/pip_package/setup.py # tensorflow/tools/pip_package/setup.py
propagatedBuildInputs = [ propagatedBuildInputs = [
absl-py absl-py
astor astunparse
dill
flatbuffers-python
gast gast
google-pasta google-pasta
keras-applications grpcio
h5py
keras-preprocessing keras-preprocessing
numpy numpy
six opt-einsum
protobuf protobuf
six
tblib
tensorflow-estimator_2 tensorflow-estimator_2
termcolor termcolor
typing-extensions
wrapt wrapt
grpcio
opt-einsum
astunparse
h5py
] ++ lib.optionals (!isPy3k) [
mock
future
functools32
] ++ lib.optionals (pythonOlder "3.4") [
backports_weakref enum34
] ++ lib.optionals withTensorboard [ ] ++ lib.optionals withTensorboard [
tensorflow-tensorboard_2 tensorflow-tensorboard_2
]; ];
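The hunk above adds a tensorboardSupport flag (default true) and drops Python 2 support. A minimal sketch, not part of this commit, of how a consumer could opt out of the TensorBoard dependency tree; it assumes that the tensorflow attribute in the Python package set resolves to this source build and that the usual callPackage-provided .override is available:

# illustrative only: build the package without TensorBoard
with import <nixpkgs> { };

python3.withPackages (ps: [
  # tensorboardSupport = false skips the tensorflow-tensorboard_2
  # propagated dependency added under 'lib.optionals withTensorboard'.
  (ps.tensorflow.override { tensorboardSupport = false; })
])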


@ -1,16 +1,51 @@
diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
index 594e74f40c0..bfbf010144f 100644 index 65133afdafe..8ef6364ff7e 100644
--- a/tensorflow/tools/pip_package/setup.py --- a/tensorflow/tools/pip_package/setup.py
+++ b/tensorflow/tools/pip_package/setup.py +++ b/tensorflow/tools/pip_package/setup.py
@@ -54,9 +54,9 @@ _VERSION = '2.3.1' @@ -75,23 +75,23 @@ if '--project_name' in sys.argv:
# comment the versioning scheme.
# NOTE: Please add test only packages to `TEST_PACKAGES` below.
REQUIRED_PACKAGES = [ REQUIRED_PACKAGES = [
'absl-py >= 0.7.0', - 'absl-py ~= 0.10',
'astunparse == 1.6.3', - 'astunparse ~= 1.6.3',
- 'flatbuffers ~= 1.12.0',
- 'google_pasta ~= 0.2',
- 'h5py ~= 2.10.0',
- 'keras_preprocessing ~= 1.1.2',
- 'numpy ~= 1.19.2',
- 'opt_einsum ~= 3.3.0',
+ 'absl-py >= 0.10',
+ 'astunparse >= 1.6.3',
+ 'flatbuffers >= 1.12.0',
+ 'google_pasta >= 0.2',
+ 'h5py >= 2.10.0',
+ 'keras_preprocessing >= 1.1.2',
+ 'numpy >= 1.19.1',
+ 'opt_einsum >= 3.3.0',
'protobuf >= 3.9.2',
- 'six ~= 1.15.0',
- 'termcolor ~= 1.1.0',
- 'typing_extensions ~= 3.7.4',
- 'wheel ~= 0.35',
- 'wrapt ~= 1.12.1',
+ 'six >= 1.15.0',
+ 'termcolor >= 1.1.0',
+ 'typing_extensions >= 3.7.4',
+ 'wheel >= 0.34.2',
+ 'wrapt >= 1.12.1',
# These packages needs to be pinned exactly as newer versions are
# incompatible with the rest of the ecosystem
- 'gast == 0.3.3', - 'gast == 0.3.3',
+ 'gast >= 0.3.3', + 'gast >= 0.3.3',
'google_pasta >= 0.1.8', # TensorFlow ecosystem packages that TF exposes API for
- 'h5py >= 2.10.0, < 2.11.0', # These need to be in sync with the existing TF version
+ 'h5py >= 2.10.0', # They are updated during the release process
'keras_preprocessing >= 1.1.1, < 1.2', @@ -118,7 +118,7 @@ if 'tf_nightly' in project_name:
# TODO(mihaimaruseac): numpy 1.19.0 has ABI breakage # BoringSSL support.
# https://github.com/numpy/numpy/pull/15355 # See https://github.com/tensorflow/tensorflow/issues/17882.
if sys.byteorder == 'little':
- REQUIRED_PACKAGES.append('grpcio ~= 1.32.0')
+ REQUIRED_PACKAGES.append('grpcio >= 1.31.0')
# Packages which are only needed for testing code.


@ -1,21 +0,0 @@
diff --git a/third_party/systemlibs/jsoncpp.BUILD b/third_party/systemlibs/jsoncpp.BUILD
index 526fd0c418..646f3fdcea 100644
--- a/third_party/systemlibs/jsoncpp.BUILD
+++ b/third_party/systemlibs/jsoncpp.BUILD
@@ -7,6 +7,7 @@ filegroup(
HEADERS = [
"include/json/autolink.h",
+ "include/json/allocator.h",
"include/json/config.h",
"include/json/features.h",
"include/json/forwards.h",
@@ -23,7 +24,7 @@ genrule(
cmd = """
for i in $(OUTS); do
i=$${i##*/}
- ln -sf $(INCLUDEDIR)/jsoncpp/json/$$i $(@D)/include/json/$$i
+ ln -sf $(INCLUDEDIR)/json/$$i $(@D)/include/json/$$i
done
""",
)


@ -0,0 +1,18 @@
diff --git a/WORKSPACE b/WORKSPACE
index 9db1d9b80eb..c46f13f4ca4 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -12,6 +12,13 @@ http_archive(
],
)
+http_archive(
+ name = "io_bazel_rules_docker",
+ sha256 = "1698624e878b0607052ae6131aa216d45ebb63871ec497f26c67455b34119c80",
+ strip_prefix = "rules_docker-0.15.0",
+ urls = ["https://github.com/bazelbuild/rules_docker/releases/download/v0.15.0/rules_docker-v0.15.0.tar.gz"],
+)
+
# Load tf_repositories() before loading dependencies for other repository so
# that dependencies like com_google_protobuf won't be overridden.
load("//tensorflow:workspace.bzl", "tf_repositories")


@ -0,0 +1,24 @@
{ lib, buildPythonPackage, fetchPypi, diff-match-patch }:
buildPythonPackage rec {
pname = "three-merge";
version = "0.1.1";
src = fetchPypi {
inherit pname version;
sha256 = "0w6rv7rv1zm901wbjkmm6d3vkwyf3csja9p37bb60mar8khszxk0";
};
propagatedBuildInputs = [ diff-match-patch ];
dontUseSetuptoolsCheck = true;
pythonImportsCheck = [ "three_merge" ];
meta = with lib; {
description = "Simple library for merging two strings with respect to a base one";
homepage = "https://github.com/spyder-ide/three-merge";
license = licenses.mit;
maintainers = with maintainers; [ SuperSandro2000 ];
};
}


@ -3,7 +3,6 @@
, pythonOlder , pythonOlder
, isPy27 , isPy27
, fetchFromGitHub , fetchFromGitHub
, nose
, noise , noise
, numpy , numpy
, pyplatec , pyplatec
@ -11,6 +10,7 @@
, purepng , purepng
, h5py , h5py
, gdal , gdal
, pytestCheckHook
}: }:
buildPythonPackage rec { buildPythonPackage rec {
@ -47,13 +47,12 @@ buildPythonPackage rec {
--replace 'PyPlatec==1.4.0' 'PyPlatec' \ --replace 'PyPlatec==1.4.0' 'PyPlatec' \
''; '';
# with python<3.5, unittest fails to discover tests because of their filenames
# so nose is used instead.
doCheck = !isPy27; # google namespace clash doCheck = !isPy27; # google namespace clash
checkInputs = stdenv.lib.optional (pythonOlder "3.5") [ nose ]; checkInputs = [ pytestCheckHook ];
postCheck = stdenv.lib.optionalString (pythonOlder "3.5") ''
nosetests tests disabledTests = [
''; "TestSerialization"
];
meta = with lib; { meta = with lib; {
homepage = "http://world-engine.org"; homepage = "http://world-engine.org";


@ -20,7 +20,8 @@ buildPythonPackage rec {
''; '';
# https://github.com/NixOS/nixpkgs/pull/107872#issuecomment-752175866 # https://github.com/NixOS/nixpkgs/pull/107872#issuecomment-752175866
doCheck = stdenv.isLinux; # cannot import name '_gi' from partially initialized module 'gi' (most likely due to a circular import)
doCheck = false;
meta = with lib; { meta = with lib; {
description = "An interactive viewer for graphs written in Graphviz's dot"; description = "An interactive viewer for graphs written in Graphviz's dot";


@ -0,0 +1,33 @@
From 9dbfa680db6bfd1033772dda753120fe4452e0d9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Milan=20P=C3=A4ssler?= <milan@petabyte.dev>
Date: Fri, 8 Jan 2021 04:49:14 +0100
Subject: [PATCH] fix include path for SDL2 on linux
---
.../src/main/java/arc/backend/sdl/jni/SDL.java | 8 --------
1 file changed, 8 deletions(-)
diff --git a/backends/backend-sdl/src/main/java/arc/backend/sdl/jni/SDL.java b/backends/backend-sdl/src/main/java/arc/backend/sdl/jni/SDL.java
index 62d9286a..2853119d 100644
--- a/Arc/backends/backend-sdl/src/main/java/arc/backend/sdl/jni/SDL.java
+++ b/Arc/backends/backend-sdl/src/main/java/arc/backend/sdl/jni/SDL.java
@@ -8,16 +8,8 @@ import java.nio.*;
public class SDL{
/*JNI
- #ifdef __APPLE__
-
#include "SDL2/SDL.h"
- #else
-
- #include "SDL.h"
-
- #endif
-
*/
static{
--
2.29.2


@ -1,12 +1,20 @@
{ lib, stdenv { lib, stdenv
, makeWrapper , makeWrapper
, makeDesktopItem , makeDesktopItem
, copyDesktopItems
, fetchFromGitHub , fetchFromGitHub
, fetchpatch
, gradleGen , gradleGen
, jdk , jdk
, perl , perl
, jre
# for arc
, SDL2
, pkg-config
, stb
, ant
, alsaLib , alsaLib
, glew
# Make the build version easily overridable. # Make the build version easily overridable.
# Server and client build versions must match, and an empty build version means # Server and client build versions must match, and an empty build version means
@ -25,12 +33,52 @@ let
version = "122.1"; version = "122.1";
buildVersion = makeBuildVersion version; buildVersion = makeBuildVersion version;
src = fetchFromGitHub { Mindustry = fetchFromGitHub {
owner = "Anuken"; owner = "Anuken";
repo = "Mindustry"; repo = "Mindustry";
rev = "v${version}"; rev = "v${version}";
sha256 = "18m4s81cfb2cr2fj61nf6spiln7cbvx25g42w6fypfikflv3qd8y"; sha256 = "18m4s81cfb2cr2fj61nf6spiln7cbvx25g42w6fypfikflv3qd8y";
}; };
Arc = fetchFromGitHub {
owner = "Anuken";
repo = "Arc";
rev = "v${version}";
sha256 = "0inzyj01442da7794cpxlaab7di9gv1snc97cbffqsdxgin16i7d";
};
soloud = fetchFromGitHub {
owner = "Anuken";
repo = "soloud";
# this is never pinned in upstream, see https://github.com/Anuken/Arc/issues/39
rev = "8553049c6fb0d1eaa7f57c1793b96219c84e8ba5";
sha256 = "076vnjs2qxd65qq5i37gbmj5v5i04a1vw0kznq986gv9190jj531";
};
patches = [
./0001-fix-include-path-for-SDL2-on-linux.patch
# upstream fix for https://github.com/Anuken/Arc/issues/40, remove on next release
(fetchpatch {
url = "https://github.com/Anuken/Arc/commit/b2f3d212c1a88a62f140f5cb04f4c86e61332d1c.patch";
sha256 = "1yjp4drv7lk3kinzy47g8jhb2qazr92b85vbc79vsqrs8sycskan";
extraPrefix = "Arc/";
stripLen = 1;
})
# add resolveDependencies task, remove when and if it gets added upstream in a future release
(fetchpatch {
url = "https://github.com/Anuken/Mindustry/pull/4302.patch";
sha256 = "0yp42sray4fxkajhpdljal0wss8jh9rvmclysw6cixsa94pw5khq";
extraPrefix = "Mindustry/";
stripLen = 1;
})
];
unpackPhase = ''
cp -r ${Mindustry} Mindustry
cp -r ${Arc} Arc
chmod -R u+w -- Mindustry Arc
cp ${stb.src}/stb_image.h Arc/arc-core/csrc/
cp -r ${soloud} Arc/arc-core/csrc/soloud
chmod -R u+w -- Arc
'';
desktopItem = makeDesktopItem { desktopItem = makeDesktopItem {
type = "Application"; type = "Application";
@ -40,7 +88,9 @@ let
icon = "mindustry"; icon = "mindustry";
}; };
postPatch = '' cleanupMindustrySrc = ''
pushd Mindustry
# Remove unbuildable iOS stuff # Remove unbuildable iOS stuff
sed -i '/^project(":ios"){/,/^}/d' build.gradle sed -i '/^project(":ios"){/,/^}/d' build.gradle
sed -i '/robo(vm|VM)/d' build.gradle sed -i '/robo(vm|VM)/d' build.gradle
@ -48,12 +98,11 @@ let
# Pin 'SNAPSHOT' versions # Pin 'SNAPSHOT' versions
sed -i 's/com.github.anuken:packr:-SNAPSHOT/com.github.anuken:packr:034efe51781d2d8faa90370492133241bfb0283c/' build.gradle sed -i 's/com.github.anuken:packr:-SNAPSHOT/com.github.anuken:packr:034efe51781d2d8faa90370492133241bfb0283c/' build.gradle
popd
''; '';
preBuild = '' preBuild = ''
# Arc is run at build time for sprite packing, and it needs to see
# the runtime libraries
${stdenv.lib.optionalString stdenv.isLinux "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${alsaLib}/lib"}
export GRADLE_USER_HOME=$(mktemp -d) export GRADLE_USER_HOME=$(mktemp -d)
''; '';
@ -63,15 +112,17 @@ let
# fake build to pre-download deps into fixed-output derivation # fake build to pre-download deps into fixed-output derivation
deps = stdenv.mkDerivation { deps = stdenv.mkDerivation {
pname = "${pname}-deps"; pname = "${pname}-deps";
inherit version src postPatch; inherit version unpackPhase patches;
postPatch = cleanupMindustrySrc;
nativeBuildInputs = [ gradle_6 perl ]; nativeBuildInputs = [ gradle_6 perl ];
# Here we build both the server and the client so we only have to specify # Here we download dependencies for both the server and the client so
# one hash for 'deps'. Deps can be garbage collected after the build, # we only have to specify one hash for 'deps'. Deps can be garbage
# so this is not really an issue. # collected after the build, so this is not really an issue.
buildPhase = '' buildPhase = preBuild + ''
${preBuild} pushd Mindustry
gradle --no-daemon desktop:dist -Pbuildversion=${buildVersion} gradle --no-daemon resolveDependencies
gradle --no-daemon server:dist -Pbuildversion=${buildVersion} popd
''; '';
# perl code mavenizes pathes (com.squareup.okio/okio/1.13.0/a9283170b7305c8d92d25aff02a6ab7e45d06cbe/okio-1.13.0.jar -> com/squareup/okio/okio/1.13.0/okio-1.13.0.jar) # perl code mavenizes pathes (com.squareup.okio/okio/1.13.0/a9283170b7305c8d92d25aff02a6ab7e45d06cbe/okio-1.13.0.jar -> com/squareup/okio/okio/1.13.0/okio-1.13.0.jar)
installPhase = '' installPhase = ''
@ -81,51 +132,65 @@ let
''; '';
outputHashAlgo = "sha256"; outputHashAlgo = "sha256";
outputHashMode = "recursive"; outputHashMode = "recursive";
outputHash = "0vzck6hsrvs438s3ikk66qmpak88bmqcb8inqbbjwy7x87d2qsvj"; outputHash = "09rwyrg2yv8r499b0dk1bzvymsf98d4j5b95bwd9s4xvrz71is3l";
}; };
# Separate commands for building and installing the server and the client
buildClient = ''
gradle --offline --no-daemon desktop:dist -Pbuildversion=${buildVersion}
'';
buildServer = ''
gradle --offline --no-daemon server:dist -Pbuildversion=${buildVersion}
'';
installClient = ''
install -Dm644 desktop/build/libs/Mindustry.jar $out/share/mindustry.jar
mkdir -p $out/bin
makeWrapper ${jre}/bin/java $out/bin/mindustry \
${stdenv.lib.optionalString stdenv.isLinux "--prefix LD_LIBRARY_PATH : ${alsaLib}/lib"} \
--add-flags "-jar $out/share/mindustry.jar"
install -Dm644 core/assets/icons/icon_64.png $out/share/icons/hicolor/64x64/apps/mindustry.png
install -Dm644 ${desktopItem}/share/applications/Mindustry.desktop $out/share/applications/Mindustry.desktop
'';
installServer = ''
install -Dm644 server/build/libs/server-release.jar $out/share/mindustry-server.jar
mkdir -p $out/bin
makeWrapper ${jre}/bin/java $out/bin/mindustry-server \
--add-flags "-jar $out/share/mindustry-server.jar"
'';
in in
assert stdenv.lib.assertMsg (enableClient || enableServer) assert stdenv.lib.assertMsg (enableClient || enableServer)
"mindustry: at least one of 'enableClient' and 'enableServer' must be true"; "mindustry: at least one of 'enableClient' and 'enableServer' must be true";
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
inherit pname version src postPatch; inherit pname version unpackPhase patches;
nativeBuildInputs = [ gradle_6 makeWrapper ]; postPatch = ''
# ensure the prebuilt shared objects don't accidentally get shipped
rm Arc/natives/natives-desktop/libs/libarc*.so
rm Arc/backends/backend-sdl/libs/linux64/libsdl-arc*.so
'' + cleanupMindustrySrc;
buildPhase = with stdenv.lib; '' buildInputs = [
${preBuild} SDL2
glew
alsaLib
];
nativeBuildInputs = [
pkg-config
gradle_6
makeWrapper
jdk
ant
copyDesktopItems
];
desktopItems = [ desktopItem ];
buildPhase = with stdenv.lib; preBuild + ''
# point to offline repo # point to offline repo
sed -ie "s#mavenLocal()#mavenLocal(); maven { url '${deps}' }#g" build.gradle sed -ie "s#mavenLocal()#mavenLocal(); maven { url '${deps}' }#g" Mindustry/build.gradle
${optionalString enableClient buildClient} sed -ie "s#mavenCentral()#mavenCentral(); maven { url '${deps}' }#g" Arc/build.gradle
${optionalString enableServer buildServer}
pushd Mindustry
'' + optionalString enableClient ''
gradle --offline --no-daemon jnigenBuild -Pbuildversion=${buildVersion}
gradle --offline --no-daemon sdlnatives -Pdynamic -Pbuildversion=${buildVersion}
patchelf ../Arc/backends/backend-sdl/libs/linux64/libsdl-arc*.so \
--add-needed ${glew.out}/lib/libGLEW.so \
--add-needed ${SDL2}/lib/libSDL2.so
gradle --offline --no-daemon desktop:dist -Pbuildversion=${buildVersion}
'' + optionalString enableServer ''
gradle --offline --no-daemon server:dist -Pbuildversion=${buildVersion}
''; '';
installPhase = with stdenv.lib; '' installPhase = with stdenv.lib; optionalString enableClient ''
${optionalString enableClient installClient} install -Dm644 desktop/build/libs/Mindustry.jar $out/share/mindustry.jar
${optionalString enableServer installServer} mkdir -p $out/bin
makeWrapper ${jdk}/bin/java $out/bin/mindustry \
--add-flags "-jar $out/share/mindustry.jar"
install -Dm644 core/assets/icons/icon_64.png $out/share/icons/hicolor/64x64/apps/mindustry.png
'' + optionalString enableServer ''
install -Dm644 server/build/libs/server-release.jar $out/share/mindustry-server.jar
mkdir -p $out/bin
makeWrapper ${jdk}/bin/java $out/bin/mindustry-server \
--add-flags "-jar $out/share/mindustry-server.jar"
''; '';
meta = with lib; { meta = with lib; {
@ -133,14 +198,10 @@ stdenv.mkDerivation rec {
downloadPage = "https://github.com/Anuken/Mindustry/releases"; downloadPage = "https://github.com/Anuken/Mindustry/releases";
description = "A sandbox tower defense game"; description = "A sandbox tower defense game";
license = licenses.gpl3Plus; license = licenses.gpl3Plus;
maintainers = with maintainers; [ fgaz ]; maintainers = with maintainers; [ fgaz petabyteboy ];
platforms = platforms.all; platforms = platforms.x86_64;
# Hash mismatch on darwin: # Hash mismatch on darwin:
# https://github.com/NixOS/nixpkgs/pull/105590#issuecomment-737120293 # https://github.com/NixOS/nixpkgs/pull/105590#issuecomment-737120293
# Problems with native libraries in aarch64: broken = stdenv.isDarwin;
# https://github.com/NixOS/nixpkgs/pull/107646
# https://logs.nix.ci/?key=nixos/nixpkgs.107646&attempt_id=3032c060-72e9-4a76-8186-4739544397dd
broken = stdenv.isDarwin ||
stdenv.isAarch64;
}; };
} }
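The enableClient/enableServer arguments guarded by the assertion above stay overridable through callPackage. As a minimal sketch (not part of this commit), a headless build can be requested the same way the existing mindustry-server attribute in all-packages.nix does:

with import <nixpkgs> { };

# Build only the dedicated server; the assertion above rejects the case
# where both flags are false.
mindustry.override {
  enableClient = false;
  enableServer = true;
}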


@ -1,30 +1,45 @@
From 3c6b7c0922370e9d0c1705706e7c47dcd234e6c8 Mon Sep 17 00:00:00 2001 From 3d0ce353cf62efea11aa88f814aa23bf8c04acc9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Milan=20P=C3=A4ssler?= <milan@petabyte.dev> From: =?UTF-8?q?Milan=20P=C3=A4ssler?= <milan@petabyte.dev>
Date: Wed, 30 Dec 2020 11:49:16 +0100 Date: Mon, 11 Jan 2021 15:13:10 +0100
Subject: [PATCH] configs/rpi: allow for bigger kernels Subject: [PATCH] configs/rpi: allow for bigger kernels
--- ---
include/configs/rpi.h | 8 ++++---- include/configs/rpi.h | 16 ++++++++--------
1 file changed, 4 insertions(+), 4 deletions(-) 1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/include/configs/rpi.h b/include/configs/rpi.h diff --git a/include/configs/rpi.h b/include/configs/rpi.h
index 834f1cd2..b63ee96f 100644 index 834f1cd..10ab1e7 100644
--- a/include/configs/rpi.h --- a/include/configs/rpi.h
+++ b/include/configs/rpi.h +++ b/include/configs/rpi.h
@@ -163,10 +163,10 @@ @@ -153,20 +153,20 @@
"fdt_high=" FDT_HIGH "\0" \ * more than ~700M away from the start of the kernel image but this number can
"initrd_high=" INITRD_HIGH "\0" \ * be larger OR smaller depending on e.g. the 'vmalloc=xxxM' command line
"kernel_addr_r=0x00080000\0" \ * parameter given to the kernel. So reserving memory from low to high
- "scriptaddr=0x02400000\0" \ - * satisfies this constraint again. Reserving 1M at 0x02600000-0x02700000 for
- "pxefile_addr_r=0x02500000\0" \ - * the DTB leaves rest of the free RAM to the initrd starting at 0x02700000.
- "fdt_addr_r=0x02600000\0" \ + * satisfies this constraint again. Reserving 1M at 0x02e00000-0x02f00000 for
- "ramdisk_addr_r=0x02700000\0" + * the DTB leaves rest of the free RAM to the initrd starting at 0x02f00000.
+ "scriptaddr=0x02c00000\0" \ * Even with the smallest possible CPU-GPU memory split of the CPU getting
+ "pxefile_addr_r=0x02d00000\0" \ - * only 64M, the remaining 25M starting at 0x02700000 should allow quite
+ "fdt_addr_r=0x02e00000\0" \ - * large initrds before they start colliding with U-Boot.
+ "ramdisk_addr_r=0x02f00000\0" + * only 64M, the remaining 17M starting at 0x02f00000 should allow reasonably
+ * sized initrds before they start colliding with U-Boot.
*/
#define ENV_MEM_LAYOUT_SETTINGS \
"fdt_high=" FDT_HIGH "\0" \
"initrd_high=" INITRD_HIGH "\0" \
"kernel_addr_r=0x00080000\0" \
- "scriptaddr=0x02400000\0" \
- "pxefile_addr_r=0x02500000\0" \
- "fdt_addr_r=0x02600000\0" \
- "ramdisk_addr_r=0x02700000\0"
+ "scriptaddr=0x02c00000\0" \
+ "pxefile_addr_r=0x02d00000\0" \
+ "fdt_addr_r=0x02e00000\0" \
+ "ramdisk_addr_r=0x02f00000\0"
#if CONFIG_IS_ENABLED(CMD_MMC) #if CONFIG_IS_ENABLED(CMD_MMC)
#define BOOT_TARGET_MMC(func) \ #define BOOT_TARGET_MMC(func) \
-- --
2.29.2 2.29.2
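A quick sanity check of the figures in the updated comment above, written as a sketch in Nix since the language has no hexadecimal literals (the addresses are therefore spelled out in decimal):

let
  cpuMem         = 64 * 1024 * 1024; # smallest CPU/GPU split: the CPU sees 64 MiB (0x04000000)
  ramdisk_addr_r = 49283072;         # the new initrd load address, 0x02f00000
in
  # nix-instantiate --eval on this file prints 17, matching the
  # "remaining 17M" mentioned in the updated comment.
  (cpuMem - ramdisk_addr_r) / (1024 * 1024)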


@ -424,7 +424,7 @@ let
MODULE_SIG = no; # r13y, generates a random key during build and bakes it in MODULE_SIG = no; # r13y, generates a random key during build and bakes it in
# Depends on MODULE_SIG and only really helps when you sign your modules # Depends on MODULE_SIG and only really helps when you sign your modules
# and enforce signatures which we don't do by default. # and enforce signatures which we don't do by default.
SECURITY_LOCKDOWN_LSM = no; SECURITY_LOCKDOWN_LSM = option no;
} // optionalAttrs (!stdenv.hostPlatform.isAarch32) { } // optionalAttrs (!stdenv.hostPlatform.isAarch32) {
# Detect buffer overflows on the stack # Detect buffer overflows on the stack


@ -3,7 +3,7 @@
with stdenv.lib; with stdenv.lib;
buildLinux (args // rec { buildLinux (args // rec {
version = "5.11-rc2"; version = "5.11-rc3";
extraMeta.branch = "5.11"; extraMeta.branch = "5.11";
# modDirVersion needs to be x.y.z, will always add .0 # modDirVersion needs to be x.y.z, will always add .0
@ -11,7 +11,7 @@ buildLinux (args // rec {
src = fetchurl { src = fetchurl {
url = "https://git.kernel.org/torvalds/t/linux-${version}.tar.gz"; url = "https://git.kernel.org/torvalds/t/linux-${version}.tar.gz";
sha256 = "092jgmzqfpylwbwhv7j8hy5y0ai14b6wm7p7vw6pxj7alixyynq0"; sha256 = "15dfgvicp7s9xqaa3w8lmfffzyjsqrq1fa2gs1a8awzs5rxgsn61";
}; };
# Should the testing kernels ever be built on Hydra? # Should the testing kernels ever be built on Hydra?


@ -105,14 +105,4 @@
name = "mac_nvme_t2"; name = "mac_nvme_t2";
patch = ./mac-nvme-t2.patch; patch = ./mac-nvme-t2.patch;
}; };
# https://lkml.org/lkml/2020/12/18/461
wireless_syntax_error = rec {
name = "wireless-syntax_error";
patch = fetchpatch {
name = name + ".patch";
url = "https://lkml.org/lkml/diff/2020/12/18/461/1";
sha256 = "11rnw9z7311crsx37sk68b71q51cni70lzf40ildqjnnn71m3q58";
};
};
} }


@ -2,7 +2,7 @@
# Do not edit! # Do not edit!
{ {
version = "2021.1.0"; version = "2021.1.1";
components = { components = {
"abode" = ps: with ps; [ abodepy ]; "abode" = ps: with ps; [ abodepy ];
"accuweather" = ps: with ps; [ accuweather ]; "accuweather" = ps: with ps; [ accuweather ];
@ -11,7 +11,7 @@
"actiontec" = ps: with ps; [ ]; "actiontec" = ps: with ps; [ ];
"adguard" = ps: with ps; [ adguardhome ]; "adguard" = ps: with ps; [ adguardhome ];
"ads" = ps: with ps; [ pyads ]; "ads" = ps: with ps; [ pyads ];
"advantage_air" = ps: with ps; [ advantage_air ]; "advantage_air" = ps: with ps; [ advantage-air ];
"aftership" = ps: with ps; [ pyaftership ]; "aftership" = ps: with ps; [ pyaftership ];
"agent_dvr" = ps: with ps; [ ]; # missing inputs: agent-py "agent_dvr" = ps: with ps; [ ]; # missing inputs: agent-py
"air_quality" = ps: with ps; [ ]; "air_quality" = ps: with ps; [ ];
@ -827,7 +827,7 @@
"telnet" = ps: with ps; [ ]; "telnet" = ps: with ps; [ ];
"temper" = ps: with ps; [ ]; # missing inputs: temperusb "temper" = ps: with ps; [ ]; # missing inputs: temperusb
"template" = ps: with ps; [ ]; "template" = ps: with ps; [ ];
"tensorflow" = ps: with ps; [ numpy pillow tensorflow-build_2 ]; # missing inputs: pycocotools tf-models-official "tensorflow" = ps: with ps; [ numpy pillow tensorflow ]; # missing inputs: pycocotools tf-models-official
"tesla" = ps: with ps; [ teslajsonpy ]; "tesla" = ps: with ps; [ teslajsonpy ];
"tfiac" = ps: with ps; [ ]; # missing inputs: pytfiac "tfiac" = ps: with ps; [ ]; # missing inputs: pytfiac
"thermoworks_smoke" = ps: with ps; [ stringcase ]; # missing inputs: thermoworks_smoke "thermoworks_smoke" = ps: with ps; [ stringcase ]; # missing inputs: thermoworks_smoke


@ -62,7 +62,7 @@ let
extraBuildInputs = extraPackages py.pkgs; extraBuildInputs = extraPackages py.pkgs;
# Don't forget to run parse-requirements.py after updating # Don't forget to run parse-requirements.py after updating
hassVersion = "2021.1.0"; hassVersion = "2021.1.1";
in with py.pkgs; buildPythonApplication rec { in with py.pkgs; buildPythonApplication rec {
pname = "homeassistant"; pname = "homeassistant";
@ -81,7 +81,7 @@ in with py.pkgs; buildPythonApplication rec {
owner = "home-assistant"; owner = "home-assistant";
repo = "core"; repo = "core";
rev = version; rev = version;
sha256 = "14njb2j16h536xq5df4zpna874fxjcd6fqr881y6mq081f00i0r0"; sha256 = "1linjv1hryqsh8y1rql1i95b4lz4h8siw847gm78m1z8niacz7ss";
}; };
# leave this in, so users don't have to constantly update their downstream patch handling # leave this in, so users don't have to constantly update their downstream patch handling


@ -1,8 +1,8 @@
{ lib, python3Packages, fetchFromGitHub, fetchpatch }: { lib, nixosTests, python3, python3Packages, fetchFromGitHub, fetchpatch }:
with python3Packages; with python3Packages;
buildPythonApplication rec { toPythonModule (buildPythonApplication rec {
pname = "searx"; pname = "searx";
version = "0.17.0"; version = "0.17.0";
@ -34,10 +34,18 @@ buildPythonApplication rec {
rm tests/test_robot.py # A variable that is imported is commented out rm tests/test_robot.py # A variable that is imported is commented out
''; '';
postInstall = ''
# Create a symlink for easier access to static data
mkdir -p $out/share
ln -s ../${python3.sitePackages}/searx/static $out/share/
'';
passthru.tests = { inherit (nixosTests) searx; };
meta = with lib; { meta = with lib; {
homepage = "https://github.com/asciimoo/searx"; homepage = "https://github.com/asciimoo/searx";
description = "A privacy-respecting, hackable metasearch engine"; description = "A privacy-respecting, hackable metasearch engine";
license = licenses.agpl3Plus; license = licenses.agpl3Plus;
maintainers = with maintainers; [ matejc fpletz globin danielfullmer ]; maintainers = with maintainers; [ matejc fpletz globin danielfullmer ];
}; };
} })
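The new postInstall above links the bundled static assets to $out/share/static, so a front-end web server can serve them straight from the store path. A minimal NixOS sketch, assuming an nginx virtual host (the searx.example.org name is illustrative, not part of this commit):

{ pkgs, ... }:

{
  services.nginx.enable = true;

  # Serve the assets exposed by the new $out/share/static symlink directly,
  # bypassing the Python application for /static/ requests.
  services.nginx.virtualHosts."searx.example.org" = {
    locations."/static/".alias = "${pkgs.searx}/share/static/";
  };
}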


@ -21,18 +21,18 @@ let
sources = name: system: { sources = name: system: {
x86_64-darwin = { x86_64-darwin = {
url = "${baseUrl}/${name}-darwin-x86_64.tar.gz"; url = "${baseUrl}/${name}-darwin-x86_64.tar.gz";
sha256 = "1miqvh2b3mxrrr63q8f5i944mp3rz6685ckmnk5fml2wyc273jiv"; sha256 = "1rfaa4b34mijlqxi9savzjplk1z83rs1z8iyx89zbw21fyry08kb";
}; };
x86_64-linux = { x86_64-linux = {
url = "${baseUrl}/${name}-linux-x86_64.tar.gz"; url = "${baseUrl}/${name}-linux-x86_64.tar.gz";
sha256 = "15kfsxn6j37rsw97ixj7ixkzcby0pkgc5xj7cpqdq975ym58sgv7"; sha256 = "15izl7bvv7m96nyrs93b60fivr7gi2i306ryq6ynxrbq0nq43iya";
}; };
}.${system}; }.${system};
in stdenv.mkDerivation rec { in stdenv.mkDerivation rec {
pname = "google-cloud-sdk"; pname = "google-cloud-sdk";
version = "321.0.0"; version = "322.0.0";
src = fetchurl (sources "${pname}-${version}" stdenv.hostPlatform.system); src = fetchurl (sources "${pname}-${version}" stdenv.hostPlatform.system);


@ -20,6 +20,8 @@ pythonPackages.buildPythonApplication rec {
homepage = "https://nagstamon.ifw-dresden.de/"; homepage = "https://nagstamon.ifw-dresden.de/";
license = licenses.gpl2; license = licenses.gpl2;
maintainers = with maintainers; [ pSub ]; maintainers = with maintainers; [ pSub ];
inherit version; # fails to install with:
# TypeError: cannot unpack non-iterable bool object
broken = true;
}; };
} }


@ -13361,6 +13361,10 @@ in
glew110 = callPackage ../development/libraries/glew/1.10.nix { glew110 = callPackage ../development/libraries/glew/1.10.nix {
inherit (darwin.apple_sdk.frameworks) AGL OpenGL; inherit (darwin.apple_sdk.frameworks) AGL OpenGL;
}; };
glew-egl = glew.overrideAttrs (oldAttrs: {
pname = "glew-egl";
makeFlags = oldAttrs.makeFlags ++ [ "SYSTEM=linux-egl" ];
});
glfw = glfw3; glfw = glfw3;
glfw2 = callPackage ../development/libraries/glfw/2.x.nix { }; glfw2 = callPackage ../development/libraries/glfw/2.x.nix { };
@ -18584,7 +18588,6 @@ in
kernelPatches = [ kernelPatches = [
kernelPatches.bridge_stp_helper kernelPatches.bridge_stp_helper
kernelPatches.request_key_helper kernelPatches.request_key_helper
kernelPatches.wireless_syntax_error
]; ];
}; };
@ -24038,6 +24041,10 @@ in
recode = callPackage ../tools/text/recode { }; recode = callPackage ../tools/text/recode { };
reddsaver = callPackage ../applications/misc/reddsaver {
inherit (darwin.apple_sdk.frameworks) Security;
};
rednotebook = python3Packages.callPackage ../applications/editors/rednotebook { }; rednotebook = python3Packages.callPackage ../applications/editors/rednotebook { };
remotebox = callPackage ../applications/virtualization/remotebox { }; remotebox = callPackage ../applications/virtualization/remotebox { };
@ -26156,6 +26163,7 @@ in
megaglest = callPackage ../games/megaglest {}; megaglest = callPackage ../games/megaglest {};
mindustry = callPackage ../games/mindustry { }; mindustry = callPackage ../games/mindustry { };
mindustry-wayland = callPackage ../games/mindustry { glew = glew-egl; };
mindustry-server = callPackage ../games/mindustry { mindustry-server = callPackage ../games/mindustry {
enableClient = false; enableClient = false;


@ -2771,6 +2771,8 @@ in {
guzzle_sphinx_theme = callPackage ../development/python-modules/guzzle_sphinx_theme { }; guzzle_sphinx_theme = callPackage ../development/python-modules/guzzle_sphinx_theme { };
gviz-api = callPackage ../development/python-modules/gviz-api {};
gwyddion = disabledIf isPy3k (toPythonModule (pkgs.gwyddion.override { gwyddion = disabledIf isPy3k (toPythonModule (pkgs.gwyddion.override {
pythonSupport = true; pythonSupport = true;
pythonPackages = self; pythonPackages = self;
@ -4088,6 +4090,8 @@ in {
mt-940 = callPackage ../development/python-modules/mt-940 { }; mt-940 = callPackage ../development/python-modules/mt-940 { };
mulpyplexer = callPackage ../development/python-modules/mulpyplexer { };
multidict = callPackage ../development/python-modules/multidict { }; multidict = callPackage ../development/python-modules/multidict { };
multi_key_dict = callPackage ../development/python-modules/multi_key_dict { }; multi_key_dict = callPackage ../development/python-modules/multi_key_dict { };
@ -5446,6 +5450,8 @@ in {
pyls-mypy = callPackage ../development/python-modules/pyls-mypy { }; pyls-mypy = callPackage ../development/python-modules/pyls-mypy { };
pyls-spyder = callPackage ../development/python-modules/pyls-spyder { };
PyLTI = callPackage ../development/python-modules/pylti { }; PyLTI = callPackage ../development/python-modules/pylti { };
pymacaroons = callPackage ../development/python-modules/pymacaroons { }; pymacaroons = callPackage ../development/python-modules/pymacaroons { };
@ -7255,6 +7261,8 @@ in {
statsmodels = callPackage ../development/python-modules/statsmodels { }; statsmodels = callPackage ../development/python-modules/statsmodels { };
stdiomask = callPackage ../development/python-modules/stdiomask { };
stem = callPackage ../development/python-modules/stem { }; stem = callPackage ../development/python-modules/stem { };
stevedore = callPackage ../development/python-modules/stevedore { }; stevedore = callPackage ../development/python-modules/stevedore { };
@ -7293,6 +7301,8 @@ in {
stytra = callPackage ../development/python-modules/stytra { }; stytra = callPackage ../development/python-modules/stytra { };
subarulink = callPackage ../development/python-modules/subarulink { };
subdownloader = callPackage ../development/python-modules/subdownloader { }; subdownloader = callPackage ../development/python-modules/subdownloader { };
subliminal = callPackage ../development/python-modules/subliminal { }; subliminal = callPackage ../development/python-modules/subliminal { };
@ -7389,6 +7399,10 @@ in {
tenacity = callPackage ../development/python-modules/tenacity { }; tenacity = callPackage ../development/python-modules/tenacity { };
tensorboard-plugin-profile = callPackage ../development/python-modules/tensorboard-plugin-profile { };
tensorboard-plugin-wit = callPackage ../development/python-modules/tensorboard-plugin-wit {};
tensorboardx = callPackage ../development/python-modules/tensorboardx { }; tensorboardx = callPackage ../development/python-modules/tensorboardx { };
tensorflow-bin_2 = callPackage ../development/python-modules/tensorflow/bin.nix { tensorflow-bin_2 = callPackage ../development/python-modules/tensorflow/bin.nix {
@ -7405,9 +7419,10 @@ in {
cudatoolkit = pkgs.cudatoolkit_11_0; cudatoolkit = pkgs.cudatoolkit_11_0;
cudnn = pkgs.cudnn_cudatoolkit_11_0; cudnn = pkgs.cudnn_cudatoolkit_11_0;
nccl = pkgs.nccl_cudatoolkit_11; nccl = pkgs.nccl_cudatoolkit_11;
openssl = pkgs.openssl_1_1;
inherit (pkgs.darwin.apple_sdk.frameworks) Foundation Security; inherit (pkgs.darwin.apple_sdk.frameworks) Foundation Security;
inherit (pkgs) flatbuffers; flatbuffers-core = pkgs.flatbuffers;
flatbuffers-python = self.flatbuffers;
lmdb-core = pkgs.lmdb;
}; };
tensorflow-build = self.tensorflow-build_2; tensorflow-build = self.tensorflow-build_2;
@ -7495,6 +7510,8 @@ in {
threadpoolctl = callPackage ../development/python-modules/threadpoolctl { }; threadpoolctl = callPackage ../development/python-modules/threadpoolctl { };
three-merge = callPackage ../development/python-modules/three-merge { };
thrift = callPackage ../development/python-modules/thrift { }; thrift = callPackage ../development/python-modules/thrift { };
thumbor = callPackage ../development/python-modules/thumbor { }; thumbor = callPackage ../development/python-modules/thumbor { };


@ -50,15 +50,6 @@ self: super: let
# ++ optional (super.stdenv.hostPlatform.libc == "glibc") ((flip overrideInStdenv) [ self.stdenv.glibc.static ]) # ++ optional (super.stdenv.hostPlatform.libc == "glibc") ((flip overrideInStdenv) [ self.stdenv.glibc.static ])
; ;
# Force everything to link statically.
haskellStaticAdapter = self: super: {
mkDerivation = attrs: super.mkDerivation (attrs // {
enableSharedLibraries = false;
enableSharedExecutables = false;
enableStaticLibraries = true;
});
};
removeUnknownConfigureFlags = f: with self.lib; removeUnknownConfigureFlags = f: with self.lib;
remove "--disable-shared" remove "--disable-shared"
(remove "--enable-static" f); (remove "--enable-static" f);
@ -102,12 +93,6 @@ in {
clangStdenv = foldl (flip id) super.clangStdenv staticAdapters; clangStdenv = foldl (flip id) super.clangStdenv staticAdapters;
libcxxStdenv = foldl (flip id) super.libcxxStdenv staticAdapters; libcxxStdenv = foldl (flip id) super.libcxxStdenv staticAdapters;
haskell = super.haskell // {
packageOverrides = composeExtensions
(super.haskell.packageOverrides or (_: _: {}))
haskellStaticAdapter;
};
zlib = super.zlib.override { zlib = super.zlib.override {
# Dont use new stdenv zlib because # Dont use new stdenv zlib because
# it doesnt like the --disable-shared flag # it doesnt like the --disable-shared flag