Merge staging-next into staging

This commit is contained in:
github-actions[bot] 2024-07-28 12:01:38 +00:00 committed by GitHub
commit 38b57cc2a9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
72 changed files with 1644 additions and 345 deletions

View File

@ -116,6 +116,55 @@ It has two modes:
: The `lychee` package to use.
## `shellcheck` {#tester-shellcheck}
Runs files through `shellcheck`, a static analysis tool for shell scripts.
:::{.example #ex-shellcheck}
# Run `testers.shellcheck`
A single script
```nix
testers.shellcheck {
name = "shellcheck";
src = ./script.sh;
}
```
Multiple files
```nix
let
inherit (lib) fileset;
in
testers.shellcheck {
name = "shellcheck";
src = fileset.toSource {
root = ./.;
fileset = fileset.unions [
./lib.sh
./nixbsd-activate
];
};
}
```
:::
### Inputs {#tester-shellcheck-inputs}
[`src` (path or string)]{#tester-shellcheck-param-src}
: The path to the shell script(s) to check.
This can be a single file or a directory containing shell files.
All files in `src` will be checked, so you may want to provide a `fileset`-based source instead of a whole directory.
### Return value {#tester-shellcheck-return}
A derivation that runs `shellcheck` on the given script(s).
The build will fail if `shellcheck` finds any issues.
## `testVersion` {#tester-testVersion}
Checks that the output from running a command contains the specified version string in it as a whole word.

View File

@ -20260,6 +20260,13 @@
githubId = 71843723;
keys = [ { fingerprint = "EEFB CC3A C529 CFD1 943D A75C BDD5 7BE9 9D55 5965"; } ];
};
thepuzzlemaker = {
name = "ThePuzzlemaker";
email = "tpzker@thepuzzlemaker.info";
github = "ThePuzzlemaker";
githubId = 12666617;
keys = [ { fingerprint = "7095 C20A 9224 3DB6 5177 07B0 968C D9D7 1C9F BB6C"; } ];
};
therealansh = {
email = "tyagiansh23@gmail.com";
github = "therealansh";
@ -21578,6 +21585,12 @@
githubId = 70410;
name = "Rahul Gopinath";
};
vsharathchandra = {
email = "chandrasharath.v@gmail.com";
github = "vsharathchandra";
githubId = 12689380;
name = "sharath chandra";
};
vskilet = {
email = "victor@sene.ovh";
github = "Vskilet";

View File

@ -12,6 +12,7 @@ let
mkDefault
mkIf
mkOption
stringAfter
types
;
@ -97,5 +98,8 @@ in
systemd.tmpfiles.rules = lib.mkIf cfg.channel.enable [
''f /root/.nix-channels - - - - ${config.system.defaultChannel} nixos\n''
];
system.activationScripts.no-nix-channel = mkIf (!cfg.channel.enable)
(stringAfter [ "etc" "users" ] (builtins.readFile ./nix-channel/activation-check.sh));
};
}

View File

@ -0,0 +1,21 @@
# shellcheck shell=bash
# Activation-time check: warn the administrator about leftover Nix channel
# state when channels have been disabled (nix.channel.enable = false).
# `warn` is provided by the activation environment (see lib/lib.sh) and
# already prints to stderr, so no call site needs its own redirection.
explainChannelWarning=0
if [[ -e "/root/.nix-defexpr/channels" ]]; then
  warn '/root/.nix-defexpr/channels exists, but channels have been disabled.'
  explainChannelWarning=1
fi
if [[ -e "/nix/var/nix/profiles/per-user/root/channels" ]]; then
  warn "/nix/var/nix/profiles/per-user/root/channels exists, but channels have been disabled."
  explainChannelWarning=1
fi
# Check every account's home directory as reported by the passwd database.
# Fix: dropped the stray `1>&2` this call site had — `warn` writes to stderr
# itself, so the redirect was redundant and inconsistent with the calls above.
while IFS=: read -r _ _ _ _ _ home _ ; do
  if [[ -n "$home" && -e "$home/.nix-defexpr/channels" ]]; then
    warn "$home/.nix-defexpr/channels exists, but channels have been disabled."
    explainChannelWarning=1
  fi
done < <(getent passwd)
# Explain once at the end why the leftover directories matter.
if [[ $explainChannelWarning -eq 1 ]]; then
  echo "Due to https://github.com/NixOS/nix/issues/9574, Nix may still use these channels when NIX_PATH is unset." 1>&2
  echo "Delete the above directory or directories to prevent this." 1>&2
fi

View File

@ -0,0 +1,19 @@
# Run:
#   nix-build -A nixosTests.nix-channel
{ lib, testers }:
let
  # Lint the channel-disabling activation snippet with shellcheck.
  runShellcheck = testers.shellcheck {
    src = lib.fileset.toSource {
      root = ./.;
      fileset = lib.fileset.unions [ ./activation-check.sh ];
    };
  };
in
lib.recurseIntoAttrs { inherit runShellcheck; }

View File

@ -415,6 +415,7 @@
./services/blockchain/ethereum/geth.nix
./services/blockchain/ethereum/lighthouse.nix
./services/cluster/corosync/default.nix
./services/cluster/druid/default.nix
./services/cluster/hadoop/default.nix
./services/cluster/k3s/default.nix
./services/cluster/kubernetes/addon-manager.nix

View File

@ -3,9 +3,18 @@
config,
pkgs,
...
}: let
}:
let
cfg = config.programs.direnv;
in {
enabledOption =
x:
lib.mkEnableOption x
// {
default = true;
example = false;
};
in
{
options.programs.direnv = {
enable = lib.mkEnableOption ''
@ -14,7 +23,17 @@ in {
integration. Note that you need to logout and login for this change to apply
'';
package = lib.mkPackageOption pkgs "direnv" {};
package = lib.mkPackageOption pkgs "direnv" { };
enableBashIntegration = enabledOption ''
Bash integration
'';
enableZshIntegration = enabledOption ''
Zsh integration
'';
enableFishIntegration = enabledOption ''
Fish integration
'';
direnvrcExtra = lib.mkOption {
type = lib.types.lines;
@ -32,22 +51,14 @@ in {
the hiding of direnv logging
'';
loadInNixShell =
lib.mkEnableOption ''
loading direnv in `nix-shell` `nix shell` or `nix develop`
''
// {
default = true;
};
loadInNixShell = enabledOption ''
loading direnv in `nix-shell` `nix shell` or `nix develop`
'';
nix-direnv = {
enable =
(lib.mkEnableOption ''
a faster, persistent implementation of use_nix and use_flake, to replace the built-in one
'')
// {
default = true;
};
enable = enabledOption ''
a faster, persistent implementation of use_nix and use_flake, to replace the builtin one
'';
package = lib.mkOption {
default = pkgs.nix-direnv.override { nix = config.nix.package; };
@ -60,14 +71,10 @@ in {
};
};
imports = [
(lib.mkRemovedOptionModule ["programs" "direnv" "persistDerivations"] "persistDerivations was removed as it is no longer necessary")
];
config = lib.mkIf cfg.enable {
programs = {
zsh.interactiveShellInit = ''
zsh.interactiveShellInit = lib.mkIf cfg.enableZshIntegration ''
if ${lib.boolToString cfg.loadInNixShell} || printenv PATH | grep -vqc '/nix/store'; then
eval "$(${lib.getExe cfg.package} hook zsh)"
fi
@ -75,13 +82,13 @@ in {
#$NIX_GCROOT for "nix develop" https://github.com/NixOS/nix/blob/6db66ebfc55769edd0c6bc70fcbd76246d4d26e0/src/nix/develop.cc#L530
#$IN_NIX_SHELL for "nix-shell"
bash.interactiveShellInit = ''
bash.interactiveShellInit = lib.mkIf cfg.enableBashIntegration ''
if ${lib.boolToString cfg.loadInNixShell} || [ -z "$IN_NIX_SHELL$NIX_GCROOT$(printenv PATH | grep '/nix/store')" ] ; then
eval "$(${lib.getExe cfg.package} hook bash)"
fi
'';
fish.interactiveShellInit = ''
fish.interactiveShellInit = lib.mkIf cfg.enableFishIntegration ''
if ${lib.boolToString cfg.loadInNixShell};
or printenv PATH | grep -vqc '/nix/store';
${lib.getExe cfg.package} hook fish | source
@ -90,18 +97,17 @@ in {
};
environment = {
systemPackages =
if cfg.loadInNixShell then [cfg.package]
else [
#direnv has a fish library which sources direnv for some reason
(cfg.package.overrideAttrs (old: {
installPhase =
(old.installPhase or "")
+ ''
rm -rf $out/share/fish
'';
}))
];
systemPackages = [
# direnv has a fish library which automatically sources direnv for some reason
# I don't see any harm in doing this if we're sourcing it with fish.interactiveShellInit
(pkgs.symlinkJoin {
inherit (cfg.package) name;
paths = [ cfg.package ];
postBuild = ''
rm -rf $out/share/fish
'';
})
];
variables = {
DIRENV_CONFIG = "/etc/direnv";
@ -141,4 +147,5 @@ in {
};
};
};
meta.maintainers = with lib.maintainers; [ gerg-l ];
}

View File

@ -16,9 +16,8 @@ in
{
options.services.speechd = {
# FIXME: figure out how to deprecate this EXTREMELY CAREFULLY
enable = mkEnableOption "speech-dispatcher speech synthesizer daemon" // {
default = true;
};
# default guessed conservatively in ../misc/graphical-desktop.nix
enable = mkEnableOption "speech-dispatcher speech synthesizer daemon";
package = mkPackageOption pkgs "speechd" { };
};

View File

@ -0,0 +1,296 @@
# NixOS module for Apache Druid cluster services (overlord, coordinator,
# broker, historical, middleManager, router).  Every service shares the
# option set produced by `druidServiceOption` and the systemd/firewall/user
# scaffolding produced by `druidServiceConfig`.
{
  config,
  lib,
  pkgs,
  ...
}:
let
  cfg = config.services.druid;
  inherit (lib)
    concatStrings
    concatStringsSep
    mapAttrsToList
    concatMap
    attrByPath
    mkIf
    mkMerge
    mkEnableOption
    mkOption
    types
    mkPackageOption
    ;
  # Option set shared by all druid services; `serviceName` is only
  # interpolated into descriptions.
  druidServiceOption = serviceName: {
    enable = mkEnableOption serviceName;
    restartIfChanged = mkOption {
      type = types.bool;
      description = ''
        Automatically restart the service on config change.
        This can be set to false to defer restarts on clusters running critical applications.
        Please consider the security implications of inadvertently running an older version,
        and the possibility of unexpected behavior caused by inconsistent versions across a cluster when disabling this option.
      '';
      default = false;
    };
    # NOTE(review): the example key below uses "druid.plainTextPort" while the
    # config section of this module looks up "druid.plaintextPort" — confirm
    # which capitalization upstream Druid actually reads.
    config = mkOption {
      default = { };
      type = types.attrsOf types.anything;
      description = ''
        (key=value) Configuration to be written to runtime.properties of the druid ${serviceName}
        <https://druid.apache.org/docs/latest/configuration/index.html>
      '';
      example = {
        "druid.plainTextPort" = "8082";
        "druid.service" = "servicename";
      };
    };
    jdk = mkPackageOption pkgs "JDK" { default = [ "jdk17_headless" ]; };
    jvmArgs = mkOption {
      type = types.str;
      default = "";
      description = "Arguments to pass to the JVM";
    };
    openFirewall = mkOption {
      type = types.bool;
      default = false;
      description = "Open firewall ports for ${serviceName}.";
    };
    # Populated by druidServiceConfig below (e.g. segment cache locations);
    # not intended to be set by users.
    internalConfig = mkOption {
      default = { };
      type = types.attrsOf types.anything;
      internal = true;
      description = "Internal Option to add to runtime.properties for ${serviceName}.";
    };
  };
  # Template that builds the systemd unit, tmpfiles rules, firewall rules
  # and druid user/group for a single druid service.  `extraConfig` is an
  # arbitrary NixOS config attrset merged on top.
  druidServiceConfig =
    {
      name,
      serviceOptions ? cfg."${name}",
      allowedTCPPorts ? [ ],
      tmpDirs ? [ ],
      extraConfig ? { },
    }:
    (mkIf serviceOptions.enable (mkMerge [
      {
        systemd = {
          services."druid-${name}" = {
            after = [ "network.target" ];
            description = "Druid ${name}";
            wantedBy = [ "multi-user.target" ];
            inherit (serviceOptions) restartIfChanged;
            path = [
              cfg.package
              serviceOptions.jdk
            ];
            script =
              let
                # Render an attrset as a java .properties file inside a store
                # directory so it can be placed on the classpath.
                cfgFile =
                  fileName: properties:
                  pkgs.writeTextDir fileName (
                    concatStringsSep "\n" (mapAttrsToList (n: v: "${n}=${toString v}") properties)
                  );
                commonConfigFile = cfgFile "common.runtime.properties" cfg.commonConfig;
                # Per-service properties; internalConfig wins over user config.
                configFile = cfgFile "runtime.properties" (serviceOptions.config // serviceOptions.internalConfig);
                extraClassPath = concatStrings (map (path: ":" + path) cfg.extraClassPaths);
                extraConfDir = concatStrings (map (dir: ":" + dir + "/*") cfg.extraConfDirs);
              in
              ''
                run-java -Dlog4j.configurationFile=file:${cfg.log4j} \
                -Ddruid.extensions.directory=${cfg.package}/extensions \
                -Ddruid.extensions.hadoopDependenciesDir=${cfg.package}/hadoop-dependencies \
                -classpath ${commonConfigFile}:${configFile}:${cfg.package}/lib/\*${extraClassPath}${extraConfDir} \
                ${serviceOptions.jvmArgs} \
                org.apache.druid.cli.Main server ${name}
              '';
            serviceConfig = {
              User = "druid";
              SyslogIdentifier = "druid-${name}";
              Restart = "always";
            };
          };
          # Pre-create working/log directories owned by the druid user.
          tmpfiles.rules = concatMap (x: [ "d ${x} 0755 druid druid" ]) (cfg.commonTmpDirs ++ tmpDirs);
        };
        networking.firewall.allowedTCPPorts = mkIf (attrByPath [
          "openFirewall"
        ] false serviceOptions) allowedTCPPorts;
        users = {
          users.druid = {
            description = "Druid user";
            group = "druid";
            isNormalUser = true;
          };
          groups.druid = { };
        };
      }
      extraConfig
    ]));
in
{
  options.services.druid = {
    package = mkPackageOption pkgs "apache-druid" { default = [ "druid" ]; };
    commonConfig = mkOption {
      default = { };
      type = types.attrsOf types.anything;
      description = "(key=value) Configuration to be written to common.runtime.properties";
      example = {
        "druid.zk.service.host" = "localhost:2181";
        "druid.metadata.storage.type" = "mysql";
        "druid.metadata.storage.connector.connectURI" = "jdbc:mysql://localhost:3306/druid";
        "druid.extensions.loadList" = ''[ "mysql-metadata-storage" ]'';
      };
    };
    commonTmpDirs = mkOption {
      default = [ "/var/log/druid/requests" ];
      type = types.listOf types.str;
      description = "Common List of directories used by druid processes";
    };
    log4j = mkOption {
      type = types.path;
      description = "Log4j Configuration for the druid process";
    };
    extraClassPaths = mkOption {
      default = [ ];
      type = types.listOf types.str;
      description = "Extra classpath to include in the jvm";
    };
    extraConfDirs = mkOption {
      default = [ ];
      type = types.listOf types.path;
      description = "Extra Conf Dirs to include in the jvm";
    };
    overlord = druidServiceOption "Druid Overlord";
    coordinator = druidServiceOption "Druid Coordinator";
    broker = druidServiceOption "Druid Broker";
    # historical gets an extra option for its on-disk segment cache.
    historical = (druidServiceOption "Druid Historical") // {
      segmentLocations = mkOption {
        default = null;
        description = "Locations where the historical will store its data.";
        type =
          with types;
          nullOr (
            listOf (submodule {
              options = {
                path = mkOption {
                  type = path;
                  description = "the path to store the segments";
                };
                maxSize = mkOption {
                  type = str;
                  description = "Max size the druid historical can occupy";
                };
                freeSpacePercent = mkOption {
                  type = float;
                  default = 1.0;
                  description = "Druid Historical will fail to write if it exceeds this value";
                };
              };
            })
          );
      };
    };
    middleManager = druidServiceOption "Druid middleManager";
    router = druidServiceOption "Druid Router";
  };
  config = mkMerge [
    (druidServiceConfig rec {
      name = "overlord";
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8090 cfg."${name}".config) ];
    })
    (druidServiceConfig rec {
      name = "coordinator";
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8081 cfg."${name}".config) ];
    })
    (druidServiceConfig rec {
      name = "broker";
      tmpDirs = [ (attrByPath [ "druid.lookup.snapshotWorkingDir" ] "" cfg."${name}".config) ];
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8082 cfg."${name}".config) ];
    })
    (druidServiceConfig rec {
      name = "historical";
      # NOTE(review): segmentLocations defaults to null, and `map` over null
      # is an evaluation error — confirm an enabled historical always sets
      # segmentLocations, or guard this with a null check.
      tmpDirs = [
        (attrByPath [ "druid.lookup.snapshotWorkingDir" ] "" cfg."${name}".config)
      ] ++ (map (x: x.path) cfg."${name}".segmentLocations);
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8083 cfg."${name}".config) ];
      extraConfig.services.druid.historical.internalConfig."druid.segmentCache.locations" = builtins.toJSON cfg.historical.segmentLocations;
    })
    (druidServiceConfig rec {
      name = "middleManager";
      tmpDirs = [
        "/var/log/druid/indexer"
      ] ++ [ (attrByPath [ "druid.indexer.task.baseTaskDir" ] "" cfg."${name}".config) ];
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8091 cfg."${name}".config) ];
      extraConfig = {
        services.druid.middleManager.internalConfig = {
          # Peons are forked by the middleManager; point them at the same JDK
          # and log4j configuration as the parent service.
          "druid.indexer.runner.javaCommand" = "${cfg.middleManager.jdk}/bin/java";
          "druid.indexer.runner.javaOpts" =
            (attrByPath [ "druid.indexer.runner.javaOpts" ] "" cfg.middleManager.config)
            + " -Dlog4j.configurationFile=file:${cfg.log4j}";
        };
        # Peon task ports are a range, not a single port.
        networking.firewall.allowedTCPPortRanges = mkIf cfg.middleManager.openFirewall [
          {
            from = attrByPath [ "druid.indexer.runner.startPort" ] 8100 cfg.middleManager.config;
            to = attrByPath [ "druid.indexer.runner.endPort" ] 65535 cfg.middleManager.config;
          }
        ];
      };
    })
    (druidServiceConfig rec {
      name = "router";
      allowedTCPPorts = [ (attrByPath [ "druid.plaintextPort" ] 8888 cfg."${name}".config) ];
    })
  ];
}

View File

@ -42,6 +42,8 @@ in
programs.gnupg.agent.pinentryPackage = lib.mkOverride 1100 pkgs.pinentry-gnome3;
services.speechd.enable = lib.mkDefault true;
systemd.defaultUnit = lib.mkIf (xcfg.autorun || dmcfg.enable) "graphical.target";
xdg = {

View File

@ -33,6 +33,8 @@ let
''
#!${pkgs.runtimeShell}
source ${./lib/lib.sh}
systemConfig='@out@'
export PATH=/empty

View File

@ -0,0 +1,5 @@
# shellcheck shell=bash

# warn MESSAGE...
# Join all arguments into one message and print it on stderr with a
# bold magenta "warning:" prefix (ANSI escape sequences).
warn() {
  printf >&2 "\033[1;35mwarning:\033[0m %s\n" "$*"
}

View File

@ -0,0 +1,36 @@
# Run:
#   nix-build -A nixosTests.activation-lib
{ lib, stdenv, testers }:
let
  # Execute test.sh against lib.sh in a derivation's check phase.
  runTests = stdenv.mkDerivation {
    name = "tests-activation-lib";
    src = lib.fileset.toSource {
      root = ./.;
      fileset = lib.fileset.unions [
        ./lib.sh
        ./test.sh
      ];
    };
    postUnpack = ''
      patchShebangs --build .
    '';
    buildPhase = ":";
    doCheck = true;
    checkPhase = ''
      ./test.sh
    '';
    installPhase = ''
      touch $out
    '';
  };
  # Lint the same source tree with shellcheck.
  runShellcheck = testers.shellcheck { inherit (runTests) src; };
in
lib.recurseIntoAttrs { inherit runTests runShellcheck; }

View File

@ -0,0 +1,34 @@
#!/usr/bin/env bash
# Unit test for the activation-script helper library (lib.sh).
# Run:
# ./test.sh
# or:
# nix-build -A nixosTests.activation-lib

# Operate relative to this script's directory so `./lib.sh` resolves
# regardless of the caller's working directory.
cd "$(dirname "${BASH_SOURCE[0]}")"
set -euo pipefail

# report failure
# Runs via the ERR trap: walks the bash call stack with `caller` to show
# where the failing statement was, then exits non-zero.
onerr() {
set +e
# find failed statement
echo "call trace:"
local i=0
# `caller N` prints "LINE FILE" for frame N until the stack runs out.
while t="$(caller $i)"; do
line="${t%% *}"
file="${t##* }"
echo " $file:$line" >&2
((i++))
done
# red
printf "\033[1;31mtest failed\033[0m\n" >&2
exit 1
}
trap onerr ERR

source ./lib.sh

# warn must write to stderr only (stdout is discarded by >/dev/null) and
# the captured stderr must contain the "warning:" prefix and the message.
(warn hi, this works >/dev/null) 2>&1 | grep -E $'.*warning:.* hi, this works' >/dev/null

# green
printf "\033[1;32mok\033[0m\n"

View File

@ -69,7 +69,7 @@ in
type = types.bool;
default = false;
description = ''
**Deprecated**, please use virtualisation.containers.cdi.dynamic.nvidia.enable instead.
**Deprecated**, please use hardware.nvidia-container-toolkit.enable instead.
Enable nvidia-docker wrapper, supporting NVIDIA GPUs inside docker containers.
'';
@ -186,7 +186,7 @@ in
# wrappers.
warnings = lib.optionals (cfg.enableNvidia && (lib.strings.versionAtLeast cfg.package.version "25")) [
''
You have set virtualisation.docker.enableNvidia. This option is deprecated, please set virtualisation.containers.cdi.dynamic.nvidia.enable instead.
You have set virtualisation.docker.enableNvidia. This option is deprecated, please set hardware.nvidia-container-toolkit.enable instead.
''
];

View File

@ -82,7 +82,7 @@ in
type = types.bool;
default = false;
description = ''
**Deprecated**, please use virtualisation.containers.cdi.dynamic.nvidia.enable instead.
**Deprecated**, please use hardware.nvidia-container-toolkit.enable instead.
Enable use of NVidia GPUs from within podman containers.
'';

View File

@ -275,6 +275,7 @@ in {
dovecot = handleTest ./dovecot.nix {};
drawterm = discoverTests (import ./drawterm.nix);
drbd = handleTest ./drbd.nix {};
druid = handleTestOn [ "x86_64-linux" ] ./druid {};
dublin-traceroute = handleTest ./dublin-traceroute.nix {};
earlyoom = handleTestOn ["x86_64-linux"] ./earlyoom.nix {};
early-mount-options = handleTest ./early-mount-options.nix {};
@ -300,6 +301,7 @@ in {
esphome = handleTest ./esphome.nix {};
etc = pkgs.callPackage ../modules/system/etc/test.nix { inherit evalMinimalConfig; };
activation = pkgs.callPackage ../modules/system/activation/test.nix { };
activation-lib = pkgs.callPackage ../modules/system/activation/lib/test.nix { };
activation-var = runTest ./activation/var.nix;
activation-nix-channel = runTest ./activation/nix-channel.nix;
activation-etc-overlay-mutable = runTest ./activation/etc-overlay-mutable.nix;
@ -623,6 +625,7 @@ in {
nbd = handleTest ./nbd.nix {};
ncdns = handleTest ./ncdns.nix {};
ndppd = handleTest ./ndppd.nix {};
nix-channel = pkgs.callPackage ../modules/config/nix-channel/test.nix { };
nebula = handleTest ./nebula.nix {};
netbird = handleTest ./netbird.nix {};
nimdow = handleTest ./nimdow.nix {};

View File

@ -0,0 +1,289 @@
# NixOS VM test for the services.druid module: a full druid cluster
# (zookeeper, HDFS namenode/datanode, MySQL metadata store, and one node
# per druid service) that ingests the bundled wikipedia sample data and
# queries it through the broker.
{ pkgs, ... }:
let
  inherit (pkgs) lib;
  # runtime.properties shared by every druid node in the cluster.
  commonConfig = {
    "druid.zk.service.host" = "zk1:2181";
    "druid.extensions.loadList" = ''[ "druid-histogram", "druid-datasketches", "mysql-metadata-storage", "druid-avro-extensions", "druid-parquet-extensions", "druid-lookups-cached-global", "druid-hdfs-storage","druid-kafka-indexing-service","druid-basic-security","druid-kinesis-indexing-service"]'';
    "druid.startup.logging.logProperties" = "true";
    "druid.metadata.storage.connector.connectURI" = "jdbc:mysql://mysql:3306/druid";
    "druid.metadata.storage.connector.user" = "druid";
    "druid.metadata.storage.connector.password" = "druid";
    "druid.request.logging.type" = "file";
    "druid.request.logging.dir" = "/var/log/druid/requests";
    "druid.javascript.enabled" = "true";
    "druid.sql.enable" = "true";
    "druid.metadata.storage.type" = "mysql";
    "druid.storage.type" = "hdfs";
    "druid.storage.storageDirectory" = "/druid-deepstore";
  };
  # Minimal log4j2 setup: errors only, to the console.
  log4jConfig = ''
    <?xml version="1.0" encoding="UTF-8" ?>
    <Configuration status="WARN">
    <Appenders>
    <Console name="Console" target="SYSTEM_OUT">
    <PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/>
    </Console>
    </Appenders>
    <Loggers>
    <Root level="error">
    <AppenderRef ref="Console"/>
    </Root>
    </Loggers>
    </Configuration>
  '';
  log4j = pkgs.writeText "log4j2.xml" log4jConfig;
  coreSite = {
    "fs.defaultFS" = "hdfs://namenode:8020";
  };
  tests = {
    default = testsForPackage {
      druidPackage = pkgs.druid;
      hadoopPackage = pkgs.hadoop_3_2;
    };
  };
  # `passthru.override` lets callers re-instantiate the test set with
  # different druid/hadoop packages.
  testsForPackage =
    args:
    lib.recurseIntoAttrs {
      druidCluster = testDruidCluster args;
      passthru.override = args': testsForPackage (args // args');
    };
  testDruidCluster =
    { druidPackage, hadoopPackage, ... }:
    pkgs.testers.nixosTest {
      name = "druid-hdfs";
      nodes = {
        zk1 =
          { ... }:
          {
            services.zookeeper.enable = true;
            networking.firewall.allowedTCPPorts = [ 2181 ];
          };
        namenode =
          { ... }:
          {
            services.hadoop = {
              package = hadoopPackage;
              hdfs = {
                namenode = {
                  enable = true;
                  openFirewall = true;
                  formatOnInit = true;
                };
              };
              inherit coreSite;
            };
          };
        datanode =
          { ... }:
          {
            services.hadoop = {
              package = hadoopPackage;
              hdfs.datanode = {
                enable = true;
                openFirewall = true;
              };
              inherit coreSite;
            };
          };
        # middleManager node; also used to stage the sample data.
        mm =
          { ... }:
          {
            virtualisation.memorySize = 1024;
            services.druid = {
              inherit commonConfig log4j;
              package = druidPackage;
              extraClassPaths = [ "/etc/hadoop-conf" ];
              middleManager = {
                config = {
                  "druid.indexer.task.baseTaskDir" = "/tmp/druid/persistent/task";
                  "druid.worker.capacity" = 1;
                  "druid.indexer.logs.type" = "file";
                  "druid.indexer.logs.directory" = "/var/log/druid/indexer";
                  "druid.indexer.runner.startPort" = 8100;
                  "druid.indexer.runner.endPort" = 8101;
                };
                enable = true;
                openFirewall = true;
              };
            };
            services.hadoop = {
              gatewayRole.enable = true;
              package = hadoopPackage;
              inherit coreSite;
            };
          };
        overlord =
          { ... }:
          {
            services.druid = {
              inherit commonConfig log4j;
              package = druidPackage;
              extraClassPaths = [ "/etc/hadoop-conf" ];
              overlord = {
                config = {
                  "druid.indexer.runner.type" = "remote";
                  "druid.indexer.storage.type" = "metadata";
                };
                enable = true;
                openFirewall = true;
              };
            };
            services.hadoop = {
              gatewayRole.enable = true;
              package = hadoopPackage;
              inherit coreSite;
            };
          };
        broker =
          { ... }:
          {
            services.druid = {
              package = druidPackage;
              inherit commonConfig log4j;
              extraClassPaths = [ "/etc/hadoop-conf" ];
              broker = {
                config = {
                  "druid.plaintextPort" = 8082;
                  "druid.broker.http.numConnections" = "2";
                  "druid.server.http.numThreads" = "2";
                  "druid.processing.buffer.sizeBytes" = "100";
                  "druid.processing.numThreads" = "1";
                  "druid.processing.numMergeBuffers" = "1";
                  "druid.broker.cache.unCacheable" = ''["groupBy"]'';
                  "druid.lookup.snapshotWorkingDir" = "/opt/broker/lookups";
                };
                enable = true;
                openFirewall = true;
              };
            };
            services.hadoop = {
              gatewayRole.enable = true;
              package = hadoopPackage;
              inherit coreSite;
            };
          };
        historical =
          { ... }:
          {
            services.druid = {
              package = druidPackage;
              inherit commonConfig log4j;
              extraClassPaths = [ "/etc/hadoop-conf" ];
              historical = {
                config = {
                  "maxSize" = 200000000;
                  "druid.lookup.snapshotWorkingDir" = "/opt/historical/lookups";
                };
                segmentLocations = [
                  {
                    "path" = "/tmp/1";
                    "maxSize" = "100000000";
                  }
                  {
                    "path" = "/tmp/2";
                    "maxSize" = "100000000";
                  }
                ];
                enable = true;
                openFirewall = true;
              };
            };
            services.hadoop = {
              gatewayRole.enable = true;
              package = hadoopPackage;
              inherit coreSite;
            };
          };
        coordinator =
          { ... }:
          {
            services.druid = {
              package = druidPackage;
              inherit commonConfig log4j;
              extraClassPaths = [ "/etc/hadoop-conf" ];
              coordinator = {
                config = {
                  "druid.plaintextPort" = 9091;
                  "druid.service" = "coordinator";
                  "druid.coordinator.startDelay" = "PT10S";
                  "druid.coordinator.period" = "PT10S";
                  "druid.manager.config.pollDuration" = "PT10S";
                  "druid.manager.segments.pollDuration" = "PT10S";
                  "druid.manager.rules.pollDuration" = "PT10S";
                };
                enable = true;
                openFirewall = true;
              };
            };
            services.hadoop = {
              gatewayRole.enable = true;
              package = hadoopPackage;
              inherit coreSite;
            };
          };
        mysql =
          { ... }:
          {
            services.mysql = {
              enable = true;
              package = pkgs.mariadb;
              initialDatabases = [ { name = "druid"; } ];
              initialScript = pkgs.writeText "mysql-init.sql" ''
                CREATE USER 'druid'@'%' IDENTIFIED BY 'druid';
                GRANT ALL PRIVILEGES ON druid.* TO 'druid'@'%';
              '';
            };
            networking.firewall.allowedTCPPorts = [ 3306 ];
          };
      };
      # NOTE(review): the script below interpolates ${pkgs.druid} rather than
      # the druidPackage argument, so passthru.override with a different druid
      # package still stages pkgs.druid's quickstart data — confirm intended.
      testScript = ''
        start_all()
        namenode.wait_for_unit("hdfs-namenode")
        namenode.wait_for_unit("network.target")
        namenode.wait_for_open_port(8020)
        namenode.succeed("ss -tulpne | systemd-cat")
        namenode.succeed("cat /etc/hadoop*/hdfs-site.xml | systemd-cat")
        namenode.wait_for_open_port(9870)
        datanode.wait_for_unit("hdfs-datanode")
        datanode.wait_for_unit("network.target")
        mm.succeed("mkdir -p /quickstart/")
        mm.succeed("cp -r ${pkgs.druid}/quickstart/* /quickstart/")
        mm.succeed("touch /quickstart/tutorial/wikiticker-2015-09-12-sampled.json")
        mm.succeed("zcat /quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz | head -n 10 > /quickstart/tutorial/wikiticker-2015-09-12-sampled.json || true")
        mm.succeed("rm /quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz && gzip /quickstart/tutorial/wikiticker-2015-09-12-sampled.json")
        namenode.succeed("sudo -u hdfs hdfs dfs -mkdir /druid-deepstore")
        namenode.succeed("HADOOP_USER_NAME=druid sudo -u hdfs hdfs dfs -chown druid:hadoop /druid-deepstore")
        ### Druid tests
        coordinator.wait_for_unit("druid-coordinator")
        overlord.wait_for_unit("druid-overlord")
        historical.wait_for_unit("druid-historical")
        mm.wait_for_unit("druid-middleManager")
        coordinator.wait_for_open_port(9091)
        overlord.wait_for_open_port(8090)
        historical.wait_for_open_port(8083)
        mm.wait_for_open_port(8091)
        broker.wait_for_unit("network.target")
        broker.wait_for_open_port(8082)
        broker.succeed("curl -X 'POST' -H 'Content-Type:application/json' -d @${pkgs.druid}/quickstart/tutorial/wikipedia-index.json http://coordinator:9091/druid/indexer/v1/task")
        broker.wait_until_succeeds("curl http://coordinator:9091/druid/coordinator/v1/metadata/datasources | grep 'wikipedia'")
        broker.wait_until_succeeds("curl http://localhost:8082/druid/v2/datasources/ | grep wikipedia")
        broker.succeed("curl -X 'POST' -H 'Content-Type:application/json' -d @${pkgs.druid}/quickstart/tutorial/wikipedia-top-pages.json http://localhost:8082/druid/v2/")
      '';
    };
in
tests

View File

@ -463,7 +463,32 @@ let
""")
with subtest("Switch to flake based config"):
target.succeed("nixos-rebuild switch --flake /root/my-config#xyz")
target.succeed("nixos-rebuild switch --flake /root/my-config#xyz 2>&1 | tee activation-log >&2")
target.succeed("""
cat -n activation-log >&2
""")
target.succeed("""
grep -F '/root/.nix-defexpr/channels exists, but channels have been disabled.' activation-log
""")
target.succeed("""
grep -F '/nix/var/nix/profiles/per-user/root/channels exists, but channels have been disabled.' activation-log
""")
target.succeed("""
grep -F '/root/.nix-defexpr/channels exists, but channels have been disabled.' activation-log
""")
target.succeed("""
grep -F 'Due to https://github.com/NixOS/nix/issues/9574, Nix may still use these channels when NIX_PATH is unset.' activation-log
""")
target.succeed("rm activation-log")
# Perform the suggested cleanups we've just seen in the log
# TODO after https://github.com/NixOS/nix/issues/9574: don't remove them yet
target.succeed("""
rm -rf /root/.nix-defexpr/channels /nix/var/nix/profiles/per-user/root/channels /root/.nix-defexpr/channels
""")
target.shutdown()
@ -474,10 +499,20 @@ let
# Note that the channel profile is still present on disk, but configured
# not to be used.
with subtest("builtins.nixPath is now empty"):
target.succeed("""
[[ "[ ]" == "$(nix-instantiate builtins.nixPath --eval --expr)" ]]
""")
# TODO after issue https://github.com/NixOS/nix/issues/9574: re-enable this assertion
# I believe what happens is
# - because of the issue, we've removed the `nix-path =` line from nix.conf
# - the "backdoor" shell is not a proper session and does not have `NIX_PATH=""` set
# - seeing no nix path settings at all, Nix loads its hardcoded default value,
# which is unfortunately non-empty
# Or maybe it's the new default NIX_PATH?? :(
# with subtest("builtins.nixPath is now empty"):
# target.succeed("""
# (
# set -x;
# [[ "[ ]" == "$(nix-instantiate builtins.nixPath --eval --expr)" ]];
# )
# """)
with subtest("<nixpkgs> does not resolve"):
target.succeed("""
@ -491,12 +526,16 @@ let
target.succeed("""
(
exec 1>&2
rm -v /root/.nix-channels
rm -vf /root/.nix-channels
rm -vrf ~/.nix-defexpr
rm -vrf /nix/var/nix/profiles/per-user/root/channels*
)
""")
target.succeed("nixos-rebuild switch --flake /root/my-config#xyz")
target.succeed("nixos-rebuild switch --flake /root/my-config#xyz | tee activation-log >&2")
target.succeed("cat -n activation-log >&2")
target.succeed("! grep -F '/root/.nix-defexpr/channels' activation-log")
target.succeed("! grep -F 'but channels have been disabled' activation-log")
target.succeed("! grep -F 'https://github.com/NixOS/nix/issues/9574' activation-log")
target.shutdown()
'';

View File

@ -15,7 +15,9 @@ in
cask = callPackage ./manual-packages/cask { };
codeium = callPackage ./manual-packages/codeium { };
codeium = callPackage ./manual-packages/codeium {
inherit (pkgs) codeium;
};
consult-gh = callPackage ./manual-packages/consult-gh { };

View File

@ -1,9 +1,10 @@
{
lib,
codeium,
fetchFromGitHub,
melpaBuild,
pkgs,
lib,
substituteAll,
gitUpdater,
}:
melpaBuild {
@ -19,17 +20,19 @@ melpaBuild {
patches = [
(substituteAll {
src = ./codeium.el.patch;
codeium = "${pkgs.codeium}/bin/codeium_language_server";
src = ./0000-set-codeium-command-executable.patch;
codeium = lib.getExe' codeium "codeium_language_server";
})
];
passthru.updateScript = gitUpdater { };
meta = {
description = "Free, ultrafast Copilot alternative for Emacs";
homepage = "https://github.com/Exafunction/codeium.el";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.running-grass ];
platforms = pkgs.codeium.meta.platforms;
inherit (codeium.meta) platforms;
sourceProvenance = [ lib.sourceTypes.fromSource ];
};

View File

@ -1437,12 +1437,12 @@
sniprun =
let
version = "1.3.14";
version = "1.3.15";
src = fetchFromGitHub {
owner = "michaelb";
repo = "sniprun";
rev = "refs/tags/v${version}";
hash = "sha256-9vglmQ9sy0aCbj4H81ublHclpoSfOA7ss5CNdoX54sY=";
hash = "sha256-8N+KUawQ6RI6sG8m9wpvJTMQyJ5j/43PRkrTPrWAREQ=";
};
sniprun-bin = rustPlatform.buildRustPackage {
pname = "sniprun-bin";
@ -1452,7 +1452,7 @@
darwin.apple_sdk.frameworks.Security
];
cargoHash = "sha256-p4rZBgB3xQC14hRRTjNZT1G1gbaKydlKu6MYNSLk6iA=";
cargoHash = "sha256-bLki+6uMKJtk/bu+LNf2E1m/HpEG8zmnM3JI89IjmNs=";
nativeBuildInputs = [ makeWrapper ];

View File

@ -2,7 +2,7 @@
let
pname = "joplin-desktop";
version = "3.0.12";
version = "3.0.13";
inherit (stdenv.hostPlatform) system;
throwSystem = throw "Unsupported system: ${system}";
@ -16,7 +16,7 @@ let
src = fetchurl {
url = "https://github.com/laurent22/joplin/releases/download/v${version}/Joplin-${version}${suffix}";
sha256 = {
x86_64-linux = "sha256-vMz+ZeBHP+9Ugy8KO8lbp8zqC8VHtf1TWw10YytQFSs=";
x86_64-linux = "sha256-/B7udtkRP8rOYzXupWSEGg0FrJoRJ63l4uLtQWe2CZ8=";
x86_64-darwin = "sha256-XZN1jTv/FhJXuFxZ6D6h/vFMdKi84Z9UWfj2CrMgBBA=";
aarch64-darwin = "sha256-lsODOBkZ4+x5D6Er2/paTzAMKZvqIBVkKrWHh5iRvrk=";
}.${system} or throwSystem;

View File

@ -140,4 +140,6 @@
hasPkgConfigModules = callPackage ./hasPkgConfigModules/tester.nix { };
testMetaPkgConfig = callPackage ./testMetaPkgConfig/tester.nix { };
shellcheck = callPackage ./shellcheck/tester.nix { };
}

View File

@ -0,0 +1,3 @@
#!/usr/bin/env bash
# Negative test fixture for testers.shellcheck: the unquoted $@ below
# deliberately triggers shellcheck SC2068, and ./tests.nix asserts the
# tester's build fails with that code.  Do not "fix" this line.
echo $@

View File

@ -0,0 +1,28 @@
# Dependencies (callPackage)
{ lib, stdenv, shellcheck }:
# testers.shellcheck function
# Docs: doc/build-helpers/testers.chapter.md
# Tests: ./tests.nix
{ src }:
let
inherit (lib) fileset pathType isPath;
in
stdenv.mkDerivation {
name = "run-shellcheck";
src =
if isPath src && pathType src == "regular" # note that for strings this would have been IFD, which we prefer to avoid
then fileset.toSource { root = dirOf src; fileset = src; }
else src;
nativeBuildInputs = [ shellcheck ];
doCheck = true;
dontConfigure = true;
dontBuild = true;
checkPhase = ''
find . -type f -print0 \
| xargs -0 shellcheck
'';
installPhase = ''
touch $out
'';
}

View File

@ -0,0 +1,38 @@
# Run:
# nix-build -A tests.testers.shellcheck
{ lib, testers, runCommand }:
let
inherit (lib) fileset;
in
lib.recurseIntoAttrs {
# Directory (fileset) input: the tester must fail on example.sh, and the
# captured build log must contain the expected SC2068 finding.
example-dir = runCommand "test-testers-shellcheck-example-dir" {
failure = testers.testBuildFailure
(testers.shellcheck {
src = fileset.toSource {
root = ./.;
fileset = fileset.unions [
./example.sh
];
};
});
} ''
log="$failure/testBuildFailure.log"
echo "Checking $log"
grep SC2068 "$log"
touch $out
'';
# Single-file input: same expectation, exercising the tester's automatic
# wrapping of a bare file path into a fileset source.
example-file = runCommand "test-testers-shellcheck-example-file" {
failure = testers.testBuildFailure
(testers.shellcheck {
src = ./example.sh;
});
} ''
log="$failure/testBuildFailure.log"
echo "Checking $log"
grep SC2068 "$log"
touch $out
'';
}

View File

@ -16,6 +16,8 @@ lib.recurseIntoAttrs {
hasPkgConfigModules = pkgs.callPackage ../hasPkgConfigModules/tests.nix { };
shellcheck = pkgs.callPackage ../shellcheck/tests.nix { };
runNixOSTest-example = pkgs-with-overlay.testers.runNixOSTest ({ lib, ... }: {
name = "runNixOSTest-test";
nodes.machine = { pkgs, ... }: {

View File

@ -0,0 +1,82 @@
{
lib,
stdenv,
fetchurl,
# Attribute set mapping an extension directory name to a list of files or
# directories whose contents are copied into $out/extensions/<name>.
extensions ? { },
# Extra jars copied into $out/lib.
libJars ? [ ],
nixosTests,
# Whether to install the MySQL JDBC connector into the
# mysql-metadata-storage extension directory.
mysqlSupport ? true,
}:
let
inherit (lib)
concatStringsSep
licenses
maintainers
mapAttrsToList
optionalString
forEach
;
in
stdenv.mkDerivation (finalAttrs: {
pname = "apache-druid";
version = "30.0.0";
src = fetchurl {
url = "mirror://apache/druid/${finalAttrs.version}/apache-druid-${finalAttrs.version}-bin.tar.gz";
hash = "sha256-mRYorVkNzM94LP53G78eW20N5UsvMP7Lv4rAysmPwXw=";
};
# NOTE(review): pinned legacy 5.1.x MySQL connector; presumably what the
# mysql-metadata-storage extension expects — confirm when updating Druid.
mysqlConnector = fetchurl {
url = "https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.48/mysql-connector-java-5.1.48.jar";
hash = "sha256-VuJsqqOCH1rkr0T5x09mz4uE6gFRatOAPLsOkEm27Kg=";
};
# Upstream ships a pre-built binary distribution; nothing to compile.
dontBuild = true;
# Shell snippet: for each configured extension, create its directory (if
# missing) and copy the listed files (or directory contents) into it.
loadExtensions = (
concatStringsSep "\n" (
mapAttrsToList (
dir: files:
''
if ! test -d $out/extensions/${dir}; then
mkdir $out/extensions/${dir};
fi
''
+ concatStringsSep "\n" (
forEach files (file: ''
if test -d ${file} ; then
cp ${file}/* $out/extensions/${dir}/
else
cp ${file} $out/extensions/${dir}/
fi
'')
)
) extensions
)
);
# Shell snippet copying each extra jar into $out/lib.
loadJars = concatStringsSep "\n" (forEach libJars (jar: "cp ${jar} $out/lib/"));
installPhase = ''
runHook preInstall
mkdir $out
mv * $out
${optionalString mysqlSupport "cp ${finalAttrs.mysqlConnector} $out/extensions/mysql-metadata-storage"}
${finalAttrs.loadExtensions}
${finalAttrs.loadJars}
runHook postInstall
'';
passthru = {
tests = nixosTests.druid.default.passthru.override { druidPackage = finalAttrs.finalPackage; };
};
meta = {
description = "Apache Druid: a high performance real-time analytics database";
homepage = "https://github.com/apache/druid";
license = licenses.asl20;
maintainers = with maintainers; [ vsharathchandra ];
mainProgram = "druid";
};
})

View File

@ -10,13 +10,13 @@
stdenv.mkDerivation rec {
pname = "marwaita-red";
version = "20.2-unstable-2024-07-01";
version = "20.3.1";
src = fetchFromGitHub {
owner = "darkomarko42";
repo = pname;
rev = "79c65e37774395f7fa51ed1416874aa78f768d54";
hash = "sha256-GmVen97oJel4KVm+IwV8GTemIyHnQ4XjvGclUjdGDvw=";
rev = version;
hash = "sha256-cMZDd/WQFrfr6Zrq1/1It26OmML3cf7+ZU/I8IMjuX4=";
};
buildInputs = [

View File

@ -8,6 +8,8 @@
, ninja
, wrapGAppsHook4
, libadwaita
, xdotool
, wl-clipboard
}:
python3.pkgs.buildPythonApplication rec {
@ -44,7 +46,13 @@ python3.pkgs.buildPythonApplication rec {
dontWrapGApps = true;
preFixup = ''
makeWrapperArgs+=("''${gappsWrapperArgs[@]}")
makeWrapperArgs+=(
"''${gappsWrapperArgs[@]}"
--prefix PATH : ${lib.makeBinPath [
xdotool
wl-clipboard
]}
)
'';
meta = {

View File

@ -6,13 +6,13 @@
}:
stdenvNoCC.mkDerivation (finalAttrs: {
pname = "stevenblack-blocklist";
version = "3.14.88";
version = "3.14.90";
src = fetchFromGitHub {
owner = "StevenBlack";
repo = "hosts";
rev = "refs/tags/${finalAttrs.version}";
hash = "sha256-tS7CDuotk+aAbtOR1x3u3ymyRvPgb3GZjuDcJNm6lZs=";
hash = "sha256-0/niQ0qWzGesqWIe/NZ2SD0Pdvk3GRsY1mT24eFMpt8=";
};
outputs = [

View File

@ -8,13 +8,13 @@
stdenvNoCC.mkDerivation (finalAttrs: {
pname = "ubuntu-sans-mono";
version = "1.004";
version = "1.006";
src = fetchFromGitHub {
owner = "canonical";
repo = "Ubuntu-Sans-Mono-fonts";
rev = "v${finalAttrs.version}";
hash = "sha256-IjfjFsXRYK2l6i4Q/LoYuwu5t18TmVXXJQDSsW45qNc=";
hash = "sha256-EFZZnMZTQHo2Tr9/rtb7C5gAlQ/0uYT+MQ1gKkqQ5hE=";
};
installPhase = ''

View File

@ -0,0 +1,25 @@
From 78e78dc10e2a4fd84ef0041d346d8f6d162f22c6 Mon Sep 17 00:00:00 2001
From: ThePuzzlemaker <tpzker@thepuzzlemaker.info>
Date: Wed, 24 Jul 2024 21:18:04 -0500
Subject: [PATCH] Fix build on JDK 21+
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 5a3254b..42b1405 100644
--- a/pom.xml
+++ b/pom.xml
@@ -59,7 +59,7 @@
<maven.compiler.target>${java.version}</maven.compiler.target>
<java.version>15</java.version>
- <lombok.version>1.18.22</lombok.version>
+ <lombok.version>1.18.30</lombok.version>
<javafx.version>17.0.1</javafx.version>
<slf4j.version>1.7.32</slf4j.version>
<mvvmfx.version>1.9.0-SNAPSHOT</mvvmfx.version>
--
2.44.1

View File

@ -0,0 +1,102 @@
{
lib,
stdenv,
jdk,
maven,
makeWrapper,
fetchFromGitHub,
libGL,
libxkbcommon,
wayland,
fontconfig,
libX11,
libXcursor,
libXi,
libXrandr,
libXxf86vm,
libXtst,
copyDesktopItems,
makeDesktopItem,
}:
let
# Runtime library path for JavaFX native code (GL, X11/Wayland, fonts);
# appended to LD_LIBRARY_PATH by the wrapper below.
libPath = lib.makeLibraryPath [
libGL
libxkbcommon
wayland
libX11
libXcursor
libXi
libXrandr
libXxf86vm
libXtst
fontconfig
];
in
maven.buildMavenPackage rec {
pname = "vatprism";
version = "0.3.5";
src = fetchFromGitHub {
owner = "marvk";
repo = "vatprism";
rev = "refs/tags/v${version}";
hash = "sha256-ofEwHUCm79roHe2bawmKFw2QHhIonnlkFG5nhE6uN+g=";
};
nativeBuildInputs = [
makeWrapper
copyDesktopItems
];
# https://github.com/marvk/vatprism/pull/141
patches = [ ./0001-Fix-build-on-JDK-21.patch ];
desktopItems = [
(makeDesktopItem {
name = "vatprism";
desktopName = "VATprism";
exec = "vatprism";
terminal = false;
icon = "vatprism";
})
];
# NOTE(review): the dependency closure apparently differs on aarch64-linux,
# hence the platform-specific mvnHash — re-check on Maven dependency bumps.
mvnHash =
if (stdenv.isLinux && stdenv.isAarch64) then
"sha256-x0nFt2C7dZqMdllI1+Io9SPBY2J/dVgBTVb9T24vFFI="
else
"sha256-9uyNCUqnMgpiwm2kz544pWNB/SkRpASm2Dln0e4yZos=";
installPhase = ''
runHook preInstall
# create the bin directory
mkdir -p $out/bin $out/share/icons/hicolor/256x256/apps
# copy out the JAR
# Maven already setup the classpath to use m2 repository layout
# with the prefix of lib/
cp target-fat-jar/vatsim-map-${version}-fat.jar $out/
cp src/main/resources/net/marvk/fs/vatsim/map/icon-256.png $out/share/icons/hicolor/256x256/apps/vatprism.png
# create a wrapper that will automatically set the classpath
# this should be the paths from the dependency derivation
makeWrapper ${jdk}/bin/java $out/bin/${pname} \
--add-flags "-jar $out/vatsim-map-${version}-fat.jar" \
--set JAVA_HOME ${jdk.home} \
--suffix LD_LIBRARY_PATH : ${libPath}
runHook postInstall
'';
meta = {
description = "VATSIM map and data explorer";
longDescription = ''
VATprism is a VATSIM Map and VATSIM Data Explorer, VATSIM being the
Virtual Air Traffic Simulation Network. VATprism allows users to explore
available ATC services, connected pilots, Airports, Flight and Upper
Information Regions and more!
'';
homepage = "https://vatprism.org/";
mainProgram = "vatprism";
license = lib.licenses.agpl3Plus;
platforms = lib.platforms.linux;
maintainers = with lib.maintainers; [ thepuzzlemaker ];
};
}

View File

@ -25,6 +25,8 @@ rustPlatform.buildRustPackage rec {
cargoHash = "sha256-qnbinuTuaPiD7ib3aCJzSwuA4s3naFzi+txqX7jkHIo=";
env.YAZI_GEN_COMPLETIONS = true;
env.VERGEN_GIT_SHA = "Nixpkgs";
env.VERGEN_BUILD_DATE = "2024-04-23";
# TODO: remove in the next release
cargoBuildFlags = [

View File

@ -2,11 +2,11 @@
stdenvNoCC.mkDerivation rec {
pname = "phinger-cursors";
version = "2.0";
version = "2.1";
src = fetchurl {
url = "https://github.com/phisch/phinger-cursors/releases/download/v${version}/phinger-cursors-variants.tar.bz2";
sha256 = "sha256-A12BGtc0+wDqeSGN4lbUe5G3Pv4IsQB4TkvWHnDU6bE=";
sha256 = "sha256-3bcxDGK/jg4nmKJPioZ+Svexejl1e6RcheE/OYj2Rvw=";
};
sourceRoot = ".";

View File

@ -6,13 +6,13 @@
stdenvNoCC.mkDerivation (self: {
pname = "alacritty-theme";
version = "0-unstable-2024-07-16";
version = "0-unstable-2024-07-25";
src = fetchFromGitHub {
owner = "alacritty";
repo = "alacritty-theme";
rev = "cafca9764653f0bd536073a0f882227f04ddc256";
hash = "sha256-2MZOZ1nVu9lSBAuvCgebtWjX5uoEqMqd8jcEjOfTTMM=";
rev = "bcc5ec1bdecb4a799a6bc8ad3a5b206b3058d6df";
hash = "sha256-IRAUY/59InKYLRfMYI78wSKC6+KI/7aOtOhQNUqdjOA=";
};
dontConfigure = true;

View File

@ -1,23 +1,24 @@
{ llvmPackages
, lib
, fetchFromGitHub
, cmake
, python3
, curl
, libxml2
, libffi
, xar
, testers
{
llvmPackages,
lib,
fetchFromGitHub,
cmake,
python3,
curl,
libxml2,
libffi,
xar,
testers,
}:
llvmPackages.stdenv.mkDerivation (finalAttrs: {
pname = "c3c";
version = "0.5.5";
version = "0.6.1";
src = fetchFromGitHub {
owner = "c3lang";
repo = "c3c";
rev = "refs/tags/${finalAttrs.version}";
rev = "refs/tags/v${finalAttrs.version}";
hash = "sha256-iOljE1BRVc92NJZj+nr1G6KkBTCwJEUOadXHUDNoPGk=";
};
@ -26,9 +27,7 @@ llvmPackages.stdenv.mkDerivation (finalAttrs: {
--replace-fail "\''${LLVM_LIBRARY_DIRS}" "${llvmPackages.lld.lib}/lib ${llvmPackages.llvm.lib}/lib"
'';
nativeBuildInputs = [
cmake
];
nativeBuildInputs = [ cmake ];
buildInputs = [
llvmPackages.llvm
@ -36,9 +35,7 @@ llvmPackages.stdenv.mkDerivation (finalAttrs: {
curl
libxml2
libffi
] ++ lib.optionals llvmPackages.stdenv.isDarwin [
xar
];
] ++ lib.optionals llvmPackages.stdenv.isDarwin [ xar ];
nativeCheckInputs = [ python3 ];
@ -52,16 +49,17 @@ llvmPackages.stdenv.mkDerivation (finalAttrs: {
'';
passthru.tests = {
version = testers.testVersion {
package = finalAttrs.finalPackage;
};
version = testers.testVersion { package = finalAttrs.finalPackage; };
};
meta = with lib; {
description = "Compiler for the C3 language";
homepage = "https://github.com/c3lang/c3c";
license = licenses.lgpl3Only;
maintainers = with maintainers; [ luc65r ];
maintainers = with maintainers; [
luc65r
anas
];
platforms = platforms.all;
mainProgram = "c3c";
};

View File

@ -4,6 +4,7 @@
, ncurses6, gmp, libiconv, numactl
, llvmPackages
, coreutils
, rcodesign
, targetPackages
# minimal = true; will remove files that aren't strictly necessary for
@ -190,7 +191,15 @@ stdenv.mkDerivation rec {
# https://gitlab.haskell.org/ghc/ghc/-/issues/20059
# and update this comment accordingly.
nativeBuildInputs = [ perl ];
nativeBuildInputs = [ perl ]
# Upstream binaries may not be linker-signed, which invalidates their signatures
# because `install_name_tool` will only replace a signature if it is both
# an ad hoc signature and the signature is flagged as linker-signed.
#
# rcodesign is used to replace the signature instead of sigtool because it
# supports setting the linker-signed flag, which will ensure future processing
# of the binaries does not invalidate their signatures.
++ lib.optionals (stdenv.isDarwin && stdenv.isAarch64) [ rcodesign ];
# Set LD_LIBRARY_PATH or equivalent so that the programs running as part
# of the bindist installer can find the libraries they expect.
@ -236,15 +245,20 @@ stdenv.mkDerivation rec {
])
# GHC has dtrace probes, which causes ld to try to open /usr/lib/libdtrace.dylib
# during linking
+ lib.optionalString stdenv.isDarwin ''
+ lib.optionalString stdenv.isDarwin (''
export NIX_LDFLAGS+=" -no_dtrace_dof"
# not enough room in the object files for the full path to libiconv :(
for exe in $(find . -type f -executable); do
isScript $exe && continue
ln -fs ${libiconv}/lib/libiconv.dylib $(dirname $exe)/libiconv.dylib
install_name_tool -change /usr/lib/libiconv.2.dylib @executable_path/libiconv.dylib -change /usr/local/lib/gcc/6/libgcc_s.1.dylib ${gcc.cc.lib}/lib/libgcc_s.1.dylib $exe
'' + lib.optionalString stdenv.isAarch64 ''
# Resign the binary and set the linker-signed flag. Ignore failures when the file is an object file.
# Object files dont have signatures, so ignoring the failures is harmless.
rcodesign sign --code-signature-flags linker-signed $exe || true
'' + ''
done
'' +
'') +
# Some scripts used during the build need to have their shebangs patched
''

View File

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "argparse";
version = "3.0";
version = "3.1";
src = fetchFromGitHub {
owner = "p-ranav";
repo = "argparse";
rev = "v${version}";
sha256 = "sha256-0fgMy7Q9BiQ/C1tmhuNpQgad8yzaLYxh5f6Ps38f2mk=";
sha256 = "sha256-JH9t/e/6RaSz+3LjmuNBYOy5gJT3zA+Vz4CxuwEXlvA=";
};
postPatch = ''

View File

@ -72,7 +72,7 @@ let
"address-model=${toString stdenv.hostPlatform.parsed.cpu.bits}"
"architecture=${if stdenv.hostPlatform.isMips64
then if lib.versionOlder version "1.78" then "mips1" else "mips"
else if stdenv.hostPlatform.parsed.cpu.name == "s390x" then "s390x"
else if stdenv.hostPlatform.isS390 then "s390x"
else toString stdenv.hostPlatform.parsed.cpu.family}"
# env in host triplet for Mach-O is "macho", but boost binary format for Mach-O is "mach-o"
"binary-format=${if stdenv.hostPlatform.isMacho then "mach-o"

View File

@ -11,6 +11,7 @@
, rustc
, cargo
, cargo-c
, lld
, nasm
, gstreamer
, gst-plugins-base
@ -199,10 +200,14 @@ stdenv.mkDerivation (finalAttrs: {
cargo
cargo-c'
nasm
] ++ lib.optionals stdenv.isDarwin [
lld
] ++ lib.optionals enableDocumentation [
hotdoc
];
env = lib.optionalAttrs stdenv.isDarwin { NIX_CFLAGS_LINK = "-fuse-ld=lld"; };
buildInputs = [
gstreamer
gst-plugins-base

View File

@ -84,7 +84,7 @@ stdenv.mkDerivation rec {
python3
sqlite
zlib
libxml2
(libxml2.override { enableHttp = true; })
postgresql
protozero
sparsehash

View File

@ -49,7 +49,7 @@ stdenv.mkDerivation (finalAttrs: {
laszip
libgeotiff
libtiff
libxml2
(libxml2.override { enableHttp = true; })
openscenegraph
postgresql
proj

View File

@ -1,4 +1,16 @@
{ lib, stdenv, fetchFromGitHub, cmake }:
{
lib,
stdenv,
fetchFromGitHub,
cmake,
# Options
# The submodules in the pico-sdk contain important additional functionality
# such as tinyusb, but not all these libraries might be bsd3.
# Off by default.
withSubmodules ? false,
}:
stdenv.mkDerivation (finalAttrs: {
pname = "pico-sdk";
@ -8,7 +20,11 @@ stdenv.mkDerivation (finalAttrs: {
owner = "raspberrypi";
repo = "pico-sdk";
rev = finalAttrs.version;
hash = "sha256-JNcxd86XNNiPkvipVFR3X255boMmq+YcuJXUP4JwInU=";
fetchSubmodules = withSubmodules;
hash = if (withSubmodules) then
"sha256-GY5jjJzaENL3ftuU5KpEZAmEZgyFRtLwGVg3W1e/4Ho="
else
"sha256-JNcxd86XNNiPkvipVFR3X255boMmq+YcuJXUP4JwInU=";
};
nativeBuildInputs = [ cmake ];

View File

@ -36,20 +36,19 @@ stdenv.mkDerivation (finalAttrs: {
ninja
];
buildInputs = lib.optionals finalAttrs.doCheck [
buildInputs = [
gbenchmark
gtest
];
propagatedBuildInputs = [
abseil-cpp
icu
];
propagatedBuildInputs = [ abseil-cpp ] ++ lib.optionals (!stdenv.hostPlatform.isStatic) [ icu ];
cmakeFlags = [
(lib.cmakeBool "RE2_BUILD_TESTING" finalAttrs.doCheck)
(lib.cmakeBool "RE2_USE_ICU" true)
] ++ lib.optional (!stdenv.hostPlatform.isStatic) (lib.cmakeBool "BUILD_SHARED_LIBS" true);
cmakeFlags =
[ (lib.cmakeBool "RE2_BUILD_TESTING" true) ]
++ lib.optionals (!stdenv.hostPlatform.isStatic) [
(lib.cmakeBool "RE2_USE_ICU" true)
(lib.cmakeBool "BUILD_SHARED_LIBS" true)
];
doCheck = true;

View File

@ -1,26 +1,28 @@
{ lib
, config
, fetchFromGitHub
, stdenv
, cmake
, cudaPackages ? { }
, cudaSupport ? config.cudaSupport
, pythonSupport ? true
, pythonPackages
, llvmPackages
, blas
, swig
, addDriverRunpath
, optLevel ? let
optLevels =
lib.optionals stdenv.hostPlatform.avx2Support [ "avx2" ]
++ lib.optionals stdenv.hostPlatform.sse4_1Support [ "sse4" ]
++ [ "generic" ];
in
# Choose the maximum available optimization level
builtins.head optLevels
, faiss # To run demos in the tests
, runCommand
{
lib,
config,
fetchFromGitHub,
stdenv,
cmake,
cudaPackages ? { },
cudaSupport ? config.cudaSupport,
pythonSupport ? true,
pythonPackages,
llvmPackages,
blas,
swig,
autoAddDriverRunpath,
optLevel ?
let
optLevels =
lib.optionals stdenv.hostPlatform.avx2Support [ "avx2" ]
++ lib.optionals stdenv.hostPlatform.sse4_1Support [ "sse4" ]
++ [ "generic" ];
in
# Choose the maximum available optimization level
builtins.head optLevels,
faiss, # To run demos in the tests
runCommand,
}@inputs:
let
@ -44,7 +46,11 @@ in
stdenv.mkDerivation {
inherit pname version;
outputs = [ "out" "demos" ];
outputs = [
"out"
"demos"
"dist"
];
src = fetchFromGitHub {
owner = "facebookresearch";
@ -53,50 +59,46 @@ stdenv.mkDerivation {
hash = "sha256-nS8nhkNGGb2oAJKfr/MIAZjAwMxBGbNd16/CkEtv67I=";
};
# Remove the following substituteInPlace when updating
# to a release that contains change from PR
# https://github.com/facebookresearch/faiss/issues/3239
# that fixes building faiss with swig 4.2.x
postPatch = ''
# Remove the following substituteInPlace when updating
# to a release that contains change from PR
# https://github.com/facebookresearch/faiss/issues/3239
# that fixes building faiss with swig 4.2.x
substituteInPlace faiss/python/swigfaiss.swig \
--replace-fail '#ifdef SWIGWORDSIZE64' '#if (__SIZEOF_LONG__ == 8)'
'';
buildInputs = [
blas
swig
] ++ lib.optionals pythonSupport [
pythonPackages.setuptools
pythonPackages.pip
pythonPackages.wheel
] ++ lib.optionals stdenv.cc.isClang [
llvmPackages.openmp
] ++ lib.optionals cudaSupport cudaComponents;
nativeBuildInputs =
[ cmake ]
++ lib.optionals cudaSupport [
cudaPackages.cuda_nvcc
autoAddDriverRunpath
]
++ lib.optionals pythonSupport [
pythonPackages.python
pythonPackages.setuptools
pythonPackages.pip
pythonPackages.wheel
];
propagatedBuildInputs = lib.optionals pythonSupport [
pythonPackages.numpy
pythonPackages.packaging
];
nativeBuildInputs = [ cmake ] ++ lib.optionals cudaSupport [
cudaPackages.cuda_nvcc
addDriverRunpath
] ++ lib.optionals pythonSupport [
pythonPackages.python
];
passthru.extra-requires.all = [
pythonPackages.numpy
];
cmakeFlags = [
"-DFAISS_ENABLE_GPU=${if cudaSupport then "ON" else "OFF"}"
"-DFAISS_ENABLE_PYTHON=${if pythonSupport then "ON" else "OFF"}"
"-DFAISS_OPT_LEVEL=${optLevel}"
] ++ lib.optionals cudaSupport [
"-DCMAKE_CUDA_ARCHITECTURES=${flags.cmakeCudaArchitecturesString}"
];
buildInputs =
[
blas
swig
]
++ lib.optionals pythonSupport [ pythonPackages.numpy ]
++ lib.optionals stdenv.cc.isClang [ llvmPackages.openmp ]
++ lib.optionals cudaSupport cudaComponents;
cmakeFlags =
[
(lib.cmakeBool "FAISS_ENABLE_GPU" cudaSupport)
(lib.cmakeBool "FAISS_ENABLE_PYTHON" pythonSupport)
(lib.cmakeFeature "FAISS_OPT_LEVEL" optLevel)
]
++ lib.optionals cudaSupport [
(lib.cmakeFeature "CMAKE_CUDA_ARCHITECTURES" flags.cmakeCudaArchitecturesString)
];
buildFlags =
[ "faiss" ]
@ -113,39 +115,29 @@ stdenv.mkDerivation {
python -m pip wheel --verbose --no-index --no-deps --no-clean --no-build-isolation --wheel-dir dist .)
'';
postInstall = ''
mkdir -p $demos/bin
if [[ "$buildInputs" == *demo_ivfpq_indexing* ]] ; then
cp ./demos/demo_ivfpq_indexing $demos/bin/
fi
'' + lib.optionalString pythonSupport ''
mkdir -p $out/${pythonPackages.python.sitePackages}
(cd faiss/python && python -m pip install dist/*.whl --no-index --no-warn-script-location --prefix="$out" --no-cache)
'';
postFixup = lib.optionalString (pythonSupport && cudaSupport) ''
addDriverRunpath $out/${pythonPackages.python.sitePackages}/faiss/*.so
addDriverRunpath $demos/bin/*
'';
# Need buildPythonPackage for this one
# pythonImportsCheck = [
# "faiss"
# ];
postInstall =
''
mkdir -p $demos/bin
if [[ "$buildInputs" == *demo_ivfpq_indexing* ]] ; then
cp ./demos/demo_ivfpq_indexing $demos/bin/
fi
''
+ lib.optionalString pythonSupport ''
mkdir "$dist"
cp faiss/python/dist/*.whl "$dist/"
'';
passthru = {
inherit cudaSupport cudaPackages pythonSupport;
tests = {
runDemos = runCommand "${pname}-run-demos"
{ buildInputs = [ faiss.demos ]; }
# There are more demos, we run just the one that documentation mentions
''
demo_ivfpq_indexing && touch $out
'';
} // lib.optionalAttrs pythonSupport {
pytest = pythonPackages.callPackage ./tests.nix { };
};
runDemos =
runCommand "${pname}-run-demos" { buildInputs = [ faiss.demos ]; }
# There are more demos, we run just the one that documentation mentions
''
demo_ivfpq_indexing && touch $out
'';
} // lib.optionalAttrs pythonSupport { pytest = pythonPackages.callPackage ./tests.nix { }; };
};
meta = with lib; {

View File

@ -3,7 +3,6 @@
blockdiag,
buildPythonPackage,
fetchFromGitHub,
nose,
pytestCheckHook,
pythonOlder,
setuptools,
@ -23,14 +22,13 @@ buildPythonPackage rec {
hash = "sha256-WmprkHOgvlsOIg8H77P7fzEqxGnj6xaL7Df7urRkg3o=";
};
patches = [ ./fix_test_generate.patch ];
build-system = [ setuptools ];
propagatedBuildInputs = [ blockdiag ];
nativeCheckInputs = [
nose
pytestCheckHook
];
nativeCheckInputs = [ pytestCheckHook ];
pytestFlagsArray = [ "src/actdiag/tests/" ];

View File

@ -0,0 +1,22 @@
diff --git a/src/actdiag/tests/test_generate_diagram.py b/src/actdiag/tests/test_generate_diagram.py
index c5ee3d5..a74a151 100644
--- a/src/actdiag/tests/test_generate_diagram.py
+++ b/src/actdiag/tests/test_generate_diagram.py
@@ -16,16 +16,6 @@
import os
from blockdiag.tests.test_generate_diagram import (get_diagram_files,
- testcase_generator)
+ test_generate_with_separate)
-import actdiag.command
-
-def test_generate():
- mainfunc = actdiag.command.main
- basepath = os.path.dirname(__file__)
- files = get_diagram_files(basepath)
- options = []
-
- for testcase in testcase_generator(basepath, mainfunc, files, options):
- yield testcase

View File

@ -5,9 +5,9 @@
ephem,
fetchFromGitHub,
fetchpatch,
fetchpatch2,
funcparserlib,
pillow,
nose,
pytestCheckHook,
pythonOlder,
reportlab,
@ -36,8 +36,21 @@ buildPythonPackage rec {
url = "https://github.com/blockdiag/blockdiag/commit/20d780cad84e7b010066cb55f848477957870165.patch";
hash = "sha256-t1zWFzAsLL2EUa0nD4Eui4Y5AhAZLRmp/yC9QpzzeUA=";
})
# https://github.com/blockdiag/blockdiag/pull/175
(fetchpatch2 {
name = "migrate-to-pytest.patch";
url = "https://github.com/blockdiag/blockdiag/commit/4f4f726252084f17ecc6c524592222af09d37da4.patch";
hash = "sha256-OkfKJwJtb2DJRXE/8thYnisTFwcfstUFTTJHdM/qBzg=";
})
];
postPatch = ''
# requires network access the url-based icon
# and path-based icon is set to debian logo (/usr/share/pixmaps/debian-logo.png)
rm src/blockdiag/tests/diagrams/node_icon.diag
# note: this is a postPatch as `seqdiag` uses them directly
'';
build-system = [ setuptools ];
dependencies = [
@ -50,7 +63,6 @@ buildPythonPackage rec {
nativeCheckInputs = [
ephem
nose
pytestCheckHook
];

View File

@ -0,0 +1,46 @@
{
lib,
buildPythonPackage,
faiss-build,
numpy,
packaging,
setuptools,
pip,
wheel,
}:
buildPythonPackage {
inherit (faiss-build) pname version;
pyproject = true;
# The wheel is produced by the main faiss build and exposed via its "dist"
# output; this derivation only repackages it for the Python package set.
src = "${lib.getOutput "dist" faiss-build}";
# buildPythonPackage expects the wheel under ./dist.
postPatch = ''
mkdir dist
mv *.whl dist/
'';
build-system = [
setuptools
pip
wheel
];
dependencies = [
numpy
packaging
];
# E.g. cuda libraries; needed because reference scanning
# can't see inside the wheels
inherit (faiss-build) buildInputs;
# The wheel is already built; skip straight to installation.
dontBuild = true;
pythonImportsCheck = [ "faiss" ];
# Reuse the main build's meta, but drop mainProgram (this is a library)
# and replace the description.
meta = lib.pipe (faiss-build.meta or { }) [
(lib.flip builtins.removeAttrs [ "mainProgram" ])
(m: m // { description = "Bindings for faiss, the similarity search library"; })
];
}

View File

@ -3,7 +3,6 @@
blockdiag,
fetchFromGitHub,
buildPythonPackage,
nose,
pytestCheckHook,
setuptools,
pythonOlder,
@ -23,14 +22,13 @@ buildPythonPackage rec {
hash = "sha256-uKrdkXpL5YBr953sRsHknYg+2/WwrZmyDf8BMA2+0tU=";
};
patches = [ ./fix_test_generate.patch ];
build-system = [ setuptools ];
dependencies = [ blockdiag ];
nativeCheckInputs = [
nose
pytestCheckHook
];
nativeCheckInputs = [ pytestCheckHook ];
pytestFlagsArray = [ "src/nwdiag/tests/" ];

View File

@ -0,0 +1,22 @@
diff --git a/src/nwdiag/tests/test_generate_diagram.py b/src/nwdiag/tests/test_generate_diagram.py
index 2065208..ac9b096 100644
--- a/src/nwdiag/tests/test_generate_diagram.py
+++ b/src/nwdiag/tests/test_generate_diagram.py
@@ -16,16 +16,4 @@
import os
from blockdiag.tests.test_generate_diagram import (get_diagram_files,
- testcase_generator)
-
-import nwdiag.command
-
-
-def test_generate():
- mainfunc = nwdiag.command.main
- basepath = os.path.dirname(__file__)
- files = get_diagram_files(basepath)
- options = []
-
- for testcase in testcase_generator(basepath, mainfunc, files, options):
- yield testcase
+ test_generate_with_separate)

View File

@ -1,28 +1,37 @@
{
lib,
buildPythonPackage,
fetchPypi,
nose,
fetchFromGitHub,
pytestCheckHook,
setuptools,
}:
buildPythonPackage rec {
version = "0.3.3";
format = "setuptools";
pname = "ofxhome";
pyproject = true;
src = fetchPypi {
inherit pname version;
sha256 = "1rpyfqr2q9pnin47rjd4qapl8ngk1m9jx36iqckhdhr8s8gla445";
src = fetchFromGitHub {
owner = "captin411";
repo = "ofxhome";
rev = "v${version}";
hash = "sha256-i16bE9iuafhAKco2jYfg5T5QCWFHdnYVztf1z2XbO9g=";
};
buildInputs = [ nose ];
build-system = [ setuptools ];
# ImportError: No module named tests
doCheck = false;
nativeCheckInputs = [ pytestCheckHook ];
meta = with lib; {
# These are helper functions that should not be called as tests
disabledTests = [
"testfile_name"
"testfile"
];
meta = {
homepage = "https://github.com/captin411/ofxhome";
description = "ofxhome.com financial institution lookup REST client";
license = licenses.mit;
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ pyrox0 ];
};
}

View File

@ -2,31 +2,30 @@
lib,
buildPythonPackage,
fetchPypi,
nose,
pytestCheckHook,
setuptools,
}:
buildPythonPackage rec {
pname = "para";
version = "0.0.8";
format = "setuptools";
pyproject = true;
src = fetchPypi {
inherit pname version;
hash = "sha256-RsMjKunY6p2IbP0IzdESiSICvthkX0C2JVWXukz+8hc=";
};
nativeCheckInputs = [
nose
pytestCheckHook
];
build-system = [ setuptools ];
nativeCheckInputs = [ pytestCheckHook ];
pythonImportsCheck = [ "para" ];
meta = with lib; {
meta = {
description = "Set utilities that ake advantage of python's 'multiprocessing' module to distribute CPU-intensive tasks";
homepage = "https://pypi.org/project/para";
license = licenses.mit;
maintainers = with maintainers; [ GaetanLepage ];
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ GaetanLepage ];
};
}

View File

@ -2,11 +2,10 @@
lib,
buildPythonPackage,
fetchPypi,
nose,
python,
pytestCheckHook,
six,
paste,
setuptools,
pastedeploy,
pythonOlder,
}:
@ -14,7 +13,7 @@
buildPythonPackage rec {
pname = "pastescript";
version = "3.6.0";
format = "setuptools";
pyproject = true;
disabled = pythonOlder "3.7";
@ -24,6 +23,8 @@ buildPythonPackage rec {
hash = "sha256-HCLSt81TUWRr7tKMb3DrSipLklZR2a/Ko1AdBsq7UXE=";
};
build-system = [ setuptools ];
propagatedBuildInputs = [
paste
pastedeploy
@ -33,10 +34,7 @@ buildPythonPackage rec {
# test suite seems to unset PYTHONPATH
doCheck = false;
nativeCheckInputs = [
nose
pytestCheckHook
];
nativeCheckInputs = [ pytestCheckHook ];
disabledTestPaths = [ "appsetup/testfiles" ];

View File

@ -1,19 +1,22 @@
{
lib,
buildPythonPackage,
fetchPypi,
fetchFromGitHub,
numpy,
nose,
setuptools,
pytestCheckHook,
}:
buildPythonPackage rec {
pname = "pyquaternion";
version = "0.9.9";
format = "setuptools";
pyproject = true;
src = fetchPypi {
inherit pname version;
hash = "sha256-sfYa8hnLL+lmtft5oZISTy5jo/end6w8rfKVexqBvqg=";
src = fetchFromGitHub {
owner = "KieranWynn";
repo = "pyquaternion";
rev = "v${version}";
hash = "sha256-L0wT9DFUDRcmmN7OpmIDNvtQWQrM7iFnZt6R2xrJ+3A=";
};
# The VERSION.txt file is required for setup.py
@ -22,9 +25,14 @@ buildPythonPackage rec {
echo "${version}" > VERSION.txt
'';
propagatedBuildInputs = [ numpy ];
build-system = [ setuptools ];
dependencies = [ numpy ];
nativeCheckInputs = [ pytestCheckHook ];
pytestFlagsArray = [ "pyquaternion/test/" ];
nativeCheckInputs = [ nose ];
pythonImportsCheck = [ "pyquaternion" ];
meta = with lib; {

View File

@ -5,6 +5,7 @@
substituteAll,
isPyPy,
python,
setuptools,
pillow,
pycairo,
pkg-config,
@ -21,7 +22,6 @@
zlib,
libxml2,
sqlite,
nose,
pytestCheckHook,
darwin,
sparsehash,
@ -30,7 +30,7 @@
buildPythonPackage rec {
pname = "python-mapnik";
version = "3.0.16-unstable-2024-02-22";
format = "setuptools";
pyproject = true;
src = fetchFromGitHub {
owner = "mapnik";
@ -55,12 +55,14 @@ buildPythonPackage rec {
stdenv = if python.stdenv.isDarwin then darwin.apple_sdk_11_0.stdenv else python.stdenv;
build-system = [ setuptools ];
nativeBuildInputs = [
mapnik # for mapnik_config
pkg-config
];
buildInputs = [
dependencies = [
mapnik
boost
cairo
@ -94,10 +96,7 @@ buildPythonPackage rec {
export XMLPARSER=libxml2
'';
nativeCheckInputs = [
nose
pytestCheckHook
];
nativeCheckInputs = [ pytestCheckHook ];
preCheck =
''

View File

@ -1,52 +0,0 @@
{
lib,
buildPythonPackage,
fetchFromGitHub,
fetchpatch,
requests,
pytestCheckHook,
mock,
nose,
pycrypto,
}:
buildPythonPackage rec {
pname = "rauth";
version = "0.7.2";
format = "setuptools";
src = fetchFromGitHub {
owner = "litl";
repo = "rauth";
rev = version;
hash = "sha256-wRKZbxZCEfihOaJM8sk8438LE++KJWxdOGImpL1gHa4=";
};
patches = [
(fetchpatch {
# https://github.com/litl/rauth/pull/211
name = "fix-pycrypdodome-replacement-for-pycrypto.patch";
url = "https://github.com/litl/rauth/commit/7fb3b7bf1a1869a52cf59ee3eb607d318e97265c.patch";
hash = "sha256-jiAIw+VQ2d/bkm2brqfY1RUrNGf+lsMPnoI91gGUS6o=";
})
];
propagatedBuildInputs = [ requests ];
pythonImportsCheck = [ "rauth" ];
nativeCheckInputs = [
pytestCheckHook
mock
nose
pycrypto
];
meta = with lib; {
description = "Python library for OAuth 1.0/a, 2.0, and Ofly";
homepage = "https://github.com/litl/rauth";
changelog = "https://github.com/litl/rauth/blob/${src.rev}/CHANGELOG";
license = licenses.mit;
maintainers = with maintainers; [ blaggacao ];
};
}

View File

@ -3,7 +3,6 @@
blockdiag,
buildPythonPackage,
fetchFromGitHub,
nose,
pytestCheckHook,
pythonOlder,
setuptools,
@ -27,11 +26,9 @@ buildPythonPackage rec {
dependencies = [ blockdiag ];
nativeCheckInputs = [
nose
pytestCheckHook
];
patches = [ ./fix_test_generate.patch ];
nativeCheckInputs = [ pytestCheckHook ];
pytestFlagsArray = [ "src/seqdiag/tests/" ];
pythonImportsCheck = [ "seqdiag" ];

View File

@ -0,0 +1,25 @@
diff --git a/src/seqdiag/tests/test_generate_diagram.py b/src/seqdiag/tests/test_generate_diagram.py
index cdd340f..a1dd2d4 100644
--- a/src/seqdiag/tests/test_generate_diagram.py
+++ b/src/seqdiag/tests/test_generate_diagram.py
@@ -13,19 +13,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-
from blockdiag.tests.test_generate_diagram import (get_diagram_files,
- testcase_generator)
-
-import seqdiag.command
-
-
-def test_generate():
- mainfunc = seqdiag.command.main
- basepath = os.path.dirname(__file__)
- files = get_diagram_files(basepath)
- options = []
-
- for testcase in testcase_generator(basepath, mainfunc, files, options):
- yield testcase
+ test_generate_with_separate)

View File

@ -2,17 +2,17 @@
lib,
buildPythonPackage,
fetchFromGitHub,
nose,
pytestCheckHook,
setuptools,
pythonOlder,
six,
sqlalchemy,
pytestCheckHook,
}:
buildPythonPackage rec {
pname = "sqlalchemy-mixins";
version = "2.0.5";
format = "setuptools";
pyproject = true;
disabled = pythonOlder "3.8";
@ -23,15 +23,14 @@ buildPythonPackage rec {
hash = "sha256-iJrRlV/M0Z1IOdrwWSblefm6wjvdk4/v0am+It8VeWI=";
};
propagatedBuildInputs = [
build-system = [ setuptools ];
dependencies = [
six
sqlalchemy
];
nativeCheckInputs = [
nose
pytestCheckHook
];
nativeCheckInputs = [ pytestCheckHook ];
pythonImportsCheck = [ "sqlalchemy_mixins" ];

View File

@ -1,7 +1,7 @@
{ lib, buildGoModule, fetchFromGitLab, bash }:
let
version = "17.1.0";
version = "17.2.0";
in
buildGoModule rec {
inherit version;
@ -17,13 +17,13 @@ buildGoModule rec {
# For patchShebangs
buildInputs = [ bash ];
vendorHash = "sha256-Rk5/h8wqVwGzovtAjjNkvexG71Dj36mFxU8OsLJzpUo=";
vendorHash = "sha256-1MwHss76apA9KoFhEU6lYiUACrPMGYzjhds6nTyNuJI=";
src = fetchFromGitLab {
owner = "gitlab-org";
repo = "gitlab-runner";
rev = "v${version}";
hash = "sha256-mRL62PIAkPK0aLA7uYpGlUvaJfbD354RDOD4P8MLzx8=";
hash = "sha256-a2Igy4DS3fYTvPW1vvDrH/DjMQ4lG9cm/P3mFr+y9s4=";
};
patches = [

View File

@ -8,11 +8,11 @@
}:
mkKdeDerivation rec {
pname = "marknote";
version = "1.2.1";
version = "1.3.0";
src = fetchurl {
url = "mirror://kde/stable/marknote/marknote-${version}.tar.xz";
hash = "sha256-HzImkm8l8Rqiuyq2QezfdqJ1hxIdLZhiIGVM9xzpyaA=";
hash = "sha256-/5lZhBWmzKWQDLTRDStypvOS6v4Hh0tuLrQun3qzvSg=";
};
extraBuildInputs = [

View File

@ -14,11 +14,11 @@
stdenv.mkDerivation rec {
pname = "atop";
version = "2.10.0";
version = "2.11.0";
src = fetchurl {
url = "https://www.atoptool.nl/download/atop-${version}.tar.gz";
hash = "sha256-56ZzzyyCV4592C7LDeyD/Z7LMIKLJWHCip+lqvddX5M=";
hash = "sha256-m5TGZmAu//e/QC7M5wbDR/OMOctjSY+dOWJoYeVkbiA=";
};
nativeBuildInputs = lib.optionals withAtopgpu [

View File

@ -10,11 +10,11 @@
stdenv.mkDerivation (finalAttrs: {
pname = "ncdu";
version = "2.4";
version = "2.5";
src = fetchurl {
url = "https://dev.yorhel.nl/download/ncdu-${finalAttrs.version}.tar.gz";
hash = "sha256-Sj0AAjCc9qfOp5GTjayb7N7OTVKdDW3I2RtztOaFVQk=";
hash = "sha256-f0neJQJKurGvH/IrO4VCwNFY4Bj+DpYHT9lLDh5tMaU=";
};
nativeBuildInputs = [

View File

@ -5,17 +5,20 @@
, pkg-config
, stdenv
, darwin
, nix-update-script
, testers
, speedtest-rs
}:
rustPlatform.buildRustPackage rec {
pname = "speedtest-rs";
version = "0.1.5";
version = "0.2.0";
src = fetchFromGitHub {
owner = "nelsonjchen";
repo = pname;
repo = "speedtest-rs";
rev = "refs/tags/v${version}";
hash = "sha256-JKthXrosqDZh6CWEqT08h3ySPZulitDol7lX3Eo7orM=";
hash = "sha256-1FAFYiWDD/KG/7/UTv/EW6Nj2GnU0GZFFq6ouMc0URA=";
};
buildInputs = [ openssl ] ++
@ -26,14 +29,25 @@ rustPlatform.buildRustPackage rec {
nativeBuildInputs = [ pkg-config ];
cargoHash = "sha256-kUXHC/qXgukaUqaBykXB2ZWmfQEjzJuIyemr1ogVX1U=";
cargoHash = "sha256-0YPCBzidE1+LgIYk457eSoerLvQuuZs9cTd7uUt1Lr8=";
meta = with lib; {
# Fail for unclear reasons (only on darwin)
checkFlags = lib.optionals stdenv.isDarwin [
"--skip=speedtest::tests::test_get_configuration"
"--skip=speedtest::tests::test_get_server_list_with_config"
];
passthru = {
updateScript = nix-update-script { };
tests.version = testers.testVersion { package = speedtest-rs; };
};
meta = {
description = "Command line internet speedtest tool written in rust";
homepage = "https://github.com/nelsonjchen/speedtest-rs";
changelog = "https://github.com/nelsonjchen/speedtest-rs/blob/v${version}/CHANGELOG.md";
license = with licenses; [ mit asl20 ];
maintainers = with maintainers; [ GaetanLepage ];
license = with lib.licenses; [ mit asl20 ];
maintainers = with lib.maintainers; [ GaetanLepage ];
mainProgram = "speedtest-rs";
};
}

View File

@ -393,6 +393,18 @@ in {
"NEWS"
];
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.2" && lib.versionOlder php.version "8.2.22") [
# Fixes compatibility with libxml2 2.13. Part of 8.3.10RC1+, 8.2.22RC1+
(fetchpatch {
url = "https://github.com/php/php-src/commit/4fe821311cafb18ca8bdf20b9d796c48a13ba552.diff?full_index=1";
hash = "sha256-YC3I0BQi3o3+VmRu/UqpqPpaSC+ekPqzbORTHftbPvY=";
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.3" && lib.versionOlder php.version "8.3.10") [
(fetchpatch {
url = "https://github.com/php/php-src/commit/ecf0bb0fd12132d853969c5e9a212e5f627f2da2.diff?full_index=1";
hash = "sha256-sodGODHb4l04P0srn3L8l3K+DjZzCsCNbamfkmIyF+k=";
excludes = [ "NEWS" ];
})
];
}
{
@ -605,6 +617,19 @@ in {
configureFlags = [
"--enable-simplexml"
];
patches = lib.optionals (lib.versions.majorMinor php.version == "8.2" && lib.versionOlder php.version "8.2.22") [
# Fixes compatibility with libxml2 2.13. Part of 8.3.10RC1+, 8.2.22RC1+
(fetchpatch {
url = "https://github.com/php/php-src/commit/4fe821311cafb18ca8bdf20b9d796c48a13ba552.diff?full_index=1";
hash = "sha256-YC3I0BQi3o3+VmRu/UqpqPpaSC+ekPqzbORTHftbPvY=";
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.3" && lib.versionOlder php.version "8.3.10") [
(fetchpatch {
url = "https://github.com/php/php-src/commit/ecf0bb0fd12132d853969c5e9a212e5f627f2da2.diff?full_index=1";
hash = "sha256-sodGODHb4l04P0srn3L8l3K+DjZzCsCNbamfkmIyF+k=";
excludes = [ "NEWS" ];
})
];
}
{
name = "snmp";
@ -620,8 +645,21 @@ in {
configureFlags = [
"--enable-soap"
];
doCheck = false;
doCheck = stdenv.isDarwin; # TODO: a couple tests still fail on *-linux
internalDeps = [ php.extensions.session ];
patches = lib.optionals (lib.versions.majorMinor php.version == "8.2" && lib.versionOlder php.version "8.2.22") [
# Fixes compatibility with libxml2 2.13. Part of 8.3.10RC1+, 8.2.22RC1+
(fetchpatch {
url = "https://github.com/php/php-src/commit/4fe821311cafb18ca8bdf20b9d796c48a13ba552.diff?full_index=1";
hash = "sha256-YC3I0BQi3o3+VmRu/UqpqPpaSC+ekPqzbORTHftbPvY=";
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.3" && lib.versionOlder php.version "8.3.10") [
(fetchpatch {
url = "https://github.com/php/php-src/commit/ecf0bb0fd12132d853969c5e9a212e5f627f2da2.diff?full_index=1";
hash = "sha256-sodGODHb4l04P0srn3L8l3K+DjZzCsCNbamfkmIyF+k=";
excludes = [ "NEWS" ];
})
];
}
{
name = "sockets";
@ -654,6 +692,19 @@ in {
"--enable-xml"
];
doCheck = false;
patches = lib.optionals (lib.versions.majorMinor php.version == "8.2" && lib.versionOlder php.version "8.2.22") [
# Fixes compatibility with libxml2 2.13. Part of 8.3.10RC1+, 8.2.22RC1+
(fetchpatch {
url = "https://github.com/php/php-src/commit/4fe821311cafb18ca8bdf20b9d796c48a13ba552.diff?full_index=1";
hash = "sha256-YC3I0BQi3o3+VmRu/UqpqPpaSC+ekPqzbORTHftbPvY=";
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.3" && lib.versionOlder php.version "8.3.10") [
(fetchpatch {
url = "https://github.com/php/php-src/commit/ecf0bb0fd12132d853969c5e9a212e5f627f2da2.diff?full_index=1";
hash = "sha256-sodGODHb4l04P0srn3L8l3K+DjZzCsCNbamfkmIyF+k=";
excludes = [ "NEWS" ];
})
];
}
{
name = "xmlreader";
@ -671,6 +722,19 @@ in {
configureFlags = [
"--enable-xmlwriter"
];
patches = lib.optionals (lib.versions.majorMinor php.version == "8.2" && lib.versionOlder php.version "8.2.22") [
# Fixes compatibility with libxml2 2.13. Part of 8.3.10RC1+, 8.2.22RC1+
(fetchpatch {
url = "https://github.com/php/php-src/commit/4fe821311cafb18ca8bdf20b9d796c48a13ba552.diff?full_index=1";
hash = "sha256-YC3I0BQi3o3+VmRu/UqpqPpaSC+ekPqzbORTHftbPvY=";
})
] ++ lib.optionals (lib.versions.majorMinor php.version == "8.3" && lib.versionOlder php.version "8.3.10") [
(fetchpatch {
url = "https://github.com/php/php-src/commit/ecf0bb0fd12132d853969c5e9a212e5f627f2da2.diff?full_index=1";
hash = "sha256-sodGODHb4l04P0srn3L8l3K+DjZzCsCNbamfkmIyF+k=";
excludes = [ "NEWS" ];
})
];
}
{
name = "xsl";

View File

@ -494,6 +494,7 @@ mapAliases ({
radicale_infcloud = radicale-infcloud; # added 2024-01-07
radio_beam = radio-beam; # added 2023-11-04
ratelimiter = throw "ratelimiter has been removed, since it is unmaintained and broken"; # added 2023-10-21
rauth = throw "rauth has been removed, since it is unmaintained upstream"; # added 2024-07-27
rdflib-jsonld = throw "rdflib-jsonld is not compatible with rdflib 6"; # added 2021-11-05
readme_renderer = readme-renderer; # added 2024-01-07
recaptcha_client = throw "recaptcha_client has been removed since it is no longer maintained"; # added 2023-10-20

View File

@ -4161,10 +4161,12 @@ self: super: with self; {
fairseq = callPackage ../development/python-modules/fairseq { };
faiss = toPythonModule (pkgs.faiss.override {
pythonSupport = true;
pythonPackages = self;
});
faiss = callPackage ../development/python-modules/faiss {
faiss-build = pkgs.faiss.override {
pythonSupport = true;
pythonPackages = self;
};
};
fake-useragent = callPackage ../development/python-modules/fake-useragent { };
@ -13359,8 +13361,6 @@ self: super: with self; {
ratelimit = callPackage ../development/python-modules/ratelimit { };
rauth = callPackage ../development/python-modules/rauth { };
raven = callPackage ../development/python-modules/raven { };
rawkit = callPackage ../development/python-modules/rawkit { };