Merge pull request #261939 from tweag/check-by-name-intermediate

`nixpkgs-check-by-name`: Intermediate error representation refactor
Silvan Mosberger 2023-10-30 12:49:04 +01:00 committed by GitHub
commit 4651ac9bcd
10 changed files with 687 additions and 350 deletions

Cargo.lock (View File)

@@ -162,6 +162,12 @@ version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
[[package]]
name = "either"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
[[package]]
name = "errno"
version = "0.3.2"
@@ -218,6 +224,15 @@ dependencies = [
"windows-sys",
]
[[package]]
name = "itertools"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "1.0.9"
@@ -274,6 +289,7 @@ dependencies = [
"anyhow",
"clap",
"colored",
"itertools",
"lazy_static",
"regex",
"rnix",

Cargo.toml (View File)

@@ -13,6 +13,7 @@ serde = { version = "1.0.185", features = ["derive"] }
anyhow = "1.0"
lazy_static = "1.4.0"
colored = "2.0.4"
itertools = "0.11.0"
[dev-dependencies]
temp-env = "0.3.5"

README.md (View File)

@@ -1,6 +1,6 @@
# Nixpkgs pkgs/by-name checker
This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory once introduced.
This directory implements a program to check the [validity](#validity-checks) of the `pkgs/by-name` Nixpkgs directory.
It is being used by [this GitHub Actions workflow](../../../.github/workflows/check-by-name.yml).
This is part of the implementation of [RFC 140](https://github.com/NixOS/rfcs/pull/140).
@@ -24,7 +24,7 @@ This API may be changed over time if the CI workflow making use of it is adjusted
- `2`: If an unexpected I/O error occurs
- Standard error:
- Informative messages
- Error messages if validation is not successful
- Detected problems if validation is not successful
## Validity checks

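For illustration of the exit-code contract described in the README excerpt above, here is a minimal, hypothetical Rust caller. It is not part of this PR (the real consumer is the linked GitHub Actions workflow); the binary name is taken from the PR title, and the positional Nixpkgs path argument as well as the conventional 0-for-success exit code are assumptions.

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Hypothetical caller, not part of this PR. The binary name comes from the
    // PR title; the positional nixpkgs path argument and the 0-for-success
    // convention are assumptions for illustration.
    let status = Command::new("nixpkgs-check-by-name")
        .arg("/path/to/nixpkgs")
        .status()?;

    match status.code() {
        Some(0) => println!("validation succeeded"),
        // Exit code 2 is documented above as an unexpected I/O error.
        Some(2) => eprintln!("unexpected I/O error"),
        // Other failures: the detected problems were already printed to stderr.
        Some(code) => eprintln!("validation failed (exit code {code})"),
        None => eprintln!("terminated by a signal"),
    }
    Ok(())
}
```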
src/eval.rs (View File)

@ -1,12 +1,12 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::structure;
use crate::utils::ErrorWriter;
use crate::validation::{self, Validation::Success};
use crate::Version;
use std::path::Path;
use anyhow::Context;
use serde::Deserialize;
use std::collections::HashMap;
use std::io;
use std::path::PathBuf;
use std::process;
use tempfile::NamedTempFile;
@@ -40,12 +40,12 @@ const EXPR: &str = include_str!("eval.nix");
/// Check that the Nixpkgs attribute values corresponding to the packages in pkgs/by-name are
/// of the form `callPackage <package_file> { ... }`.
/// See the `eval.nix` file for how this is achieved on the Nix side
pub fn check_values<W: io::Write>(
pub fn check_values(
version: Version,
error_writer: &mut ErrorWriter<W>,
nixpkgs: &structure::Nixpkgs,
nixpkgs_path: &Path,
package_names: Vec<String>,
eval_accessible_paths: Vec<&Path>,
) -> anyhow::Result<()> {
) -> validation::Result<()> {
// Write the list of packages we need to check into a temporary JSON file.
// This can then get read by the Nix evaluation.
let attrs_file = NamedTempFile::new().context("Failed to create a temporary file")?;
@@ -55,7 +55,7 @@ pub fn check_values<W: io::Write>(
// entry is needed.
let attrs_file_path = attrs_file.path().canonicalize()?;
serde_json::to_writer(&attrs_file, &nixpkgs.package_names).context(format!(
serde_json::to_writer(&attrs_file, &package_names).context(format!(
"Failed to serialise the package names to the temporary path {}",
attrs_file_path.display()
))?;
@@ -87,9 +87,9 @@
.arg(&attrs_file_path)
// Same for the nixpkgs to test
.args(["--arg", "nixpkgsPath"])
.arg(&nixpkgs.path)
.arg(nixpkgs_path)
.arg("-I")
.arg(&nixpkgs.path);
.arg(nixpkgs_path);
// Also add extra paths that need to be accessible
for path in eval_accessible_paths {
@@ -111,52 +111,54 @@
String::from_utf8_lossy(&result.stdout)
))?;
for package_name in &nixpkgs.package_names {
let relative_package_file = structure::Nixpkgs::relative_file_for_package(package_name);
let absolute_package_file = nixpkgs.path.join(&relative_package_file);
Ok(validation::sequence_(package_names.iter().map(
|package_name| {
let relative_package_file = structure::relative_file_for_package(package_name);
let absolute_package_file = nixpkgs_path.join(&relative_package_file);
if let Some(attribute_info) = actual_files.get(package_name) {
let valid = match &attribute_info.variant {
AttributeVariant::AutoCalled => true,
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
// Only check for the argument to be non-empty if the version is V1 or
// higher
let non_empty = if version >= Version::V1 {
!empty_arg
} else {
true
};
correct_file && non_empty
if let Some(attribute_info) = actual_files.get(package_name) {
let valid = match &attribute_info.variant {
AttributeVariant::AutoCalled => true,
AttributeVariant::CallPackage { path, empty_arg } => {
let correct_file = if let Some(call_package_path) = path {
absolute_package_file == *call_package_path
} else {
false
};
// Only check for the argument to be non-empty if the version is V1 or
// higher
let non_empty = if version >= Version::V1 {
!empty_arg
} else {
true
};
correct_file && non_empty
}
AttributeVariant::Other => false,
};
if !valid {
NixpkgsProblem::WrongCallPackage {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
} else if !attribute_info.is_derivation {
NixpkgsProblem::NonDerivation {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
} else {
Success(())
}
AttributeVariant::Other => false,
};
if !valid {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.",
relative_package_file.display()
))?;
continue;
} else {
NixpkgsProblem::UndefinedAttr {
relative_package_file: relative_package_file.clone(),
package_name: package_name.clone(),
}
.into()
}
if !attribute_info.is_derivation {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute defined by {} is not a derivation",
relative_package_file.display()
))?;
}
} else {
error_writer.write(&format!(
"pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}",
relative_package_file.display()
))?;
continue;
}
}
Ok(())
},
)))
}

src/main.rs (View File)

@@ -1,16 +1,19 @@
mod eval;
mod nixpkgs_problem;
mod references;
mod structure;
mod utils;
mod validation;
use crate::structure::check_structure;
use crate::validation::Validation::Failure;
use crate::validation::Validation::Success;
use anyhow::Context;
use clap::{Parser, ValueEnum};
use colored::Colorize;
use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
use structure::Nixpkgs;
use utils::ErrorWriter;
/// Program to check the validity of pkgs/by-name
#[derive(Parser, Debug)]
@@ -63,8 +66,8 @@ fn main() -> ExitCode {
///
/// # Return value
/// - `Err(e)` if an I/O-related error `e` occurred.
/// - `Ok(false)` if the structure is invalid, all the structural errors have been written to `error_writer`.
/// - `Ok(true)` if the structure is valid, nothing will have been written to `error_writer`.
/// - `Ok(false)` if there are problems, all of which will be written to `error_writer`.
/// - `Ok(true)` if there are no problems
pub fn check_nixpkgs<W: io::Write>(
nixpkgs_path: &Path,
version: Version,
@@ -76,31 +79,38 @@ pub fn check_nixpkgs<W: io::Write>(
nixpkgs_path.display()
))?;
// Wraps the error_writer to print everything in red, and tracks whether anything was printed
// at all. Later used to figure out if the structure was valid or not.
let mut error_writer = ErrorWriter::new(error_writer);
if !nixpkgs_path.join(structure::BASE_SUBPATH).exists() {
let check_result = if !nixpkgs_path.join(utils::BASE_SUBPATH).exists() {
eprintln!(
"Given Nixpkgs path does not contain a {} subdirectory, no check necessary.",
structure::BASE_SUBPATH
utils::BASE_SUBPATH
);
Success(())
} else {
let nixpkgs = Nixpkgs::new(&nixpkgs_path, &mut error_writer)?;
if error_writer.empty {
// Only if we could successfully parse the structure, we do the semantic checks
eval::check_values(version, &mut error_writer, &nixpkgs, eval_accessible_paths)?;
references::check_references(&mut error_writer, &nixpkgs)?;
match check_structure(&nixpkgs_path)? {
Failure(errors) => Failure(errors),
Success(package_names) =>
// Only if we could successfully parse the structure, we do the evaluation checks
{
eval::check_values(version, &nixpkgs_path, package_names, eval_accessible_paths)?
}
}
};
match check_result {
Failure(errors) => {
for error in errors {
writeln!(error_writer, "{}", error.to_string().red())?
}
Ok(false)
}
Success(_) => Ok(true),
}
Ok(error_writer.empty)
}
#[cfg(test)]
mod tests {
use crate::check_nixpkgs;
use crate::structure;
use crate::utils;
use crate::Version;
use anyhow::Context;
use std::fs;
@@ -145,7 +155,7 @@ mod tests {
return Ok(());
}
let base = path.join(structure::BASE_SUBPATH);
let base = path.join(utils::BASE_SUBPATH);
fs::create_dir_all(base.join("fo/foo"))?;
fs::write(base.join("fo/foo/package.nix"), "{ someDrv }: someDrv")?;

src/nixpkgs_problem.rs (View File)

@@ -0,0 +1,218 @@
use crate::utils::PACKAGE_NIX_FILENAME;
use rnix::parser::ParseError;
use std::ffi::OsString;
use std::fmt;
use std::io;
use std::path::PathBuf;
/// Any problem that can occur when checking Nixpkgs
pub enum NixpkgsProblem {
ShardNonDir {
relative_shard_path: PathBuf,
},
InvalidShardName {
relative_shard_path: PathBuf,
shard_name: String,
},
PackageNonDir {
relative_package_dir: PathBuf,
},
CaseSensitiveDuplicate {
relative_shard_path: PathBuf,
first: OsString,
second: OsString,
},
InvalidPackageName {
relative_package_dir: PathBuf,
package_name: String,
},
IncorrectShard {
relative_package_dir: PathBuf,
correct_relative_package_dir: PathBuf,
},
PackageNixNonExistent {
relative_package_dir: PathBuf,
},
PackageNixDir {
relative_package_dir: PathBuf,
},
UndefinedAttr {
relative_package_file: PathBuf,
package_name: String,
},
WrongCallPackage {
relative_package_file: PathBuf,
package_name: String,
},
NonDerivation {
relative_package_file: PathBuf,
package_name: String,
},
OutsideSymlink {
relative_package_dir: PathBuf,
subpath: PathBuf,
},
UnresolvableSymlink {
relative_package_dir: PathBuf,
subpath: PathBuf,
io_error: io::Error,
},
CouldNotParseNix {
relative_package_dir: PathBuf,
subpath: PathBuf,
error: ParseError,
},
PathInterpolation {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
SearchPath {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
OutsidePathReference {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
},
UnresolvablePathReference {
relative_package_dir: PathBuf,
subpath: PathBuf,
line: usize,
text: String,
io_error: io::Error,
},
}
impl fmt::Display for NixpkgsProblem {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
NixpkgsProblem::ShardNonDir { relative_shard_path } =>
write!(
f,
"{}: This is a file, but it should be a directory.",
relative_shard_path.display(),
),
NixpkgsProblem::InvalidShardName { relative_shard_path, shard_name } =>
write!(
f,
"{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".",
relative_shard_path.display()
),
NixpkgsProblem::PackageNonDir { relative_package_dir } =>
write!(
f,
"{}: This path is a file, but it should be a directory.",
relative_package_dir.display(),
),
NixpkgsProblem::CaseSensitiveDuplicate { relative_shard_path, first, second } =>
write!(
f,
"{}: Duplicate case-sensitive package directories {first:?} and {second:?}.",
relative_shard_path.display(),
),
NixpkgsProblem::InvalidPackageName { relative_package_dir, package_name } =>
write!(
f,
"{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".",
relative_package_dir.display(),
),
NixpkgsProblem::IncorrectShard { relative_package_dir, correct_relative_package_dir } =>
write!(
f,
"{}: Incorrect directory location, should be {} instead.",
relative_package_dir.display(),
correct_relative_package_dir.display(),
),
NixpkgsProblem::PackageNixNonExistent { relative_package_dir } =>
write!(
f,
"{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.",
relative_package_dir.display(),
),
NixpkgsProblem::PackageNixDir { relative_package_dir } =>
write!(
f,
"{}: \"{PACKAGE_NIX_FILENAME}\" must be a file.",
relative_package_dir.display(),
),
NixpkgsProblem::UndefinedAttr { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute is not defined but it should be defined automatically as {}",
relative_package_file.display()
),
NixpkgsProblem::WrongCallPackage { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute is manually defined (most likely in pkgs/top-level/all-packages.nix), which is only allowed if the definition is of the form `pkgs.callPackage {} {{ ... }}` with a non-empty second argument.",
relative_package_file.display()
),
NixpkgsProblem::NonDerivation { relative_package_file, package_name } =>
write!(
f,
"pkgs.{package_name}: This attribute defined by {} is not a derivation",
relative_package_file.display()
),
NixpkgsProblem::OutsideSymlink { relative_package_dir, subpath } =>
write!(
f,
"{}: Path {} is a symlink pointing to a path outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
),
NixpkgsProblem::UnresolvableSymlink { relative_package_dir, subpath, io_error } =>
write!(
f,
"{}: Path {} is a symlink which cannot be resolved: {io_error}.",
relative_package_dir.display(),
subpath.display(),
),
NixpkgsProblem::CouldNotParseNix { relative_package_dir, subpath, error } =>
write!(
f,
"{}: File {} could not be parsed by rnix: {}",
relative_package_dir.display(),
subpath.display(),
error,
),
NixpkgsProblem::PathInterpolation { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text
),
NixpkgsProblem::SearchPath { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text
),
NixpkgsProblem::OutsidePathReference { relative_package_dir, subpath, line, text } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.",
relative_package_dir.display(),
subpath.display(),
text,
),
NixpkgsProblem::UnresolvablePathReference { relative_package_dir, subpath, line, text, io_error } =>
write!(
f,
"{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {io_error}.",
relative_package_dir.display(),
subpath.display(),
text,
),
}
}
}

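The `Display` implementation above is what later becomes the red problem lines printed by `main.rs`. A minimal sketch of how one variant renders, written as a hypothetical unit test that is not part of this PR:

```rust
#[cfg(test)]
mod tests {
    use super::NixpkgsProblem;
    use std::path::PathBuf;

    // Hypothetical test, not part of this PR: checks the message produced by
    // the Display impl above for a single variant.
    #[test]
    fn renders_missing_package_nix() {
        let problem = NixpkgsProblem::PackageNixNonExistent {
            relative_package_dir: PathBuf::from("pkgs/by-name/fo/foo"),
        };
        assert_eq!(
            problem.to_string(),
            "pkgs/by-name/fo/foo: Missing required \"package.nix\" file."
        );
    }
}
```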
src/references.rs (View File)

@@ -1,105 +1,98 @@
use crate::structure::Nixpkgs;
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::utils;
use crate::utils::{ErrorWriter, LineIndex};
use crate::utils::LineIndex;
use crate::validation::{self, ResultIteratorExt, Validation::Success};
use anyhow::Context;
use rnix::{Root, SyntaxKind::NODE_PATH};
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::io;
use std::path::{Path, PathBuf};
/// Small helper so we don't need to pass in the same arguments to all functions
struct PackageContext<'a, W: io::Write> {
error_writer: &'a mut ErrorWriter<W>,
/// The package directory relative to Nixpkgs, such as `pkgs/by-name/fo/foo`
relative_package_dir: &'a PathBuf,
/// The absolute package directory
absolute_package_dir: &'a PathBuf,
}
use std::path::Path;
/// Check that every package directory in pkgs/by-name doesn't link to outside that directory.
/// Both symlinks and Nix path expressions are checked.
pub fn check_references<W: io::Write>(
error_writer: &mut ErrorWriter<W>,
nixpkgs: &Nixpkgs,
) -> anyhow::Result<()> {
// Check the directories for each package separately
for package_name in &nixpkgs.package_names {
let relative_package_dir = Nixpkgs::relative_dir_for_package(package_name);
let mut context = PackageContext {
error_writer,
relative_package_dir: &relative_package_dir,
absolute_package_dir: &nixpkgs.path.join(&relative_package_dir),
};
// The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself
check_path(&mut context, Path::new("")).context(format!(
"While checking the references in package directory {}",
relative_package_dir.display()
))?;
}
Ok(())
pub fn check_references(
relative_package_dir: &Path,
absolute_package_dir: &Path,
) -> validation::Result<()> {
// The empty argument here is the subpath under the package directory to check
// An empty one means the package directory itself
check_path(relative_package_dir, absolute_package_dir, Path::new("")).context(format!(
"While checking the references in package directory {}",
relative_package_dir.display()
))
}
/// Checks for a specific path to not have references outside
fn check_path<W: io::Write>(context: &mut PackageContext<W>, subpath: &Path) -> anyhow::Result<()> {
let path = context.absolute_package_dir.join(subpath);
fn check_path(
relative_package_dir: &Path,
absolute_package_dir: &Path,
subpath: &Path,
) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath);
if path.is_symlink() {
Ok(if path.is_symlink() {
// Check whether the symlink resolves to outside the package directory
match path.canonicalize() {
Ok(target) => {
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) {
context.error_writer.write(&format!(
"{}: Path {} is a symlink pointing to a path outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
))?;
if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) {
NixpkgsProblem::OutsideSymlink {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
}
.into()
} else {
Success(())
}
}
Err(e) => {
context.error_writer.write(&format!(
"{}: Path {} is a symlink which cannot be resolved: {e}.",
context.relative_package_dir.display(),
subpath.display(),
))?;
Err(io_error) => NixpkgsProblem::UnresolvableSymlink {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
io_error,
}
.into(),
}
} else if path.is_dir() {
// Recursively check each entry
for entry in utils::read_dir_sorted(&path)? {
let entry_subpath = subpath.join(entry.file_name());
check_path(context, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display()))?
}
validation::sequence_(
utils::read_dir_sorted(&path)?
.into_iter()
.map(|entry| {
let entry_subpath = subpath.join(entry.file_name());
check_path(relative_package_dir, absolute_package_dir, &entry_subpath)
.context(format!("Error while recursing into {}", subpath.display()))
})
.collect_vec()?,
)
} else if path.is_file() {
// Only check Nix files
if let Some(ext) = path.extension() {
if ext == OsStr::new("nix") {
check_nix_file(context, subpath).context(format!(
"Error while checking Nix file {}",
subpath.display()
))?
check_nix_file(relative_package_dir, absolute_package_dir, subpath).context(
format!("Error while checking Nix file {}", subpath.display()),
)?
} else {
Success(())
}
} else {
Success(())
}
} else {
// This should never happen, git doesn't support other file types
anyhow::bail!("Unsupported file type for path {}", subpath.display());
}
Ok(())
})
}
/// Check whether a nix file contains path expression references pointing outside the package
/// directory
fn check_nix_file<W: io::Write>(
context: &mut PackageContext<W>,
fn check_nix_file(
relative_package_dir: &Path,
absolute_package_dir: &Path,
subpath: &Path,
) -> anyhow::Result<()> {
let path = context.absolute_package_dir.join(subpath);
) -> validation::Result<()> {
let path = absolute_package_dir.join(subpath);
let parent_dir = path.parent().context(format!(
"Could not get parent of path {}",
subpath.display()
@@ -110,75 +103,73 @@ fn check_nix_file<W: io::Write>(
let root = Root::parse(&contents);
if let Some(error) = root.errors().first() {
context.error_writer.write(&format!(
"{}: File {} could not be parsed by rnix: {}",
context.relative_package_dir.display(),
subpath.display(),
error,
))?;
return Ok(());
return Ok(NixpkgsProblem::CouldNotParseNix {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
error: error.clone(),
}
.into());
}
let line_index = LineIndex::new(&contents);
for node in root.syntax().descendants() {
// We're only interested in Path expressions
if node.kind() != NODE_PATH {
continue;
}
Ok(validation::sequence_(root.syntax().descendants().map(
|node| {
let text = node.text().to_string();
let line = line_index.line(node.text_range().start().into());
let text = node.text().to_string();
let line = line_index.line(node.text_range().start().into());
// Filters out ./foo/${bar}/baz
// TODO: We can just check ./foo
if node.children().count() != 0 {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\", which is not yet supported and may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
text
))?;
continue;
}
// Filters out search paths like <nixpkgs>
if text.starts_with('<') {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the nix search path expression \"{}\" which may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
text
))?;
continue;
}
// Resolves the reference of the Nix path
// turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz`
match parent_dir.join(Path::new(&text)).canonicalize() {
Ok(target) => {
// Then checking if it's still in the package directory
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(context.absolute_package_dir) {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\" which may point outside the directory of that package.",
context.relative_package_dir.display(),
subpath.display(),
if node.kind() != NODE_PATH {
// We're only interested in Path expressions
Success(())
} else if node.children().count() != 0 {
// Filters out ./foo/${bar}/baz
// TODO: We can just check ./foo
NixpkgsProblem::PathInterpolation {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else if text.starts_with('<') {
// Filters out search paths like <nixpkgs>
NixpkgsProblem::SearchPath {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else {
// Resolves the reference of the Nix path
// turning `../baz` inside `/foo/bar/default.nix` to `/foo/baz`
match parent_dir.join(Path::new(&text)).canonicalize() {
Ok(target) => {
// Then checking if it's still in the package directory
// No need to handle the case of it being inside the directory, since we scan through the
// entire directory recursively anyways
if let Err(_prefix_error) = target.strip_prefix(absolute_package_dir) {
NixpkgsProblem::OutsidePathReference {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
}
.into()
} else {
Success(())
}
}
Err(e) => NixpkgsProblem::UnresolvablePathReference {
relative_package_dir: relative_package_dir.to_path_buf(),
subpath: subpath.to_path_buf(),
line,
text,
))?;
io_error: e,
}
.into(),
}
}
Err(e) => {
context.error_writer.write(&format!(
"{}: File {} at line {line} contains the path expression \"{}\" which cannot be resolved: {e}.",
context.relative_package_dir.display(),
subpath.display(),
text,
))?;
}
};
}
Ok(())
},
)))
}

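`check_nix_file` builds on a few `rnix` primitives that the diff uses but does not spell out: parse the file, walk the syntax tree, and pick out `NODE_PATH` nodes. A standalone sketch of that detection step (not part of this PR; the sample Nix snippet is made up, and error handling via `root.errors()` is omitted):

```rust
use rnix::{Root, SyntaxKind::NODE_PATH};

// Collect the textual form of every path expression in a Nix source string.
fn path_expressions(contents: &str) -> Vec<String> {
    let root = Root::parse(contents);
    root.syntax()
        .descendants()
        .filter(|node| node.kind() == NODE_PATH)
        .map(|node| node.text().to_string())
        .collect()
}

fn main() {
    let nix = "{ bar = import ../bar.nix; pkg = ./package.nix; }";
    // Should print ["../bar.nix", "./package.nix"]; check_nix_file then
    // classifies each one (interpolation, search path, outside reference)
    // and resolves it relative to the file's parent directory.
    println!("{:?}", path_expressions(nix));
}
```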
src/structure.rs (View File)

@@ -1,152 +1,170 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use crate::references;
use crate::utils;
use crate::utils::ErrorWriter;
use crate::utils::{BASE_SUBPATH, PACKAGE_NIX_FILENAME};
use crate::validation::{self, ResultIteratorExt, Validation::Success};
use itertools::concat;
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
use std::io;
use std::fs::DirEntry;
use std::path::{Path, PathBuf};
pub const BASE_SUBPATH: &str = "pkgs/by-name";
pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
lazy_static! {
static ref SHARD_NAME_REGEX: Regex = Regex::new(r"^[a-z0-9_-]{1,2}$").unwrap();
static ref PACKAGE_NAME_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9_-]+$").unwrap();
}
/// Contains information about the structure of the pkgs/by-name directory of a Nixpkgs
pub struct Nixpkgs {
/// The path to nixpkgs
pub path: PathBuf,
/// The names of all packages declared in pkgs/by-name
pub package_names: Vec<String>,
// Some utility functions for the basic structure
pub fn shard_for_package(package_name: &str) -> String {
package_name.to_lowercase().chars().take(2).collect()
}
impl Nixpkgs {
// Some utility functions for the basic structure
pub fn shard_for_package(package_name: &str) -> String {
package_name.to_lowercase().chars().take(2).collect()
}
pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf {
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}"))
}
pub fn relative_dir_for_package(package_name: &str) -> PathBuf {
Nixpkgs::relative_dir_for_shard(&Nixpkgs::shard_for_package(package_name))
.join(package_name)
}
pub fn relative_file_for_package(package_name: &str) -> PathBuf {
Nixpkgs::relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME)
}
pub fn relative_dir_for_shard(shard_name: &str) -> PathBuf {
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}"))
}
impl Nixpkgs {
/// Read the structure of a Nixpkgs directory, displaying errors on the writer.
/// May return early with I/O errors.
pub fn new<W: io::Write>(
path: &Path,
error_writer: &mut ErrorWriter<W>,
) -> anyhow::Result<Nixpkgs> {
let base_dir = path.join(BASE_SUBPATH);
pub fn relative_dir_for_package(package_name: &str) -> PathBuf {
relative_dir_for_shard(&shard_for_package(package_name)).join(package_name)
}
let mut package_names = Vec::new();
pub fn relative_file_for_package(package_name: &str) -> PathBuf {
relative_dir_for_package(package_name).join(PACKAGE_NIX_FILENAME)
}
for shard_entry in utils::read_dir_sorted(&base_dir)? {
/// Check the structure of Nixpkgs, returning the attribute names that are defined in
/// `pkgs/by-name`
pub fn check_structure(path: &Path) -> validation::Result<Vec<String>> {
let base_dir = path.join(BASE_SUBPATH);
let shard_results = utils::read_dir_sorted(&base_dir)?
.into_iter()
.map(|shard_entry| -> validation::Result<_> {
let shard_path = shard_entry.path();
let shard_name = shard_entry.file_name().to_string_lossy().into_owned();
let relative_shard_path = Nixpkgs::relative_dir_for_shard(&shard_name);
let relative_shard_path = relative_dir_for_shard(&shard_name);
if shard_name == "README.md" {
Ok(if shard_name == "README.md" {
// README.md is allowed to be a file and not checked
continue;
}
if !shard_path.is_dir() {
error_writer.write(&format!(
"{}: This is a file, but it should be a directory.",
relative_shard_path.display(),
))?;
Success(vec![])
} else if !shard_path.is_dir() {
NixpkgsProblem::ShardNonDir {
relative_shard_path: relative_shard_path.clone(),
}
.into()
// we can't check for any other errors if it's a file, since there's no subdirectories to check
continue;
}
let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name);
if !shard_name_valid {
error_writer.write(&format!(
"{}: Invalid directory name \"{shard_name}\", must be at most 2 ASCII characters consisting of a-z, 0-9, \"-\" or \"_\".",
relative_shard_path.display()
))?;
}
let mut unique_package_names = HashMap::new();
for package_entry in utils::read_dir_sorted(&shard_path)? {
let package_path = package_entry.path();
let package_name = package_entry.file_name().to_string_lossy().into_owned();
let relative_package_dir =
PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}"));
if !package_path.is_dir() {
error_writer.write(&format!(
"{}: This path is a file, but it should be a directory.",
relative_package_dir.display(),
))?;
continue;
}
if let Some(duplicate_package_name) =
unique_package_names.insert(package_name.to_lowercase(), package_name.clone())
{
error_writer.write(&format!(
"{}: Duplicate case-sensitive package directories \"{duplicate_package_name}\" and \"{package_name}\".",
relative_shard_path.display(),
))?;
}
let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name);
if !package_name_valid {
error_writer.write(&format!(
"{}: Invalid package directory name \"{package_name}\", must be ASCII characters consisting of a-z, A-Z, 0-9, \"-\" or \"_\".",
relative_package_dir.display(),
))?;
}
let correct_relative_package_dir = Nixpkgs::relative_dir_for_package(&package_name);
if relative_package_dir != correct_relative_package_dir {
// Only show this error if we have a valid shard and package name
// Because if one of those is wrong, you should fix that first
if shard_name_valid && package_name_valid {
error_writer.write(&format!(
"{}: Incorrect directory location, should be {} instead.",
relative_package_dir.display(),
correct_relative_package_dir.display(),
))?;
} else {
let shard_name_valid = SHARD_NAME_REGEX.is_match(&shard_name);
let result = if !shard_name_valid {
NixpkgsProblem::InvalidShardName {
relative_shard_path: relative_shard_path.clone(),
shard_name: shard_name.clone(),
}
}
.into()
} else {
Success(())
};
let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME);
if !package_nix_path.exists() {
error_writer.write(&format!(
"{}: Missing required \"{PACKAGE_NIX_FILENAME}\" file.",
relative_package_dir.display(),
))?;
} else if package_nix_path.is_dir() {
error_writer.write(&format!(
"{}: \"{PACKAGE_NIX_FILENAME}\" must be a file.",
relative_package_dir.display(),
))?;
}
let entries = utils::read_dir_sorted(&shard_path)?;
package_names.push(package_name.clone());
}
}
let duplicate_results = entries
.iter()
.zip(entries.iter().skip(1))
.filter(|(l, r)| {
l.file_name().to_ascii_lowercase() == r.file_name().to_ascii_lowercase()
})
.map(|(l, r)| {
NixpkgsProblem::CaseSensitiveDuplicate {
relative_shard_path: relative_shard_path.clone(),
first: l.file_name(),
second: r.file_name(),
}
.into()
});
Ok(Nixpkgs {
path: path.to_owned(),
package_names,
let result = result.and(validation::sequence_(duplicate_results));
let package_results = entries
.into_iter()
.map(|package_entry| {
check_package(path, &shard_name, shard_name_valid, package_entry)
})
.collect_vec()?;
result.and(validation::sequence(package_results))
})
})
}
.collect_vec()?;
// Combine the package names contained within each shard into a longer list
Ok(validation::sequence(shard_results).map(concat))
}
fn check_package(
path: &Path,
shard_name: &str,
shard_name_valid: bool,
package_entry: DirEntry,
) -> validation::Result<String> {
let package_path = package_entry.path();
let package_name = package_entry.file_name().to_string_lossy().into_owned();
let relative_package_dir = PathBuf::from(format!("{BASE_SUBPATH}/{shard_name}/{package_name}"));
Ok(if !package_path.is_dir() {
NixpkgsProblem::PackageNonDir {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else {
let package_name_valid = PACKAGE_NAME_REGEX.is_match(&package_name);
let result = if !package_name_valid {
NixpkgsProblem::InvalidPackageName {
relative_package_dir: relative_package_dir.clone(),
package_name: package_name.clone(),
}
.into()
} else {
Success(())
};
let correct_relative_package_dir = relative_dir_for_package(&package_name);
let result = result.and(if relative_package_dir != correct_relative_package_dir {
// Only show this error if we have a valid shard and package name
// Because if one of those is wrong, you should fix that first
if shard_name_valid && package_name_valid {
NixpkgsProblem::IncorrectShard {
relative_package_dir: relative_package_dir.clone(),
correct_relative_package_dir: correct_relative_package_dir.clone(),
}
.into()
} else {
Success(())
}
} else {
Success(())
});
let package_nix_path = package_path.join(PACKAGE_NIX_FILENAME);
let result = result.and(if !package_nix_path.exists() {
NixpkgsProblem::PackageNixNonExistent {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else if package_nix_path.is_dir() {
NixpkgsProblem::PackageNixDir {
relative_package_dir: relative_package_dir.clone(),
}
.into()
} else {
Success(())
});
let result = result.and(references::check_references(
&relative_package_dir,
&path.join(&relative_package_dir),
)?);
result.map(|_| package_name.clone())
})
}

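The path helpers above encode the `pkgs/by-name` layout: the shard is the lowercased first two characters of the package name. A hypothetical unit test (not part of this PR) showing the paths derived for an assumed package name `Hello`:

```rust
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::PathBuf;

    // Hypothetical test, not part of this PR: shows how the helpers above map
    // a package name to its shard directory and package.nix location.
    #[test]
    fn paths_for_package() {
        assert_eq!(shard_for_package("Hello"), "he");
        assert_eq!(
            relative_dir_for_package("Hello"),
            PathBuf::from("pkgs/by-name/he/Hello")
        );
        assert_eq!(
            relative_file_for_package("Hello"),
            PathBuf::from("pkgs/by-name/he/Hello/package.nix")
        );
    }
}
```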
src/utils.rs (View File)

@@ -1,9 +1,11 @@
use anyhow::Context;
use colored::Colorize;
use std::fs;
use std::io;
use std::path::Path;
pub const BASE_SUBPATH: &str = "pkgs/by-name";
pub const PACKAGE_NIX_FILENAME: &str = "package.nix";
/// Deterministic file listing so that tests are reproducible
pub fn read_dir_sorted(base_dir: &Path) -> anyhow::Result<Vec<fs::DirEntry>> {
let listing = base_dir
@@ -47,26 +49,3 @@ impl LineIndex {
}
}
}
/// A small wrapper around a generic io::Write specifically for errors:
/// - Print everything in red to signal it's an error
/// - Keep track of whether anything was printed at all, so that
/// it can be queried whether any errors were encountered at all
pub struct ErrorWriter<W> {
pub writer: W,
pub empty: bool,
}
impl<W: io::Write> ErrorWriter<W> {
pub fn new(writer: W) -> ErrorWriter<W> {
ErrorWriter {
writer,
empty: true,
}
}
pub fn write(&mut self, string: &str) -> io::Result<()> {
self.empty = false;
writeln!(self.writer, "{}", string.red())
}
}

src/validation.rs (View File)

@@ -0,0 +1,102 @@
use crate::nixpkgs_problem::NixpkgsProblem;
use itertools::concat;
use itertools::{
Either::{Left, Right},
Itertools,
};
use Validation::*;
/// The validation result of a check.
/// Instead of exiting at the first failure,
/// this type can accumulate multiple failures.
/// This can be achieved using the functions `and`, `sequence` and `sequence_`
///
/// This leans on https://hackage.haskell.org/package/validation
pub enum Validation<A> {
Failure(Vec<NixpkgsProblem>),
Success(A),
}
impl<A> From<NixpkgsProblem> for Validation<A> {
/// Create a `Validation<A>` from a single check problem
fn from(value: NixpkgsProblem) -> Self {
Failure(vec![value])
}
}
/// A type alias representing the result of a check, either:
/// - Err(anyhow::Error): A fatal failure, typically I/O errors.
/// Such failures are not caused by the files in Nixpkgs.
/// This hints at a bug in the code or a problem with the deployment.
/// - Ok(Failure(Vec<NixpkgsProblem>)): A non-fatal validation problem with the Nixpkgs files.
/// Further checks can be run even with this result type.
/// Such problems can be fixed by changing the Nixpkgs files.
/// - Ok(Success(A)): A successful (potentially intermediate) result with an arbitrary value.
/// No fatal errors have occurred and no validation problems have been found with Nixpkgs.
pub type Result<A> = anyhow::Result<Validation<A>>;
pub trait ResultIteratorExt<A, E>: Sized + Iterator<Item = std::result::Result<A, E>> {
fn collect_vec(self) -> std::result::Result<Vec<A>, E>;
}
impl<I, A, E> ResultIteratorExt<A, E> for I
where
I: Sized + Iterator<Item = std::result::Result<A, E>>,
{
/// A convenience version of `collect` specialised to a vector
fn collect_vec(self) -> std::result::Result<Vec<A>, E> {
self.collect()
}
}
impl<A> Validation<A> {
/// Map a `Validation<A>` to a `Validation<B>` by applying a function to the
/// potentially contained value in case of success.
pub fn map<B>(self, f: impl FnOnce(A) -> B) -> Validation<B> {
match self {
Failure(err) => Failure(err),
Success(value) => Success(f(value)),
}
}
}
impl Validation<()> {
/// Combine two validations, both of which need to be successful for the return value to be successful.
/// The `NixpkgsProblem`s of both sides are returned concatenated.
pub fn and<A>(self, other: Validation<A>) -> Validation<A> {
match (self, other) {
(Success(_), Success(right_value)) => Success(right_value),
(Failure(errors), Success(_)) => Failure(errors),
(Success(_), Failure(errors)) => Failure(errors),
(Failure(errors_l), Failure(errors_r)) => Failure(concat([errors_l, errors_r])),
}
}
}
/// Combine many validations into a single one.
/// All given validations need to be successful in order for the returned validation to be successful,
/// in which case the returned validation value contains a `Vec` of each individual value.
/// Otherwise the `NixpkgsProblem`s of all validations are returned concatenated.
pub fn sequence<A>(check_results: impl IntoIterator<Item = Validation<A>>) -> Validation<Vec<A>> {
let (errors, values): (Vec<Vec<NixpkgsProblem>>, Vec<A>) = check_results
.into_iter()
.partition_map(|validation| match validation {
Failure(err) => Left(err),
Success(value) => Right(value),
});
// To combine the errors from the results we flatten all the error Vec's into a new Vec
// This is not very efficient, but doesn't matter because generally we should have no errors
let flattened_errors = errors.into_iter().concat();
if flattened_errors.is_empty() {
Success(values)
} else {
Failure(flattened_errors)
}
}
/// Like `sequence`, but without any containing value, for convenience
pub fn sequence_(validations: impl IntoIterator<Item = Validation<()>>) -> Validation<()> {
sequence(validations).map(|_| ())
}