Unverified Commit 2d9bfd24 authored by github-actions[bot]'s avatar github-actions[bot] Committed by GitHub
Browse files

Merge master into haskell-updates

parents ea0284a3 c96a78b5
Loading
Loading
Loading
Loading
+90 −1
Original line number Diff line number Diff line
@@ -21,6 +21,16 @@ jobs:
        with:
          # pull_request_target checks out the base branch by default
          ref: refs/pull/${{ github.event.pull_request.number }}/merge
          # Fetches the merge commit and its parents
          fetch-depth: 2
      - name: Determining PR git hashes
        run: |
          echo "mergedSha=$(git rev-parse HEAD)" >> "$GITHUB_ENV"

          # For pull_request_target this is the same as $GITHUB_SHA
          echo "baseSha=$(git rev-parse HEAD^1)" >> "$GITHUB_ENV"

          echo "headSha=$(git rev-parse HEAD^2)" >> "$GITHUB_ENV"
      - uses: cachix/install-nix-action@v23
      - name: Determining channel to use for dependencies
        run: |
@@ -51,4 +61,83 @@ jobs:
          # Passing --max-jobs 0 makes sure that we won't build anything
          nix-build "$nixpkgs" -A tests.nixpkgs-check-by-name --max-jobs 0
      - name: Running nixpkgs-check-by-name
        run: result/bin/nixpkgs-check-by-name .
        run: |
          echo "Checking whether the check succeeds on the base branch $GITHUB_BASE_REF"
          git checkout -q "$baseSha"
          if baseOutput=$(result/bin/nixpkgs-check-by-name . 2>&1); then
            baseSuccess=1
          else
            baseSuccess=
          fi
          printf "%s\n" "$baseOutput"

          echo "Checking whether the check would succeed after merging this pull request"
          git checkout -q "$mergedSha"
          if mergedOutput=$(result/bin/nixpkgs-check-by-name . 2>&1); then
            mergedSuccess=1
            exitCode=0
          else
            mergedSuccess=
            exitCode=1
          fi
          printf "%s\n" "$mergedOutput"

          resultToEmoji() {
            if [[ -n "$1" ]]; then
              echo ":heavy_check_mark:"
            else
              echo ":x:"
            fi
          }

          # Print a markdown summary in GitHub actions
          {
            echo "| Nixpkgs version | Check result |"
            echo "| --- | --- |"
            echo "| Latest base commit | $(resultToEmoji "$baseSuccess") |"
            echo "| After merging this PR | $(resultToEmoji "$mergedSuccess") |"
            echo ""

            if [[ -n "$baseSuccess" ]]; then
              if [[ -n "$mergedSuccess" ]]; then
                echo "The check succeeds on both the base branch and after merging this PR"
              else
                echo "The check succeeds on the base branch, but would fail after merging this PR:"
                echo "\`\`\`"
                echo "$mergedOutput"
                echo "\`\`\`"
                echo ""
              fi
            else
              if [[ -n "$mergedSuccess" ]]; then
                echo "The check fails on the base branch, but this PR fixes it, nicely done!"
              else
                echo "The check fails on both the base branch and after merging this PR, unknown if only this PRs changes would satisfy the check, the base branch needs to be fixed first."
                echo ""
                echo "Failure on the base branch:"
                echo "\`\`\`"
                echo "$baseOutput"
                echo "\`\`\`"
                echo ""
                echo "Failure after merging this PR:"
                echo "\`\`\`"
                echo "$mergedOutput"
                echo "\`\`\`"
                echo ""
              fi
            fi

            echo "### Details"
            echo "- nixpkgs-check-by-name tool:"
            echo "  - Channel: $channel"
            echo "  - Nixpkgs commit: [$rev](https://github.com/${GITHUB_REPOSITORY}/commit/$rev)"
            echo "  - Store path: \`$(realpath result)\`"
            echo "- Tested Nixpkgs:"
            echo "  - Base branch $GITHUB_BASE_REF"
            echo "  - Latest base branch commit: [$baseSha](https://github.com/${GITHUB_REPOSITORY}/commit/$baseSha)"
            echo "  - Latest PR commit: [$headSha](https://github.com/${GITHUB_REPOSITORY}/commit/$headSha)"
            echo "  - Merge commit: [$mergedSha](https://github.com/${GITHUB_REPOSITORY}/commit/$mergedSha)"
          } >> "$GITHUB_STEP_SUMMARY"

          exit "$exitCode"
+6 −5
Original line number Diff line number Diff line
@@ -212,9 +212,9 @@ Note: this is not possible anymore for Neovim.

## Adding new plugins to nixpkgs {#adding-new-plugins-to-nixpkgs}

Nix expressions for Vim plugins are stored in [pkgs/applications/editors/vim/plugins](https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/editors/vim/plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py). This creates a [generated.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/vim-plugin-names).
Nix expressions for Vim plugins are stored in [pkgs/applications/editors/vim/plugins](https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/editors/vim/plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`nix-shell -p vimPluginsUpdater --run vim-plugins-updater`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/updater.nix). This creates a [generated.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/vim-plugin-names).

After running `./update.py`, if nvim-treesitter received an update, also run [`nvim-treesitter/update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py) to update the tree sitter grammars for `nvim-treesitter`.
After running the updater, if nvim-treesitter received an update, also run [`nvim-treesitter/update.py`](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/update.py) to update the tree sitter grammars for `nvim-treesitter`.

Some plugins require overrides in order to function properly. Overrides are placed in [overrides.nix](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/editors/vim/plugins/overrides.nix). Overrides are most often required when a plugin requires some dependencies, or extra steps are required during the build process. For example `deoplete-fish` requires both `deoplete-nvim` and `vim-fish`, and so the following override was added:

@@ -241,7 +241,8 @@ GITHUB_API_TOKEN=my_token ./pkgs/applications/editors/vim/plugins/update.py
Alternatively, set the number of processes to a lower count to avoid rate-limiting.

```sh
./pkgs/applications/editors/vim/plugins/update.py --proc 1

nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater --proc 1'
```

## How to maintain an out-of-tree overlay of vim plugins ? {#vim-out-of-tree-overlays}
@@ -250,7 +251,7 @@ You can use the updater script to generate basic packages out of a custom vim
plugin list:

```
pkgs/applications/editors/vim/plugins/update.py -i vim-plugin-names -o generated.nix --no-commit
nix-shell -p vimPluginsUpdater --run vim-plugins-updater -i vim-plugin-names -o generated.nix --no-commit
```

with the contents of `vim-plugin-names` being for example:
@@ -264,7 +265,7 @@ You can then reference the generated vim plugins via:

```nix
myVimPlugins = pkgs.vimPlugins.extend (
  (pkgs.callPackage generated.nix {})
  (pkgs.callPackage ./generated.nix {})
);
```
+33 −1
Original line number Diff line number Diff line
# File set library

This is the internal contributor documentation.
The user documentation is [in the Nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#sec-fileset).

## Goals

The main goal of the file set library is to be able to select local files that should be added to the Nix store.
It should have the following properties:
- Easy:
@@ -41,9 +46,16 @@ An attribute set with these values:
- `_type` (constant string `"fileset"`):
  Tag to indicate this value is a file set.

- `_internalVersion` (constant `2`, the current version):
- `_internalVersion` (constant `3`, the current version):
  Version of the representation.

- `_internalIsEmptyWithoutBase` (bool):
  Whether this file set is the empty file set without a base path.
  If `true`, `_internalBase*` and `_internalTree` are not set.
  This is the only way to represent an empty file set without needing a base path.

  Such a value can be used as the identity element for `union` and the return value of `unions []` and co.

- `_internalBase` (path):
  Any files outside of this path cannot influence the set of files.
  This is always a directory.
@@ -111,6 +123,26 @@ Arguments:
- (+) This can be removed later, if we discover it's too restrictive
- (-) It leads to errors when a sensible result could sometimes be returned, such as in the above example.

### Empty file set without a base

There is a special representation for an empty file set without a base path.
This is used for return values that should be empty but when there's no base path that would make sense.

Arguments:
- Alternative: This could also be represented using `_internalBase = /.` and `_internalTree = null`.
  - (+) Removes the need for a special representation.
  - (-) Due to [influence tracking](#influence-tracking),
    `union empty ./.` would have `/.` as the base path,
    which would then prevent `toSource { root = ./.; fileset = union empty ./.; }` from working,
    which is not as one would expect.
  - (-) With the assumption that there can be multiple filesystem roots (as established with the [path library](../path/README.md)),
    this would have to cause an error with `union empty pathWithAnotherFilesystemRoot`,
    which is not as one would expect.
- Alternative: Do not have such a value and error when it would be needed as a return value
  - (+) Removes the need for a special representation.
  - (-) Leaves us with no identity element for `union` and no reasonable return value for `unions []`.
    From a set theory perspective, which has a well-known notion of empty sets, this is unintuitive.

### Empty directories

File sets can only represent a _set_ of local files, directories on their own are not representable.
+2 −6
Original line number Diff line number Diff line
@@ -156,7 +156,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
          lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
    # Currently all Nix paths have the same filesystem root, but this could change in the future.
    # See also ../path/README.md
    else if rootFilesystemRoot != filesetFilesystemRoot then
    else if ! fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
      throw ''
        lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` ("${toString root}"):
            `root`: root "${toString rootFilesystemRoot}"
@@ -170,7 +170,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
        lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
            - If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
            - If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as ${toString (dirOf root)}, and set `fileset` to the file path.''
    else if ! hasPrefix root fileset._internalBase then
    else if ! fileset._internalIsEmptyWithoutBase && ! hasPrefix root fileset._internalBase then
      throw ''
        lib.fileset.toSource: `fileset` could contain files in ${toString fileset._internalBase}, which is not under the `root` (${toString root}). Potential solutions:
            - Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
@@ -258,15 +258,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
  */
  unions =
    # A list of file sets.
    # Must contain at least 1 element.
    # The elements can also be paths,
    # which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
    filesets:
    if ! isList filesets then
      throw "lib.fileset.unions: Expected argument to be a list, but got a ${typeOf filesets}."
    else if filesets == [ ] then
      # TODO: This could be supported, but requires an extra internal representation for the empty file set, which would be special for not having a base path.
      throw "lib.fileset.unions: Expected argument to be a list with at least one element, but it contains no elements."
    else
      pipe filesets [
        # Annotate the elements with context, used by _coerceMany for better errors
+56 −13
Original line number Diff line number Diff line
@@ -28,6 +28,7 @@ let
    drop
    elemAt
    filter
    findFirst
    findFirstIndex
    foldl'
    head
@@ -64,7 +65,7 @@ rec {
  # - Increment this version
  # - Add an additional migration function below
  # - Update the description of the internal representation in ./README.md
  _currentVersion = 2;
  _currentVersion = 3;

  # Migrations between versions. The 0th element converts from v0 to v1, and so on
  migrations = [
@@ -89,8 +90,34 @@ rec {
        _internalVersion = 2;
      }
    )

    # Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
    (
      filesetV2:
      filesetV2 // {
        # All v2 file sets are not the new empty file set
        _internalIsEmptyWithoutBase = false;
        _internalVersion = 3;
      }
    )
  ];

  _noEvalMessage = ''
    lib.fileset: Directly evaluating a file set is not supported. Use `lib.fileset.toSource` to turn it into a usable source instead.'';

  # The empty file set without a base path
  _emptyWithoutBase = {
    _type = "fileset";

    _internalVersion = _currentVersion;

    # The one and only!
    _internalIsEmptyWithoutBase = true;

    # Double __ to make it be evaluated and ordered first
    __noEval = throw _noEvalMessage;
  };

  # Create a fileset, see ./README.md#fileset
  # Type: path -> filesetTree -> fileset
  _create = base: tree:
@@ -103,14 +130,15 @@ rec {
      _type = "fileset";

      _internalVersion = _currentVersion;

      _internalIsEmptyWithoutBase = false;
      _internalBase = base;
      _internalBaseRoot = parts.root;
      _internalBaseComponents = components parts.subpath;
      _internalTree = tree;

      # Double __ to make it be evaluated and ordered first
      __noEval = throw ''
        lib.fileset: Directly evaluating a file set is not supported. Use `lib.fileset.toSource` to turn it into a usable source instead.'';
      __noEval = throw _noEvalMessage;
    };

  # Coerce a value to a fileset, erroring when the value cannot be coerced.
@@ -155,14 +183,20 @@ rec {
        _coerce "${functionContext}: ${context}" value
      ) list;

      firstBaseRoot = (head filesets)._internalBaseRoot;
      # Find the first value with a base, there may be none!
      firstWithBase = findFirst (fileset: ! fileset._internalIsEmptyWithoutBase) null filesets;
      # This value is only accessed if firstWithBase != null
      firstBaseRoot = firstWithBase._internalBaseRoot;

      # Finds the first element with a filesystem root different than the first element, if any
      differentIndex = findFirstIndex (fileset:
        firstBaseRoot != fileset._internalBaseRoot
        # The empty value without a base doesn't have a base path
        ! fileset._internalIsEmptyWithoutBase
        && firstBaseRoot != fileset._internalBaseRoot
      ) null filesets;
    in
    if differentIndex != null then
    # Only evaluates `differentIndex` if there are any elements with a base
    if firstWithBase != null && differentIndex != null then
      throw ''
        ${functionContext}: Filesystem roots are not the same:
            ${(head list).context}: root "${toString firstBaseRoot}"
@@ -311,7 +345,7 @@ rec {
    # Special case because the code below assumes that the _internalBase is always included in the result
    # which shouldn't be done when we have no files at all in the base
    # This also forces the tree before returning the filter, which leads to earlier error messages
    if tree == null then
    if fileset._internalIsEmptyWithoutBase || tree == null then
      empty
    else
      nonEmpty;
@@ -321,7 +355,12 @@ rec {
  # Type: [ Fileset ] -> Fileset
  _unionMany = filesets:
    let
      first = head filesets;
      # All filesets that have a base, aka not the ones that are the empty value without a base
      filesetsWithBase = filter (fileset: ! fileset._internalIsEmptyWithoutBase) filesets;

      # The first fileset that has a base.
      # This value is only accessed if there are any filesets with a base at all
      firstWithBase = head filesetsWithBase;

      # To be able to union filesetTree's together, they need to have the same base path.
      # Base paths can be unioned by taking their common prefix,
@@ -332,14 +371,14 @@ rec {
      # so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
      commonBaseComponents = foldl'
        (components: el: commonPrefix components el._internalBaseComponents)
        first._internalBaseComponents
        firstWithBase._internalBaseComponents
        # We could also not do the `tail` here to avoid a list allocation,
        # but then we'd have to pay for a potentially expensive
        # but unnecessary `commonPrefix` call
        (tail filesets);
        (tail filesetsWithBase);

      # The common base path assembled from a filesystem root and the common components
      commonBase = append first._internalBaseRoot (join commonBaseComponents);
      commonBase = append firstWithBase._internalBaseRoot (join commonBaseComponents);

      # A list of filesetTree's that all have the same base path
      # This is achieved by nesting the trees into the components they have over the common base path
@@ -351,13 +390,17 @@ rec {
        setAttrByPath
          (drop (length commonBaseComponents) fileset._internalBaseComponents)
          fileset._internalTree
        ) filesets;
        ) filesetsWithBase;

      # Folds all trees together into a single one using _unionTree
      # We do not use a fold here because it would cause a thunk build-up
      # which could cause a stack overflow for a large number of trees
      resultTree = _unionTrees trees;
    in
    # If there's no values with a base, we have no files
    if filesetsWithBase == [ ] then
      _emptyWithoutBase
    else
      _create commonBase resultTree;

  # The union of multiple filesetTree's with the same base path.
Loading