Commit 4261ff37 authored by Hugo Herter's avatar Hugo Herter Committed by Gaetan Lepage

python3Packages.unsloth-zoo: 2026.3.4 -> 2026.4.2

https://pypi.org/project/unsloth-zoo/2026.4.2/

- update the declared runtime dependencies to match the new upstream
  metadata
- narrow the relaxed dependency bounds to the ones still needed in
  nixpkgs (datasets and torch)
- keep only the patch hunk that removes the hard runtime requirement
  on unsloth
- correct the package license metadata to LGPL-3.0-or-later
- disable the import check, since importing unsloth_zoo now touches
  torch.cuda and queries device memory at import time
- add a GPU-only passthru smoke test covering that import path
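The smoke test is skipped during the normal sandboxed build. On a
machine with a CUDA GPU it can be built directly, e.g. (assuming the
conventional attribute path exposed by passthru):

    nix-build -A python3Packages.unsloth-zoo.gpuCheck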
parent a424a303
+49 −10
@@ -11,30 +11,42 @@
  accelerate,
  cut-cross-entropy,
  datasets,
  filelock,
  hf-transfer,
  huggingface-hub,
  msgspec,
  numpy,
  packaging,
  peft,
  pillow,
  protobuf,
  psutil,
  regex,
  sentencepiece,
  torch,
  torchao,
  triton,
  tqdm,
  transformers,
  trl,
  tyro,
  typing-extensions,

  # tests
  cudaPackages,
  python,
}:

buildPythonPackage (finalAttrs: {
  pname = "unsloth-zoo";
  version = "2026.3.4";
  version = "2026.4.2";
  pyproject = true;

  # no tags on GitHub
  src = fetchPypi {
    pname = "unsloth_zoo";
    inherit (finalAttrs) version;
    hash = "sha256-24w8UV5cLG5XWrU73xfmg+Jk32zl+QSPdqXLzMtmP1E=";
    hash = "sha256-l0OTaZjPrNnrxVYIfZcf6pYr1tJS9EGj+iguU6S+D28=";
  };

  postPatch = ''
@@ -47,14 +59,11 @@ buildPythonPackage (finalAttrs: {
        "setuptools-scm"
  '';

  # pyproject.toml requires an obsolete version of protobuf,
  # but it is not used.
  # Upstream issue: https://github.com/unslothai/unsloth-zoo/pull/68
  # Upstream constrains datasets/torch more tightly than the versions
  # currently shipped in nixpkgs, but the package still builds and works with
  # the newer dependency set here.
  pythonRelaxDeps = [
    "datasets"
    "protobuf"
    "transformers"
    "trl"
    "torch"
  ];

@@ -72,29 +81,59 @@ buildPythonPackage (finalAttrs: {
    accelerate
    cut-cross-entropy
    datasets
    filelock
    hf-transfer
    huggingface-hub
    msgspec
    numpy
    packaging
    peft
    pillow
    protobuf
    psutil
    regex
    sentencepiece
    torch
    torchao
    triton
    tqdm
    transformers
    trl
    tyro
    typing-extensions
  ];

  # No tests
  doCheck = false;

  pythonImportsCheck = [ "unsloth_zoo" ];
  # Importing touches torch.cuda at module import time and queries GPU memory.
  dontUsePythonImportsCheck = true;

  # Cover the import path on GPU-enabled runners instead of pure builders.
  passthru.gpuCheck =
    (cudaPackages.writeGpuTestPython.override { python3Packages = python.pkgs; }
      {
        libraries = ps: [ ps.unsloth-zoo ];
      }
      ''
        import torch

        assert torch.cuda.is_available(), "CUDA is not available"
        assert torch.ones(1, device="cuda").is_cuda

        import unsloth_zoo  # noqa: F401
        from unsloth_zoo.device_type import DEVICE_COUNT, DEVICE_TYPE

        assert DEVICE_TYPE == "cuda", DEVICE_TYPE
        assert DEVICE_COUNT > 0, DEVICE_COUNT
        print(f"Unsloth Zoo detected {DEVICE_COUNT} CUDA device(s)")
      ''
    ).gpuCheck;

  meta = {
    description = "Utils for Unsloth";
    homepage = "https://github.com/unslothai/unsloth_zoo";
    license = lib.licenses.mit;
    license = lib.licenses.lgpl3Plus;
    maintainers = with lib.maintainers; [ hoh ];
  };
})
+0 −14
@@ -21,17 +21,3 @@ index 8c4404c..dc1666f 100644
 try:
     print("🦥 Unsloth: Will patch your computer to enable 2x faster free finetuning.")
 except:
diff --git a/unsloth_zoo/device_type.py b/unsloth_zoo/device_type.py
index 11136fb..8f8dafc 100644
--- a/unsloth_zoo/device_type.py
+++ b/unsloth_zoo/device_type.py
@@ -209,6 +209,9 @@ def get_device_type():
         return "cuda"
     elif hasattr(torch, "xpu") and torch.xpu.is_available():
         return "xpu"
+    else:
+        # Allow import during tests
+        return None
     # Check torch.accelerator
     if hasattr(torch, "accelerator"):
         if not torch.accelerator.is_available():