Unverified Commit f4cf33c3 authored by Fabian Affolter's avatar Fabian Affolter Committed by GitHub
Browse files

python3Packages.llama-cloud: 0.1.45 -> 1.6.0; upgrade dependents; drop...

python3Packages.llama-cloud: 0.1.45 -> 1.6.0; upgrade dependents; drop deprecated packages (#499644)
parents 25820c5a cf18f04c
Loading
Loading
Loading
Loading
+0 −64
Original line number Diff line number Diff line
{
  lib,
  buildPythonPackage,
  click,
  eval-type-backport,
  fetchFromGitHub,
  gitUpdater,
  hatchling,
  llama-cloud,
  llama-index-core,
  platformdirs,
  pydantic,
  python-dotenv,
}:

buildPythonPackage (finalAttrs: {
  pname = "llama-cloud-services";
  version = "0.6.94";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "run-llama";
    repo = "llama_cloud_services";
    # NOTE(review): upstream tags contain a literal "@" (see rev-prefix below);
    # "%40" is its URL-encoded form — confirm fetchFromGitHub requires the
    # pre-encoded tag here rather than the raw one.
    tag = "llama-cloud-services-py%40${finalAttrs.version}";
    hash = "sha256-BjwXdv7ekehYGGnKk0ElVlxmGkmtam9RLECgxfM7lYc=";
  };

  # The Python package lives in the py/ subdirectory of the upstream monorepo
  sourceRoot = "${finalAttrs.src.name}/py";

  # Loosen upstream's version bound on llama-cloud so the nixpkgs version is accepted
  pythonRelaxDeps = [ "llama-cloud" ];

  build-system = [ hatchling ];

  dependencies = [
    click
    eval-type-backport
    llama-cloud
    llama-index-core
    platformdirs
    pydantic
    python-dotenv
  ];

  # Missing dependency autoevals
  doCheck = false;

  pythonImportsCheck = [ "llama_cloud_services" ];

  # update script sets wrong version
  passthru = {
    skipBulkUpdate = true;
    updateScript = gitUpdater {
      # Raw "@" here: gitUpdater matches against the git tag itself, not a URL
      rev-prefix = "llama-cloud-services-py@";
    };
  };

  meta = {
    description = "Knowledge Agents and Management in the Cloud";
    homepage = "https://github.com/run-llama/llama_cloud_services";
    changelog = "https://github.com/run-llama/llama_cloud_services/releases/tag/${finalAttrs.src.tag}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
})
+45 −9
Original line number Diff line number Diff line
@@ -2,31 +2,67 @@
  lib,
  buildPythonPackage,
  fetchPypi,
  hatchling,
  hatch-fancy-pypi-readme,
  pythonOlder,
  pythonAtLeast,

  # Dependencies
  httpx,
  poetry-core,
  pydantic,
  anyio,
  distro,
  sniffio,

  # Test dependencies
  pytestCheckHook,
  pytest-asyncio,
  pytest-xdist,
  dirty-equals,
  respx,
  llama-index-core,
}:

buildPythonPackage rec {
buildPythonPackage (finalAttrs: {
  pname = "llama-cloud";
  version = "0.1.45";
  version = "1.6.0";
  pyproject = true;

  src = fetchPypi {
    pname = "llama_cloud";
    inherit version;
    hash = "sha256-FAJEAIzFcQ4xrpfGBDlzo6mWmlGw84FV+jOoQ0B46Ko=";
    inherit (finalAttrs) version;
    hash = "sha256-sAx133a1m+zKcvJix1WllSnwwJ8M2nnghu7e/GLVmsg=";
  };

  build-system = [ poetry-core ];
  postPatch = ''
    substituteInPlace pyproject.toml \
      --replace-fail "hatchling==1.26.3" "hatchling>=1.26.3"
  '';

  build-system = [
    hatchling
    hatch-fancy-pypi-readme
  ];

  dependencies = [
    httpx
    pydantic
    distro
    sniffio
    anyio
  ];

  # Module has no tests
  doCheck = false;
  nativeCheckInputs = [
    pytestCheckHook
    pytest-asyncio
    pytest-xdist
    dirty-equals
    respx
  ]
  ++ lib.optional (pythonOlder "3.14") llama-index-core;

  # Transitively requires google-pasta (broken on 3.14) through llama-index-core
  disabledTestPaths = lib.optional (pythonAtLeast "3.14") "tests/test_index.py";

  pythonImportsCheck = [ "llama_cloud" ];

@@ -36,4 +72,4 @@ buildPythonPackage rec {
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
}
})
+2 −4
Original line number Diff line number Diff line
@@ -6,18 +6,17 @@
  llama-index-core,
  llama-index-embeddings-openai,
  llama-index-llms-openai,
  llama-index-vector-stores-chroma,
}:

buildPythonPackage (finalAttrs: {
  pname = "llama-index-cli";
  version = "0.5.5";
  version = "0.5.6";
  pyproject = true;

  src = fetchPypi {
    pname = "llama_index_cli";
    inherit (finalAttrs) version;
    hash = "sha256-ot5aIvZ19gkIyM0f2HPxMs8r/fNGL6ee9fvmuVcnows=";
    hash = "sha256-ThTQcv6/Ym0F+CHQSoWN6N2cx8mDdmWKCrmEifWmvPc=";
  };

  build-system = [ hatchling ];
@@ -26,7 +25,6 @@ buildPythonPackage (finalAttrs: {
    llama-index-core
    llama-index-embeddings-openai
    llama-index-llms-openai
    llama-index-vector-stores-chroma
  ];

  # Tests are only available in the mono repo
+11 −7
Original line number Diff line number Diff line
@@ -31,24 +31,25 @@
  spacy,
  sqlalchemy,
  tenacity,
  tinytag,
  tiktoken,
  tree-sitter,
  typing-inspect,
}:

buildPythonPackage rec {
buildPythonPackage (finalAttrs: {
  pname = "llama-index-core";
  version = "0.14.12";
  version = "0.14.19";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "run-llama";
    repo = "llama_index";
    tag = "v${version}";
    hash = "sha256-grF9IToAMc3x5/40+u3lHU9RyjROWu1e3M6N1owq0f4=";
    tag = "v${finalAttrs.version}";
    hash = "sha256-xcssJPBXq3bjSD13nsR6jRTmTWPVks8aKHZCZ3lSKY4=";
  };

  sourceRoot = "${src.name}/${pname}";
  sourceRoot = "${finalAttrs.src.name}/${finalAttrs.pname}";

  # When `llama-index` is imported, it uses `nltk` to look for the following files and tries to
  # download them if they aren't present.
@@ -96,6 +97,7 @@ buildPythonPackage rec {
    spacy
    sqlalchemy
    tenacity
    tinytag
    tiktoken
    typing-inspect
  ];
@@ -133,6 +135,8 @@ buildPythonPackage rec {
    "tests/tools/"
    "tests/schema/"
    "tests/multi_modal_llms/"
    "tests/prompts/"
    "tests/base/llms/"
  ];

  disabledTests = [
@@ -160,8 +164,8 @@ buildPythonPackage rec {
  meta = {
    description = "Data framework for your LLM applications";
    homepage = "https://github.com/run-llama/llama_index/";
    changelog = "https://github.com/run-llama/llama_index/blob/${src.tag}/CHANGELOG.md";
    changelog = "https://github.com/run-llama/llama_index/blob/${finalAttrs.src.tag}/CHANGELOG.md";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
}
})
+10 −6
Original line number Diff line number Diff line
@@ -3,23 +3,27 @@
  buildPythonPackage,
  fetchPypi,
  llama-index-core,
  openai,
  hatchling,
}:

buildPythonPackage rec {
buildPythonPackage (finalAttrs: {
  pname = "llama-index-embeddings-openai";
  version = "0.5.2";
  version = "0.6.0";
  pyproject = true;

  src = fetchPypi {
    pname = "llama_index_embeddings_openai";
    inherit version;
    hash = "sha256-CRvQw+kYJ0jogn3n15cTohnV9eDcl9G7eycc9SRSDks=";
    inherit (finalAttrs) version;
    hash = "sha256-6z5mBr6By4kSUHPiPJfAphGdq7SCetvRRpfCAprXPyk=";
  };

  build-system = [ hatchling ];

  dependencies = [ llama-index-core ];
  dependencies = [
    llama-index-core
    openai
  ];

  # Tests are only available in the mono repo
  doCheck = false;
@@ -32,4 +36,4 @@ buildPythonPackage rec {
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ fab ];
  };
}
})
Loading