Loading pkgs/development/python-modules/sqlframe/default.nix 0 → 100644 +79 −0 Original line number Diff line number Diff line { lib, buildPythonPackage, fetchFromGitHub, # build-system setuptools-scm, # dependencies prettytable, sqlglot, typing-extensions, # tests databricks-sql-connector, duckdb, findspark, google-cloud-bigquery, pyspark, pytest-postgresql, pytest-xdist, pytestCheckHook, }: buildPythonPackage rec { pname = "sqlframe"; version = "3.31.2"; pyproject = true; src = fetchFromGitHub { owner = "eakmanrq"; repo = "sqlframe"; tag = "v${version}"; hash = "sha256-X7KZSbq9KKUE4lXZIHj++koJmjWYI1sMhA6LfClr9pU="; }; build-system = [ setuptools-scm ]; dependencies = [ prettytable sqlglot typing-extensions ]; pythonImportsCheck = [ "sqlframe" ]; nativeCheckInputs = [ databricks-sql-connector duckdb findspark google-cloud-bigquery pyspark pytest-postgresql pytest-xdist pytestCheckHook ]; disabledTests = [ # Requires google-cloud credentials # google.auth.exceptions.DefaultCredentialsErro "test_activate_bigquery_default_dataset" ]; disabledTestPaths = [ # duckdb.duckdb.CatalogException: Catalog Error: Table Function with name "dsdgen" is not in the catalog, but it exists in the tpcds extension. # "tests/integration/test_int_dataframe.py" "tests/integration/" ]; meta = { description = "Turning PySpark Into a Universal DataFrame API"; homepage = "https://github.com/eakmanrq/sqlframe"; changelog = "https://github.com/eakmanrq/sqlframe/releases/tag/v${version}"; license = lib.licenses.mit; maintainers = with lib.maintainers; [ GaetanLepage ]; }; } pkgs/top-level/python-packages.nix +2 −0 Original line number Diff line number Diff line Loading @@ -16485,6 +16485,8 @@ self: super: with self; { sqlfmt = callPackage ../development/python-modules/sqlfmt { }; sqlframe = callPackage ../development/python-modules/sqlframe { }; sqlglot = callPackage ../development/python-modules/sqlglot { }; sqlite-anyio = callPackage ../development/python-modules/sqlite-anyio { }; Loading
# Python package: sqlframe — a PySpark-compatible DataFrame API that compiles
# to SQL for multiple engines (DuckDB, BigQuery, Postgres, Databricks, ...).
{
  lib,
  buildPythonPackage,
  fetchFromGitHub,

  # build-system
  setuptools-scm,

  # dependencies
  prettytable,
  sqlglot,
  typing-extensions,

  # tests
  databricks-sql-connector,
  duckdb,
  findspark,
  google-cloud-bigquery,
  pyspark,
  pytest-postgresql,
  pytest-xdist,
  pytestCheckHook,
}:

buildPythonPackage rec {
  pname = "sqlframe";
  version = "3.31.2";
  pyproject = true;

  src = fetchFromGitHub {
    owner = "eakmanrq";
    repo = "sqlframe";
    tag = "v${version}";
    hash = "sha256-X7KZSbq9KKUE4lXZIHj++koJmjWYI1sMhA6LfClr9pU=";
  };

  build-system = [ setuptools-scm ];

  dependencies = [
    prettytable
    sqlglot
    typing-extensions
  ];

  pythonImportsCheck = [ "sqlframe" ];

  nativeCheckInputs = [
    databricks-sql-connector
    duckdb
    findspark
    google-cloud-bigquery
    pyspark
    pytest-postgresql
    pytest-xdist
    pytestCheckHook
  ];

  disabledTests = [
    # Requires google-cloud credentials:
    # google.auth.exceptions.DefaultCredentialsError
    "test_activate_bigquery_default_dataset"
  ];

  disabledTestPaths = [
    # duckdb.duckdb.CatalogException: Catalog Error: Table Function with name
    # "dsdgen" is not in the catalog, but it exists in the tpcds extension.
    # "tests/integration/test_int_dataframe.py"
    "tests/integration/"
  ];

  meta = {
    description = "Turning PySpark Into a Universal DataFrame API";
    homepage = "https://github.com/eakmanrq/sqlframe";
    changelog = "https://github.com/eakmanrq/sqlframe/releases/tag/v${version}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ GaetanLepage ];
  };
}
pkgs/top-level/python-packages.nix +2 −0 Original line number Diff line number Diff line Loading @@ -16485,6 +16485,8 @@ self: super: with self; { sqlfmt = callPackage ../development/python-modules/sqlfmt { }; sqlframe = callPackage ../development/python-modules/sqlframe { }; sqlglot = callPackage ../development/python-modules/sqlglot { }; sqlite-anyio = callPackage ../development/python-modules/sqlite-anyio { };