Loading pkgs/development/python-modules/tf2onnx/default.nix 0 → 100644 +86 −0 Original line number Diff line number Diff line { lib , buildPythonPackage , fetchFromGitHub , pythonRelaxDepsHook , pytest-runner # runtime dependencies , numpy , onnx , requests , six , flatbuffers , protobuf , tensorflow # check dependencies , pytestCheckHook , graphviz , parameterized , pytest-cov , pyyaml , timeout-decorator , onnxruntime , keras }: buildPythonPackage rec { pname = "tf2onnx"; version = "1.14.0"; format = "setuptools"; src = fetchFromGitHub { owner = "onnx"; repo = "tensorflow-onnx"; rev = "v${version}"; hash = "sha256-JpXwf+GLjn0krsb5KnEhVuemWa0V2+wF10neDsdtOfI="; }; nativeBuildInputs = [ pythonRelaxDepsHook pytest-runner ]; pythonRelaxDeps = [ "flatbuffers" ]; propagatedBuildInputs = [ numpy onnx requests six flatbuffers protobuf tensorflow onnxruntime ]; pythonImportsCheck = [ "tf2onnx" ]; nativeCheckInputs = [ pytestCheckHook graphviz parameterized pytest-cov pyyaml timeout-decorator keras ]; # TODO investigate the failures disabledTestPaths = [ "tests/test_backend.py" "tests/test_einsum_helper.py" "tests/test_einsum_optimizers.py" ]; disabledTests = [ "test_profile_conversion_time" ]; meta = with lib; { description = "Convert TensorFlow, Keras, Tensorflow.js and Tflite models to ONNX"; homepage = "https://github.com/onnx/tensorflow-onnx"; license = licenses.asl20; maintainers = with maintainers; [ happysalada ]; }; } pkgs/development/python-modules/transformers/default.nix +3 −2 Original line number Diff line number Diff line Loading @@ -18,6 +18,7 @@ , scikit-learn , tensorflow , onnxconverter-common , tf2onnx , torch , accelerate , faiss Loading Loading @@ -99,7 +100,7 @@ buildPythonPackage rec { tf = [ tensorflow onnxconverter-common # tf2onnx tf2onnx # tensorflow-text # keras-nlp ]; Loading @@ -119,7 +120,7 @@ buildPythonPackage rec { ]; onnx = [ onnxconverter-common # tf2onnx tf2onnx onnxruntime ]; modelcreation = [ Loading pkgs/top-level/python-packages.nix 
+2 −0 Original line number Diff line number Diff line Loading @@ -12387,6 +12387,8 @@ self: super: with self; { textx = callPackage ../development/python-modules/textx { }; tf2onnx = callPackage ../development/python-modules/tf2onnx { }; tflearn = callPackage ../development/python-modules/tflearn { }; tftpy = callPackage ../development/python-modules/tftpy { }; Loading Loading
# pkgs/development/python-modules/tf2onnx/default.nix
#
# Builds tf2onnx, a converter from TensorFlow / Keras / TensorFlow.js /
# TFLite models to the ONNX interchange format.
{ lib
, buildPythonPackage
, fetchFromGitHub
, pythonRelaxDepsHook
, pytest-runner

# runtime dependencies
, numpy
, onnx
, requests
, six
, flatbuffers
, protobuf
, tensorflow

# check dependencies
, pytestCheckHook
, graphviz
, parameterized
, pytest-cov
, pyyaml
, timeout-decorator
, onnxruntime
, keras
}:

buildPythonPackage rec {
  pname = "tf2onnx";
  version = "1.14.0";
  format = "setuptools";

  # Upstream repo is named "tensorflow-onnx" even though the Python
  # distribution and import name are "tf2onnx".
  src = fetchFromGitHub {
    owner = "onnx";
    repo = "tensorflow-onnx";
    rev = "v${version}";
    hash = "sha256-JpXwf+GLjn0krsb5KnEhVuemWa0V2+wF10neDsdtOfI=";
  };

  nativeBuildInputs = [
    pythonRelaxDepsHook
    pytest-runner
  ];

  # Loosen upstream's flatbuffers pin so the nixpkgs-provided version is
  # accepted.
  pythonRelaxDeps = [
    "flatbuffers"
  ];

  propagatedBuildInputs = [
    numpy
    onnx
    requests
    six
    flatbuffers
    protobuf
    tensorflow
    onnxruntime
  ];

  pythonImportsCheck = [
    "tf2onnx"
  ];

  nativeCheckInputs = [
    pytestCheckHook
    graphviz
    parameterized
    pytest-cov
    pyyaml
    timeout-decorator
    keras
  ];

  # TODO investigate the failures
  disabledTestPaths = [
    "tests/test_backend.py"
    "tests/test_einsum_helper.py"
    "tests/test_einsum_optimizers.py"
  ];

  disabledTests = [
    "test_profile_conversion_time"
  ];

  meta = with lib; {
    description = "Convert TensorFlow, Keras, Tensorflow.js and Tflite models to ONNX";
    homepage = "https://github.com/onnx/tensorflow-onnx";
    license = licenses.asl20;
    maintainers = with maintainers; [ happysalada ];
  };
}
pkgs/development/python-modules/transformers/default.nix +3 −2 Original line number Diff line number Diff line Loading @@ -18,6 +18,7 @@ , scikit-learn , tensorflow , onnxconverter-common , tf2onnx , torch , accelerate , faiss Loading Loading @@ -99,7 +100,7 @@ buildPythonPackage rec { tf = [ tensorflow onnxconverter-common # tf2onnx tf2onnx # tensorflow-text # keras-nlp ]; Loading @@ -119,7 +120,7 @@ buildPythonPackage rec { ]; onnx = [ onnxconverter-common # tf2onnx tf2onnx onnxruntime ]; modelcreation = [ Loading
pkgs/top-level/python-packages.nix +2 −0 Original line number Diff line number Diff line Loading @@ -12387,6 +12387,8 @@ self: super: with self; { textx = callPackage ../development/python-modules/textx { }; tf2onnx = callPackage ../development/python-modules/tf2onnx { }; tflearn = callPackage ../development/python-modules/tflearn { }; tftpy = callPackage ../development/python-modules/tftpy { }; Loading