From 72269ab58cd32e7cb1a0a944fb7394ec30ece574 Mon Sep 17 00:00:00 2001
From: mertalev <101130780+mertalev@users.noreply.github.com>
Date: Thu, 11 Jul 2024 19:12:55 -0400
Subject: [PATCH] add cli

---
 machine-learning/export/ann/Dockerfile     |   41 +-
 machine-learning/export/ann/conda-lock.yml | 1600 +++++++++++++++++
 machine-learning/export/ann/env.yaml       |  214 +--
 .../export/ann/onnx2ann/__init__.py        |    0
 .../export/ann/onnx2ann/__main__.py        |   99 +
 .../export/ann/onnx2ann/export.py          |  129 ++
 .../export/ann/onnx2ann/helpers.py         |  260 +++
 machine-learning/export/ann/pyproject.toml |   56 +
 machine-learning/export/ann/run.py         |  475 -----
 .../export/ann/{ => scripts}/ann.cpp       |    0
 .../ann/{ => scripts}/build-converter.sh   |    0
 .../export/ann/{ => scripts}/build.sh      |    0
 .../ann/{ => scripts}/download-armnn.sh    |    0
 13 files changed, 2185 insertions(+), 689 deletions(-)
 create mode 100644 machine-learning/export/ann/conda-lock.yml
 create mode 100644 machine-learning/export/ann/onnx2ann/__init__.py
 create mode 100644 machine-learning/export/ann/onnx2ann/__main__.py
 create mode 100644 machine-learning/export/ann/onnx2ann/export.py
 create mode 100644 machine-learning/export/ann/onnx2ann/helpers.py
 create mode 100644 machine-learning/export/ann/pyproject.toml
 delete mode 100644 machine-learning/export/ann/run.py
 rename machine-learning/export/ann/{ => scripts}/ann.cpp (100%)
 rename machine-learning/export/ann/{ => scripts}/build-converter.sh (100%)
 rename machine-learning/export/ann/{ => scripts}/build.sh (100%)
 rename machine-learning/export/ann/{ => scripts}/download-armnn.sh (100%)

diff --git a/machine-learning/export/ann/Dockerfile b/machine-learning/export/ann/Dockerfile
index b6dcdc99a5..9c36d3ed5f 100644
--- a/machine-learning/export/ann/Dockerfile
+++ b/machine-learning/export/ann/Dockerfile
@@ -1,28 +1,35 @@
 FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a as builder
 
-ENV TRANSFORMERS_CACHE=/cache \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    PATH="/opt/venv/bin:$PATH"
-
-WORKDIR /export/ann
-
 USER root
 RUN apt-get update && apt-get install -y --no-install-recommends \
     build-essential \
+    cmake \
     curl \
     git
-
 USER $MAMBA_USER
-COPY --chown=$MAMBA_USER:$MAMBA_USER env.yaml ./
-RUN micromamba install -y -f env.yaml
-COPY --chown=$MAMBA_USER:$MAMBA_USER *.sh *.cpp ./
-ENV ARMNN_PATH=/export/ann/armnn
+WORKDIR /home/mambauser
+ENV ARMNN_PATH=armnn
+COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/* .
 RUN ./download-armnn.sh && \
-    ./build-converter.sh && \
-    ./build.sh
-COPY --chown=$MAMBA_USER:$MAMBA_USER run.py ./
+    ./build-converter.sh && \
+    ./build.sh
 
-ENTRYPOINT ["/usr/local/bin/_entrypoint.sh"]
-CMD ["python", "run.py"]
+COPY --chown=$MAMBA_USER:$MAMBA_USER conda-lock.yml .
+RUN micromamba create -y -p /home/mambauser/venv -f conda-lock.yml && \
+    micromamba clean --all --yes
+ENV PATH="/home/mambauser/venv/bin:${PATH}"
+
+FROM gcr.io/distroless/base-debian12
+# FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a
+
+WORKDIR /export/ann
+ENV PYTHONDONTWRITEBYTECODE=1 \
+    LD_LIBRARY_PATH=/export/ann/armnn \
+    PATH="/opt/venv/bin:${PATH}"
+
+COPY --from=builder /home/mambauser/armnnconverter /home/mambauser/armnn ./
+COPY --from=builder /home/mambauser/venv /opt/venv
+COPY --chown=$MAMBA_USER:$MAMBA_USER onnx2ann onnx2ann
+
+ENTRYPOINT ["python", "-m", "onnx2ann"]
diff --git a/machine-learning/export/ann/conda-lock.yml b/machine-learning/export/ann/conda-lock.yml
new file mode 100644
index 0000000000..de3d50901e
--- /dev/null
+++ b/machine-learning/export/ann/conda-lock.yml
@@ -0,0 +1,1600 @@
+# This lock file was generated by conda-lock (https://github.com/conda/conda-lock). DO NOT EDIT!
+#
+# A "lock file" contains a concrete list of package versions (with checksums) to be installed. Unlike
+# e.g. `conda env create`, the resulting environment will not change as new package versions become
+# available, unless you explicitly update the lock file.
+#
+# Install this environment as "YOURENV" with:
+#     conda-lock install -n YOURENV conda-lock.yml
+# To update a single package to the latest version compatible with the version constraints in the source:
+#     conda-lock lock --lockfile conda-lock.yml --update PACKAGE
+# To re-solve the entire environment, e.g. after changing a version constraint in the source file:
+#     conda-lock -f env.yaml --lockfile conda-lock.yml
+version: 1
+metadata:
+  content_hash:
+    linux-64: a3ef8276903bc5c0ffbd5f80a901f8a9fb92705af02b6e2dad5a4319bb4541a9
+  channels:
+  - url: conda-forge
+    used_env_vars: []
+  platforms:
+  - linux-64
+  sources:
+  - env.yaml
+package:
+- name: _libgcc_mutex
+  version: '0.1'
+  manager: conda
+  platform: linux-64
+  dependencies: {}
+  url: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2
+  hash:
+    md5: d7c89558ba9fa0495403155b64376d81
+    sha256: fe51de6107f9edc7aa4f786a70f4a883943bc9d39b3bb7307c04c41410990726
+  category: main
+  optional: false
+- name: _openmp_mutex
+  version: '4.5'
+  manager: conda
+  platform: linux-64
+  dependencies:
+    _libgcc_mutex: '0.1'
+    libgomp: '>=7.5.0'
+  url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2
+  hash:
+    md5: 73aaf86a425cc6e73fcf236a5a46396d
+    sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22
+  category: main
+  optional: false
+- name: brotli-python
+  version: 1.1.0
+  manager: conda
+  platform: linux-64
+  dependencies:
+    libgcc-ng: '>=12'
+    libstdcxx-ng: '>=12'
+    python: '>=3.12.0rc3,<3.13.0a0'
+    python_abi: 3.12.*
+  url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h30efb56_1.conda
+  hash:
+    md5: 45801a89533d3336a365284d93298e36
+    sha256: b68706698b6ac0d31196a8bcb061f0d1f35264bcd967ea45e03e108149a74c6f
+  category: main
+  optional: false
+- name: bzip2
+  version: 1.0.8
+  manager: conda
+  platform: linux-64
+  dependencies:
+    libgcc-ng: '>=12'
+  url: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda
+  hash:
+    md5: 69b8b6202a07720f448be700e300ccf4
+    sha256: 242c0c324507ee172c0e0dd2045814e746bb303d1eb78870d182ceb0abc726a8
+  category: main
+  optional: false
+- name: ca-certificates
+  version: 2024.7.4
+  manager: conda
+  platform: linux-64
+  dependencies: {}
+  url: 
https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + hash: + md5: 23ab7665c5f63cfb9f1f6195256daac6 + sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 + category: main + optional: false +- name: certifi + version: 2024.7.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/certifi-2024.7.4-pyhd8ed1ab_0.conda + hash: + md5: 24e7fd6ca65997938fff9e5ab6f653e4 + sha256: dd3577bb5275062c388c46b075dcb795f47f8dac561da7dd35fe504b936934e5 + category: main + optional: false +- name: cffi + version: 1.16.0 + manager: conda + platform: linux-64 + dependencies: + libffi: '>=3.4,<4.0a0' + libgcc-ng: '>=12' + pycparser: '' + python: '>=3.12.0rc3,<3.13.0a0' + python_abi: 3.12.* + url: https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py312hf06ca03_0.conda + hash: + md5: 56b0ca764ce23cc54f3f7e2a7b970f6d + sha256: 5a36e2c254603c367d26378fa3a205bd92263e30acf195f488749562b4c44251 + category: main + optional: false +- name: charset-normalizer + version: 3.3.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda + hash: + md5: 7f4a9e3fcff3f6356ae99244a014da6a + sha256: 20cae47d31fdd58d99c4d2e65fbdcefa0b0de0c84e455ba9d6356a4bdbc4b5b9 + category: main + optional: false +- name: click + version: 8.1.7 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda + hash: + md5: f3ad426304898027fc619827ff428eca + sha256: f0016cbab6ac4138a429e28dbcb904a90305b34b3fe41a9b89d697c90401caec + category: main + optional: false +- name: colorama + version: 0.4.6 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 3faab06a954c2a04039983f2c4a50d99 + sha256: 2c1b2e9755ce3102bca8d69e8f26e4f087ece73f50418186aee7c74bef8e1698 + category: main + optional: false +- name: filelock + version: 3.15.4 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/filelock-3.15.4-pyhd8ed1ab_0.conda + hash: + md5: 0e7e4388e9d5283e22b35a9443bdbcc9 + sha256: f78d9c0be189a77cb0c67d02f33005f71b89037a85531996583fb79ff3fe1a0a + category: main + optional: false +- name: flatbuffers + version: 24.3.25 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/flatbuffers-24.3.25-h59595ed_0.conda + hash: + md5: 2941a8c4e4871cdfa738c8c1a7611533 + sha256: 0f3b8d6a958d40d5b2ac105ba0ec09f61dd4ce78cafdf99ab2d0fc298dc54d75 + category: main + optional: false +- name: fsspec + version: 2024.6.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda + hash: + md5: 996bf792cdb8c0ac38ff54b9fde56841 + sha256: 2b8e98294c70d9a33ee0ef27539a8a8752a26efeafa0225e85dc876ef5bb49f4 + category: main + optional: false +- name: h2 + version: 4.1.0 + manager: conda + platform: linux-64 + dependencies: + hpack: '>=4.0,<5' + hyperframe: '>=6.0,<7' + python: '>=3.6.1' + url: https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2 + hash: + md5: b748fbf7060927a6e82df7cb5ee8f097 + sha256: 
bfc6a23849953647f4e255c782e74a0e18fe16f7e25c7bb0bc57b83bb6762c7a + category: main + optional: false +- name: hpack + version: 4.0.0 + manager: conda + platform: linux-64 + dependencies: + python: '' + url: https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2 + hash: + md5: 914d6646c4dbb1fd3ff539830a12fd71 + sha256: 5dec948932c4f740674b1afb551223ada0c55103f4c7bf86a110454da3d27cb8 + category: main + optional: false +- name: huggingface_hub + version: 0.23.4 + manager: conda + platform: linux-64 + dependencies: + filelock: '' + fsspec: '>=2023.5.0' + packaging: '>=20.9' + python: '>=3.8' + pyyaml: '>=5.1' + requests: '' + tqdm: '>=4.42.1' + typing-extensions: '>=3.7.4.3' + url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.23.4-pyhd8ed1ab_0.conda + hash: + md5: 759dfbd44e93d75a23b203fe50dade8d + sha256: 298cf27a319a97f70e826364fcc92aeccb3197e462d40b28297e5ced0cbd753d + category: main + optional: false +- name: hyperframe + version: 6.0.1 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2 + hash: + md5: 9f765cbfab6870c8435b9eefecd7a1f4 + sha256: e374a9d0f53149328134a8d86f5d72bca4c6dcebed3c0ecfa968c02996289330 + category: main + optional: false +- name: idna + version: '3.7' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/idna-3.7-pyhd8ed1ab_0.conda + hash: + md5: c0cc1420498b17414d8617d0b9f506ca + sha256: 9687ee909ed46169395d4f99a0ee94b80a52f87bed69cd454bb6d37ffeb0ec7b + category: main + optional: false +- name: ld_impl_linux-64 + version: '2.40' + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda + hash: + md5: b80f2f396ca2c28b8c14c437a4ed1e74 + sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15 + category: main + optional: false +- name: libabseil + version: '20240116.2' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_h59595ed_0.conda + hash: + md5: 682bdbe046a68f749769b492f3625c5c + sha256: 19b789dc38dff64eee2002675991e63f381eedf5efd5c85f2dac512ed97376d7 + category: main + optional: false +- name: libblas + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libopenblas: '>=0.3.27,<1.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-22_linux64_openblas.conda + hash: + md5: 1a2a0cd3153464fee6646f3dd6dad9b8 + sha256: 082b8ac20d43a7bbcdc28b3b1cd40e4df3a8b5daf0a2d23d68953a44d2d12c1b + category: main + optional: false +- name: libcblas + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-22_linux64_openblas.conda + hash: + md5: 4b31699e0ec5de64d5896e580389c9a1 + sha256: da1b2faa017663c8f5555c1c5518e96ac4cd8e0be2a673c1c9e2cb8507c8fe46 + category: main + optional: false +- name: libexpat + version: 2.6.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda + hash: + md5: e7ba12deb7020dd080c6c70e7b6f6a3d + sha256: 331bb7c7c05025343ebd79f86ae612b9e1e74d2687b8f3179faec234f986ce19 + category: main + optional: false +- name: libffi + version: 3.4.2 + manager: conda + 
platform: linux-64 + dependencies: + libgcc-ng: '>=9.4.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 + hash: + md5: d645c6d2ac96843a2bfaccd2d62b3ac3 + sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e + category: main + optional: false +- name: libgcc-ng + version: 14.1.0 + manager: conda + platform: linux-64 + dependencies: + _libgcc_mutex: '0.1' + _openmp_mutex: '>=4.5' + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda + hash: + md5: ca0fad6a41ddaef54a153b78eccb5037 + sha256: b8e869ac96591cda2704bf7e77a301025e405227791a0bddf14a3dac65125538 + category: main + optional: false +- name: libgfortran-ng + version: 14.1.0 + manager: conda + platform: linux-64 + dependencies: + libgfortran5: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_0.conda + hash: + md5: f4ca84fbd6d06b0a052fb2d5b96dde41 + sha256: ef624dacacf97b2b0af39110b36e2fd3e39e358a1a6b7b21b85c9ac22d8ffed9 + category: main + optional: false +- name: libgfortran5 + version: 14.1.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=14.1.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_0.conda + hash: + md5: 6456c2620c990cd8dde2428a27ba0bc5 + sha256: a67d66b1e60a8a9a9e4440cee627c959acb4810cb182e089a4b0729bfdfbdf90 + category: main + optional: false +- name: libgomp + version: 14.1.0 + manager: conda + platform: linux-64 + dependencies: + _libgcc_mutex: '0.1' + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda + hash: + md5: ae061a5ed5f05818acdf9adab72c146d + sha256: 7699df61a1f6c644b3576a40f54791561f2845983120477a16116b951c9cdb05 + category: main + optional: false +- name: liblapack + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libblas: 3.9.0 + url: https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-22_linux64_openblas.conda + hash: + md5: b083767b6c877e24ee597d93b87ab838 + sha256: db246341d42f9100d45adeb1a7ba8b1ef5b51ceb9056fd643e98046a3259fde6 + category: main + optional: false +- name: libnsl + version: 2.0.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda + hash: + md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 + sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 + category: main + optional: false +- name: libopenblas + version: 0.3.27 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libgfortran-ng: '' + libgfortran5: '>=12.3.0' + url: https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda + hash: + md5: ae05ece66d3924ac3d48b4aa3fa96cec + sha256: 714cb82d7c4620ea2635a92d3df263ab841676c9b183d0c01992767bb2451c39 + category: main + optional: false +- name: libprotobuf + version: 4.25.3 + manager: conda + platform: linux-64 + dependencies: + libabseil: '>=20240116.1,<20240117.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libzlib: '>=1.2.13,<2.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda + hash: + md5: 6945825cebd2aeb16af4c69d97c32c13 + sha256: 70e0eef046033af2e8d21251a785563ad738ed5281c74e21c31c457780845dcd + category: main + optional: false +- name: libsqlite + version: 3.46.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<2.0a0' + url: 
https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + hash: + md5: 18aa975d2094c34aef978060ae7da7d8 + sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 + category: main + optional: false +- name: libstdcxx-ng + version: 14.1.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: 14.1.0 + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda + hash: + md5: 1cb187a157136398ddbaae90713e2498 + sha256: 88c42b388202ffe16adaa337e36cf5022c63cf09b0405cf06fc6aeacccbe6146 + category: main + optional: false +- name: libuuid + version: 2.38.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda + hash: + md5: 40b61aab5c7ba9ff276c41cfffe6b80b + sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 + category: main + optional: false +- name: libxcrypt + version: 4.4.36 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + hash: + md5: 5aa797f8787fe7a17d1b0821485b5adc + sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c + category: main + optional: false +- name: libzlib + version: 1.3.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda + hash: + md5: 57d7dc60e9325e3de37ff8dffd18e814 + sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d + category: main + optional: false +- name: ml_dtypes + version: 0.4.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + numpy: '>=1.19,<3' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + url: https://conda.anaconda.org/conda-forge/linux-64/ml_dtypes-0.4.0-py312h1d6d2e6_1.conda + hash: + md5: ff893cf9cee50a89122e5b765e8a1c37 + sha256: 7a27970a62c65eba0da9aa20c08bee3593ca0d0473e42c674fa0b96fa5d1da72 + category: main + optional: false +- name: ncurses + version: '6.5' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda + hash: + md5: fcea371545eda051b6deafb24889fc69 + sha256: 4fc3b384f4072b68853a0013ea83bdfd3d66b0126e2238e1d6e1560747aa7586 + category: main + optional: false +- name: numpy + version: 2.0.0 + manager: conda + platform: linux-64 + dependencies: + libblas: '>=3.9.0,<4.0a0' + libcblas: '>=3.9.0,<4.0a0' + libgcc-ng: '>=12' + liblapack: '>=3.9.0,<4.0a0' + libstdcxx-ng: '>=12' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + url: https://conda.anaconda.org/conda-forge/linux-64/numpy-2.0.0-py312h22e1c76_0.conda + hash: + md5: 7956c7d65f87aecaba720af6088e72c3 + sha256: e5fc4a1053c8f02db78d4a50733d6c84d04e3c781749ae7478876ecdcd8c87ca + category: main + optional: false +- name: onnx + version: 1.16.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libprotobuf: '>=4.25.3,<4.25.4.0a0' + libstdcxx-ng: '>=12' + numpy: '>=1.19,<3' + protobuf: '' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + typing-extensions: '>=3.6.2.1' + url: https://conda.anaconda.org/conda-forge/linux-64/onnx-1.16.1-py312hb223586_0.conda + hash: + md5: 97d122c89a3eb38e5ee1aeedc749ca95 + sha256: a230f010f344e526208891cc7bc9da557b68fc407beba1d6591c151487b9dffb + category: main + optional: false +- name: openssl + version: 3.3.1 + 
manager: conda + platform: linux-64 + dependencies: + ca-certificates: '' + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4ab18f5_1.conda + hash: + md5: b1e9d076f14e8d776213fd5047b4c3d9 + sha256: ff3faf8d4c1c9aa4bd3263b596a68fcc6ac910297f354b2ce28718a3509db6d9 + category: main + optional: false +- name: packaging + version: '24.1' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda + hash: + md5: cbe1bb1f21567018ce595d9c2be0f0db + sha256: 36aca948219e2c9fdd6d80728bcc657519e02f06c2703d8db3446aec67f51d81 + category: main + optional: false +- name: pip + version: '24.0' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.7' + setuptools: '' + wheel: '' + url: https://conda.anaconda.org/conda-forge/noarch/pip-24.0-pyhd8ed1ab_0.conda + hash: + md5: f586ac1e56c8638b64f9c8122a7b8a67 + sha256: b7c1c5d8f13e8cb491c4bd1d0d1896a4cf80fc47de01059ad77509112b664a4a + category: main + optional: false +- name: protobuf + version: 4.25.3 + manager: conda + platform: linux-64 + dependencies: + libabseil: '>=20240116.1,<20240117.0a0' + libgcc-ng: '>=12' + libprotobuf: '>=4.25.3,<4.25.4.0a0' + libstdcxx-ng: '>=12' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + setuptools: '' + url: https://conda.anaconda.org/conda-forge/linux-64/protobuf-4.25.3-py312h72fbbdf_0.conda + hash: + md5: 83b85f6cee7bbdb9b5ad2f1b785234c8 + sha256: 9bb6d2a6f1b1ae085eb9e3d7bea21da9583edbbc501eaf473511f7ddf0e741bd + category: main + optional: false +- name: psutil + version: 6.0.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h9a8786e_0.conda + hash: + md5: 1aeffa86c55972ca4e88ac843eccedf2 + sha256: d629363515df957507411fd24db2a0635ac893e5d60b2ee2f656b53be9c70b1d + category: main + optional: false +- name: pycparser + version: '2.22' + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda + hash: + md5: 844d9eb3b43095b031874477f7d70088 + sha256: 406001ebf017688b1a1554b49127ca3a4ac4626ec0fd51dc75ffa4415b720b64 + category: main + optional: false +- name: pysocks + version: 1.7.1 + manager: conda + platform: linux-64 + dependencies: + __unix: '' + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 + hash: + md5: 2a7de29fb590ca14b5243c4c812c8025 + sha256: a42f826e958a8d22e65b3394f437af7332610e43ee313393d1cf143f0a2d274b + category: main + optional: false +- name: python + version: 3.12.4 + manager: conda + platform: linux-64 + dependencies: + bzip2: '>=1.0.8,<2.0a0' + ld_impl_linux-64: '>=2.36.1' + libexpat: '>=2.6.2,<3.0a0' + libffi: '>=3.4,<4.0a0' + libgcc-ng: '>=12' + libnsl: '>=2.0.1,<2.1.0a0' + libsqlite: '>=3.46.0,<4.0a0' + libuuid: '>=2.38.1,<3.0a0' + libxcrypt: '>=4.4.36' + libzlib: '>=1.3.1,<2.0a0' + ncurses: '>=6.5,<7.0a0' + openssl: '>=3.3.1,<4.0a0' + readline: '>=8.2,<9.0a0' + tk: '>=8.6.13,<8.7.0a0' + tzdata: '' + xz: '>=5.2.6,<6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.12.4-h194c7f8_0_cpython.conda + hash: + md5: d73490214f536cccb5819e9873048c92 + sha256: 97a78631e6c928bf7ad78d52f7f070fcf3bd37619fa48dc4394c21cf3058cdee + category: main + optional: false +- name: python_abi + version: '3.12' + manager: conda + platform: linux-64 + 
dependencies: {} + url: https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-4_cp312.conda + hash: + md5: dccc2d142812964fcc6abdc97b672dff + sha256: 182a329de10a4165f6e8a3804caf751f918f6ea6176dd4e5abcdae1ed3095bf6 + category: main + optional: false +- name: pyyaml + version: 6.0.1 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + python: '>=3.12.0rc3,<3.13.0a0' + python_abi: 3.12.* + yaml: '>=0.2.5,<0.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py312h98912ed_1.conda + hash: + md5: e3fd78d8d490af1d84763b9fe3f2e552 + sha256: 7f347a10a7121b08d79d21cd4f438c07c23479ea0c74dfb89d6dc416f791bb7f + category: main + optional: false +- name: readline + version: '8.2' + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + ncurses: '>=6.3,<7.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda + hash: + md5: 47d31b792659ce70f470b5c82fdfb7a4 + sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 + category: main + optional: false +- name: requests + version: 2.32.3 + manager: conda + platform: linux-64 + dependencies: + certifi: '>=2017.4.17' + charset-normalizer: '>=2,<4' + idna: '>=2.5,<4' + python: '>=3.8' + urllib3: '>=1.21.1,<3' + url: https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda + hash: + md5: 5ede4753180c7a550a443c430dc8ab52 + sha256: 5845ffe82a6fa4d437a2eae1e32a1ad308d7ad349f61e337c0a890fe04c513cc + category: main + optional: false +- name: setuptools + version: 70.2.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-70.2.0-pyhd8ed1ab_0.conda + hash: + md5: 10170a48c48cfe65eab923f76f982087 + sha256: 354781a1ce4f8f229bf4a19fe48550d5f73d5b511df78a07b1b78fb2c78e52ad + category: main + optional: false +- name: tk + version: 8.6.13 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libzlib: '>=1.2.13,<2.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda + hash: + md5: d453b98d9c83e71da0741bb0ff4d76bc + sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e + category: main + optional: false +- name: tqdm + version: 4.66.4 + manager: conda + platform: linux-64 + dependencies: + colorama: '' + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.4-pyhd8ed1ab_0.conda + hash: + md5: e74cd796e70a4261f86699ee0a3a7a24 + sha256: 75342f40a69e434a1a23003c3e254a95dca695fb14955bc32f1819cd503964b2 + category: main + optional: false +- name: typer-slim + version: 0.12.3 + manager: conda + platform: linux-64 + dependencies: + click: '>=8.0.0' + python: '>=3.7' + typing_extensions: '>=3.7.4.3' + url: https://conda.anaconda.org/conda-forge/noarch/typer-slim-0.12.3-pyhd8ed1ab_0.conda + hash: + md5: cf2c3a89f89644c53cadbfeb124914e9 + sha256: 01dcb54375c8eae54d13374ed3d5823635401c552340b87e67fdbbb507760596 + category: main + optional: false +- name: typing-extensions + version: 4.12.2 + manager: conda + platform: linux-64 + dependencies: + typing_extensions: 4.12.2 + url: https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda + hash: + md5: 52d648bd608f5737b123f510bb5514b5 + sha256: d3b9a8ed6da7c9f9553c5fd8a4fca9c3e0ab712fa5f497859f82337d67533b73 + category: main + optional: false +- name: typing_extensions + version: 4.12.2 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: 
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda + hash: + md5: ebe6952715e1d5eb567eeebf25250fa7 + sha256: 0fce54f8ec3e59f5ef3bb7641863be4e1bf1279623e5af3d3fa726e8f7628ddb + category: main + optional: false +- name: tzdata + version: 2024a + manager: conda + platform: linux-64 + dependencies: {} + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda + hash: + md5: 161081fc7cec0bfda0d86d7cb595f8d8 + sha256: 7b2b69c54ec62a243eb6fba2391b5e443421608c3ae5dbff938ad33ca8db5122 + category: main + optional: false +- name: urllib3 + version: 2.2.2 + manager: conda + platform: linux-64 + dependencies: + brotli-python: '>=1.0.9' + h2: '>=4,<5' + pysocks: '>=1.5.6,<2.0,!=1.5.7' + python: '>=3.8' + zstandard: '>=0.18.0' + url: https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda + hash: + md5: e804c43f58255e977093a2298e442bb8 + sha256: 00c47c602c03137e7396f904eccede8cc64cc6bad63ce1fc355125df8882a748 + category: main + optional: false +- name: wheel + version: 0.43.0 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/wheel-0.43.0-pyhd8ed1ab_1.conda + hash: + md5: 0b5293a157c2b5cd513dd1b03d8d3aae + sha256: cb318f066afd6fd64619f14c030569faf3f53e6f50abf743b4c865e7d95b96bc + category: main + optional: false +- name: xz + version: 5.2.6 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 + hash: + md5: 2161070d867d1b1204ea749c8eec4ef0 + sha256: 03a6d28ded42af8a347345f82f3eebdd6807a08526d47899a42d62d319609162 + category: main + optional: false +- name: yaml + version: 0.2.5 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=9.4.0' + url: https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 + hash: + md5: 4cb3ad778ec2d5a7acbdf254eb1c42ae + sha256: a4e34c710eeb26945bdbdaba82d3d74f60a78f54a874ec10d373811a5d217535 + category: main + optional: false +- name: zstandard + version: 0.22.0 + manager: conda + platform: linux-64 + dependencies: + cffi: '>=1.11' + libgcc-ng: '>=12' + python: '>=3.12,<3.13.0a0' + python_abi: 3.12.* + zstd: '>=1.5.6,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.22.0-py312h5b18bf6_1.conda + hash: + md5: 27fe79bbc4dd3767be554fb171df362c + sha256: 3bd22e769ea6bf2c9f59cc9905b9b43058208bde1ecca9d9f656ecd834c137d0 + category: main + optional: false +- name: zstd + version: 1.5.6 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libzlib: '>=1.2.13,<2.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda + hash: + md5: 4d056880988120e29d75bfff282e0f45 + sha256: c558b9cc01d9c1444031bd1ce4b9cff86f9085765f17627a6cd85fc623c8a02b + category: main + optional: false +- name: absl-py + version: 2.1.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a2/ad/e0d3c824784ff121c03cc031f944bc7e139a8f1870ffd2845cc2dd76f6c4/absl_py-2.1.0-py3-none-any.whl + hash: + sha256: 526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308 + category: main + optional: false +- name: astunparse + version: 1.6.3 + manager: pip + platform: linux-64 + dependencies: + six: '>=1.6.1,<2.0' + url: https://files.pythonhosted.org/packages/2b/03/13dde6512ad7b4557eb792fbcf0c653af6076b81e5941d36ec61f7ce6028/astunparse-1.6.3-py2.py3-none-any.whl + hash: + 
sha256: c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8 + category: main + optional: false +- name: coloredlogs + version: 15.0.1 + manager: pip + platform: linux-64 + dependencies: + humanfriendly: '>=9.1' + url: https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl + hash: + sha256: 612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934 + category: main + optional: false +- name: flatbuffers + version: 24.3.25 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/41/f0/7e988a019bc54b2dbd0ad4182ef2d53488bb02e58694cd79d61369e85900/flatbuffers-24.3.25-py2.py3-none-any.whl + hash: + sha256: 8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812 + category: main + optional: false +- name: gast + version: 0.6.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/a3/61/8001b38461d751cd1a0c3a6ae84346796a5758123f3ed97a1b121dfbf4f3/gast-0.6.0-py3-none-any.whl + hash: + sha256: 52b182313f7330389f72b069ba00f174cfe2a06411099547288839c6cbafbd54 + category: main + optional: false +- name: google-pasta + version: 0.2.0 + manager: pip + platform: linux-64 + dependencies: + six: '*' + url: https://files.pythonhosted.org/packages/a3/de/c648ef6835192e6e2cc03f40b19eeda4382c49b5bafb43d88b931c4c74ac/google_pasta-0.2.0-py3-none-any.whl + hash: + sha256: b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed + category: main + optional: false +- name: grpcio + version: 1.64.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/0d/fb/37265dea2dcd3959100e616b79dfe4c2d4ed178ea657519482cf0ba8d6bb/grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1 + category: main + optional: false +- name: h5py + version: 3.11.0 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.17.3' + url: https://files.pythonhosted.org/packages/db/7e/fedac8bb8c4729409e2dec5e4136a289116d701d54f69ce73c5617afc5f0/h5py-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb + category: main + optional: false +- name: humanfriendly + version: '10.0' + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl + hash: + sha256: 1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477 + category: main + optional: false +- name: json2onnx + version: 2.0.3 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/31/0a/60e7581ffe96b5739858797f0c635b27c289465c3467c7930d2182369dd6/json2onnx-2.0.3-py3-none-any.whl + hash: + sha256: 4f43a8982d3611c6884b6a035e58e31f4f50c60d0b3ea00dc9a510ffa2328982 + category: main + optional: false +- name: keras + version: 3.4.1 + manager: pip + platform: linux-64 + dependencies: + absl-py: '*' + numpy: '*' + rich: '*' + namex: '*' + h5py: '*' + optree: '*' + ml-dtypes: '*' + packaging: '*' + url: https://files.pythonhosted.org/packages/46/43/03fa53f027e78af4a6bee3564d05cb34d9f5b924dc69c85f8ef5cb950ff1/keras-3.4.1-py3-none-any.whl + hash: + sha256: 15599c51e2090c12f39de6db6489a0cf265ddf6653f0731b82db5af2bfa19105 + category: main + 
optional: false +- name: libclang + version: 18.1.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/1d/fc/716c1e62e512ef1c160e7984a73a5fc7df45166f2ff3f254e71c58076f7c/libclang-18.1.1-py2.py3-none-manylinux2010_x86_64.whl + hash: + sha256: c533091d8a3bbf7460a00cb6c1a71da93bffe148f172c7d03b1c31fbf8aa2a0b + category: main + optional: false +- name: markdown + version: '3.6' + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/fc/b3/0c0c994fe49cd661084f8d5dc06562af53818cc0abefaca35bdc894577c3/Markdown-3.6-py3-none-any.whl + hash: + sha256: 48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f + category: main + optional: false +- name: markdown-it-py + version: 3.0.0 + manager: pip + platform: linux-64 + dependencies: + mdurl: '>=0.1,<1.0' + url: https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl + hash: + sha256: 355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 + category: main + optional: false +- name: markupsafe + version: 2.1.5 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/0a/0d/2454f072fae3b5a137c119abf15465d1771319dfe9e4acbb31722a0fff91/MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 + category: main + optional: false +- name: mdurl + version: 0.1.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl + hash: + sha256: 84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 + category: main + optional: false +- name: ml-dtypes + version: 0.3.2 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.26.0' + url: https://files.pythonhosted.org/packages/e5/f1/93219c44bae4017e6e43391fa4433592de08e05def9d885227d3596f21a5/ml_dtypes-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: e8505946df1665db01332d885c2020b4cb9e84a8b1241eb4ba69d59591f65855 + category: main + optional: false +- name: mpmath + version: 1.3.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl + hash: + sha256: a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c + category: main + optional: false +- name: namex + version: 0.0.8 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/73/59/7854fbfb59f8ae35483ce93493708be5942ebb6328cd85b3a609df629736/namex-0.0.8-py3-none-any.whl + hash: + sha256: 7ddb6c2bb0e753a311b7590f84f6da659dd0c05e65cb89d519d54c0a250c0487 + category: main + optional: false +- name: numpy + version: 1.26.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed + category: main + optional: false +- name: onnx-graphsurgeon + version: 0.5.2 + manager: pip + platform: linux-64 + dependencies: + numpy: '*' + onnx: '>=1.14.0' + url: 
https://files.pythonhosted.org/packages/0d/20/93e7143af3a0b3b3d9f3306bfc46e55d0d307242b4c1bf36ff108460e5a3/onnx_graphsurgeon-0.5.2-py2.py3-none-any.whl + hash: + sha256: 10c130d6129fdeee02945f8103b5b112e6fd4d9b356e2dd3e80f53e0ebee7b5c + category: main + optional: false +- name: onnx2json + version: 2.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/4d/1e/01d79c5317a48a1819ed7d9f4ff5d6fa1e23b97be464ff6862213671a71c/onnx2json-2.0.4-py3-none-any.whl + hash: + sha256: 281d86a7b4e4b8e33c1972cfbde3a3d77ef7848074907ae5a8c8b901d0b50e82 + category: main + optional: false +- name: onnx2tf + version: 1.24.1 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/42/83/0f5a89f9ecae4ed601a1afc5ea62681b3467245309e8e63146c8eef3979f/onnx2tf-1.24.1-py3-none-any.whl + hash: + sha256: 6040ffe13d4eac16086298c0e17d434629182b3e7322e1817234398c006082e1 + category: main + optional: false +- name: onnxruntime + version: 1.18.1 + manager: pip + platform: linux-64 + dependencies: + coloredlogs: '*' + flatbuffers: '*' + numpy: '>=1.21.6,<2.0' + packaging: '*' + protobuf: '*' + sympy: '*' + url: https://files.pythonhosted.org/packages/a3/0a/89bc7acdf7b311ec5cdf6c01983e8ecb23f7b1ba7a1b2d2fd10d33dfd24a/onnxruntime-1.18.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl + hash: + sha256: 781aa9873640f5df24524f96f6070b8c550c66cb6af35710fd9f92a20b4bfbf6 + category: main + optional: false +- name: onnxsim + version: 0.4.36 + manager: pip + platform: linux-64 + dependencies: + onnx: '*' + rich: '*' + url: https://files.pythonhosted.org/packages/ce/9e/f34238413ebeda9a3a8802feeaa5013934455466b9ab390b48ad9c7e184f/onnxsim-0.4.36.tar.gz + hash: + sha256: 6e0ee9d6d4a83042bdef7319fbe58352d9fda5f253386be2b267c7c27f0638ee + category: main + optional: false +- name: opt-einsum + version: 3.3.0 + manager: pip + platform: linux-64 + dependencies: + numpy: '>=1.7' + url: https://files.pythonhosted.org/packages/bc/19/404708a7e54ad2798907210462fd950c3442ea51acc8790f3da48d2bee8b/opt_einsum-3.3.0-py3-none-any.whl + hash: + sha256: 2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147 + category: main + optional: false +- name: optree + version: 0.12.1 + manager: pip + platform: linux-64 + dependencies: + typing-extensions: '>=4.5.0' + url: https://files.pythonhosted.org/packages/31/93/51ca86599d53d6fed0047ff3abbc4dacd9c4212d4e8c2ff92dc885e66d91/optree-0.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 24d74a9d97d7bdbdbb30356850f204950c39ab8fad7f273ed29d1feda19060b2 + category: main + optional: false +- name: pygments + version: 2.18.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl + hash: + sha256: b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a + category: main + optional: false +- name: rich + version: 13.7.1 + manager: pip + platform: linux-64 + dependencies: + pygments: '>=2.13.0,<3.0.0' + markdown-it-py: '>=2.2.0' + url: https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl + hash: + sha256: 4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 + category: main + optional: false +- name: sam4onnx + version: 1.0.16 + manager: pip + platform: linux-64 + dependencies: {} + url: 
https://files.pythonhosted.org/packages/60/7e/c51c2c28134527f0fcc7c872c8f6ee363d230c3019c3539b95212fe51603/sam4onnx-1.0.16-py3-none-any.whl + hash: + sha256: 9fb07e8eaf5a99210b5a7e6f62b0332cfecf6e2f65a74d572642ed2284a7d029 + category: main + optional: false +- name: sbi4onnx + version: 1.0.7 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/cc/75/daa32d109a94c169ec1b49a16cf7ed3473a029bf6ebaaa8255ca10d1e4f1/sbi4onnx-1.0.7-py3-none-any.whl + hash: + sha256: 44845074b09d466a8b004fd98c601ac1f0cd36635167c6b1991397a276eb08d6 + category: main + optional: false +- name: scc4onnx + version: 1.0.6 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/34/6b/b583a8e4deedaa324f3aadae752a62b7706e84f10f2b2cf08cc1e950bc64/scc4onnx-1.0.6-py3-none-any.whl + hash: + sha256: cd71161916e8ef4e653d7cdbf12748047ea2c4b4037c64af46766f8c52b073b5 + category: main + optional: false +- name: scs4onnx + version: 1.0.18 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/fb/d6/69201bfaeb6e7baea6681453ffe5bd52118a83d138d9b72553c0e1f13fe4/scs4onnx-1.0.18-py3-none-any.whl + hash: + sha256: 38dcf757a498f579483ae5f8c7669df9c7c515be990dfe255a7c86a087aab77e + category: main + optional: false +- name: sde4onnx + version: 1.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/44/a2/9ecae8aa06a3585fff9b232dcc8494ab37e95979f97959e2c98800869923/sde4onnx-1.0.0-py3-none-any.whl + hash: + sha256: f77595bb7f97eaa72bf3068cea3e317d6affd140431021e6c380b9501a750ed2 + category: main + optional: false +- name: sed4onnx + version: 1.0.5 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/c2/37/0dd964355edff208522bbe978e82c5eefc1105536afd52a41ce9063b7812/sed4onnx-1.0.5-py3-none-any.whl + hash: + sha256: db80a1c229d7783bed1c88efc868867bd0dfe7180605546d0b05f8aa15c2a50e + category: main + optional: false +- name: simple-onnx-processing-tools + version: 1.1.32 + manager: pip + platform: linux-64 + dependencies: + snc4onnx: '>=1.0.12' + sne4onnx: '>=1.0.11' + snd4onnx: '>=1.1.6' + scs4onnx: '>=1.0.18' + sog4onnx: '>=1.0.16' + sam4onnx: '>=1.0.14' + soc4onnx: '>=1.0.2' + scc4onnx: '>=1.0.5' + sna4onnx: '>=1.0.6' + sbi4onnx: '>=1.0.5' + sor4onnx: '>=1.0.5' + sit4onnx: '>=1.0.7' + onnx2json: '>=2.0.4' + json2onnx: '>=2.0.3' + sed4onnx: '>=1.0.5' + soa4onnx: '>=1.0.4' + sod4onnx: '>=1.0.0' + ssi4onnx: '>=1.0.2' + ssc4onnx: '>=1.0.5' + sio4onnx: '>=1.0.2' + svs4onnx: '>=1.0.0' + onnx2tf: '>=1.20.1' + sng4onnx: '>=1.0.2' + sde4onnx: '>=1.0.0' + spo4onnx: '>=1.0.4' + url: https://files.pythonhosted.org/packages/20/a6/6df4a132bf75aec6ee9a5822b1630364e26ea9cb555173e761ac19d59a46/simple_onnx_processing_tools-1.1.32-py3-none-any.whl + hash: + sha256: af575ff69b606822d6218a6c5e2224c15b424685ce998f1e4bab38f3968ee18d + category: main + optional: false +- name: sio4onnx + version: 1.0.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/7e/16/61b40a5445ef890262227bc1dd7c61073049c02595cb8e423765a7979962/sio4onnx-1.0.2-py3-none-any.whl + hash: + sha256: 7e5b5bc7097740b2f988de745ed2c5a0c69c91cfdc3248d7f7b5d29df9acf46f + category: main + optional: false +- name: sit4onnx + version: 1.0.8 + manager: pip + platform: linux-64 + dependencies: {} + url: 
https://files.pythonhosted.org/packages/0c/7c/3506284ccac1fa2f3c75aeed0b0006157a23a8a5589f5aaa6370454d9638/sit4onnx-1.0.8-py3-none-any.whl + hash: + sha256: 8099dcfd4d03e778955dd448a568688744ef6d347e100b866281886756e76014 + category: main + optional: false +- name: six + version: 1.16.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl + hash: + sha256: 8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + category: main + optional: false +- name: sna4onnx + version: 1.0.6 + manager: pip + platform: linux-64 + dependencies: + sog4onnx: '*' + url: https://files.pythonhosted.org/packages/f7/52/c7f91fa3ff0d97356b3781b6ef1ad803b5a5f8e51a1feb5fc2b0cd6c0d5c/sna4onnx-1.0.6-py3-none-any.whl + hash: + sha256: 86dff8fdec51b31269b5d4bcaee6c8e6e9b3a1ed43c2e2ff2f6966e3e3eb86e0 + category: main + optional: false +- name: snc4onnx + version: 1.0.13 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/73/57/cf831fe44e0de2e7f7890f5d8c4d6a669fc5da73ce7d4d58b99cd35e9ecb/snc4onnx-1.0.13-py3-none-any.whl + hash: + sha256: 924f4ee8d3dc6294612a3a1b4bf610d9425ebf24be5792e8a82fdb19acc1da91 + category: main + optional: false +- name: snd4onnx + version: 1.1.6 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/91/3c/15c07ad4873af6d7186d793a85a95aa88308dae53ddaa832115f5928363b/snd4onnx-1.1.6-py3-none-any.whl + hash: + sha256: 8c1a57219a50ad076ea30fa8b9dffa63baa23ba47e4efe1b2b7560a0c2d56c39 + category: main + optional: false +- name: sne4onnx + version: 1.0.13 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/51/9c/1bb291a4655ad9281c83c2909276d9a263324e425ec3ef73e0d1e05b56fe/sne4onnx-1.0.13-py3-none-any.whl + hash: + sha256: eb6176239317a4b5b7ba55051f309bfbbb97255c3fd8ab348390bf4c537dc973 + category: main + optional: false +- name: sng4onnx + version: 1.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/6f/d8/9f6fc80c341d66473896edf58f02f53bbb60a7b0c0d927927d8c8fb3e916/sng4onnx-1.0.4-py3-none-any.whl + hash: + sha256: 1784d65df96c78532cfd755559a331471e80ccd42ded78044b40ec0d5d708ab4 + category: main + optional: false +- name: soa4onnx + version: 1.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/b6/a2/e920ebb2aa22ee5d208d653d904051ed1d761aa00102817e48bab09b25c9/soa4onnx-1.0.4-py3-none-any.whl + hash: + sha256: 2ed6697bf0228d5f0304aa3f2c88df62f32b303952e19bed48df48399e9d3282 + category: main + optional: false +- name: soc4onnx + version: 1.0.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/6a/61/d89a0c06062c46e59a588ffe693f40c80c5c7a736ee15bc0742e3d269b8f/soc4onnx-1.0.2-py3-none-any.whl + hash: + sha256: e89a1b230eef184555489184588528bd2ca61197d5c842a466b00d931cca9961 + category: main + optional: false +- name: sod4onnx + version: 1.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/93/ee/fb6011242484fa893c12d0918126eeb223d2d7c83b4d94a6eb725a600de8/sod4onnx-1.0.0-py3-none-any.whl + hash: + sha256: 3cfdf98ced67af0d87e13dbfbe673c03d3dd2aaf2a5c5577248fef89c9b0b183 + category: main + optional: false +- name: sog4onnx + version: 1.0.17 + manager: pip + platform: linux-64 + dependencies: {} + url: 
https://files.pythonhosted.org/packages/3c/5c/a37a324f67950adbbb653e784d2bede94cc3f58cca6ebd7cfd0cb718b778/sog4onnx-1.0.17-py3-none-any.whl + hash: + sha256: 6b912e312b3d45b3f17a9563301381602d947f24c505e46c43232365e50c0e9a + category: main + optional: false +- name: sor4onnx + version: 1.0.7 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/f1/c4/2091efd5236c0a9f755f6de4dfec1985cb2dfd270f92237486c521815778/sor4onnx-1.0.7-py3-none-any.whl + hash: + sha256: aca7a9b71bf0319415141332ccbd0c01fb8f7feb485e66a2342dda2104fd3e95 + category: main + optional: false +- name: spo4onnx + version: 1.0.5 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/96/a0/2bcda978f76fb5a76c48b109ab17686267daedb1b712fb6a23d43fe9e6b6/spo4onnx-1.0.5-py3-none-any.whl + hash: + sha256: 3275651f14b33fbb4fe6a3b8dd196acd6dd6ac5aa49829c129b1b487c2cb8c99 + category: main + optional: false +- name: ssc4onnx + version: 1.0.8 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/94/75/ed24e1662eb178bfbe3d93617c90cbb51ad02ac89f8de5e36468971c8fe2/ssc4onnx-1.0.8-py3-none-any.whl + hash: + sha256: 6579d2315b142d0e23d40c7dc25bef6542e450b8e755a9f68693e58ccc9175db + category: main + optional: false +- name: ssi4onnx + version: 1.0.4 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/ec/72/277e24966c0bb7918b9f94e84b1477d761154cf989d6d94f60860309fb95/ssi4onnx-1.0.4-py3-none-any.whl + hash: + sha256: 8e6d3593ea2f624d6ffab39117e22c162f547c30ea53dcbc1d4e7070674d5d9c + category: main + optional: false +- name: svs4onnx + version: 1.0.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/bc/b5/4b32c0151c0846b32bab8218223c0f1776ab5ecf263abdf350e237b32d1f/svs4onnx-1.0.0-py3-none-any.whl + hash: + sha256: 55cb7138866ca9a64931cc80068c6a1e0e7dce14ea8d27b125e0fa220282e05b + category: main + optional: false +- name: sympy + version: 1.13.0 + manager: pip + platform: linux-64 + dependencies: + mpmath: '>=1.1.0,<1.4' + url: https://files.pythonhosted.org/packages/62/74/7e6c65ee89ff43942bffffdbb238634f16967bf327aee3c76efcf6e49587/sympy-1.13.0-py3-none-any.whl + hash: + sha256: 6b0b32a4673fb91bd3cac3b55406c8e01d53ae22780be467301cc452f6680c92 + category: main + optional: false +- name: tensorboard + version: 2.16.2 + manager: pip + platform: linux-64 + dependencies: + absl-py: '>=0.4' + grpcio: '>=1.48.2' + markdown: '>=2.6.8' + numpy: '>=1.12.0' + protobuf: '>=3.19.6,<4.24.0 || >4.24.0' + six: '>1.9' + tensorboard-data-server: '>=0.7.0,<0.8.0' + werkzeug: '>=1.0.1' + url: https://files.pythonhosted.org/packages/3a/d0/b97889ffa769e2d1fdebb632084d5e8b53fc299d43a537acee7ec0c021a3/tensorboard-2.16.2-py3-none-any.whl + hash: + sha256: 9f2b4e7dad86667615c0e5cd072f1ea8403fc032a299f0072d6f74855775cc45 + category: main + optional: false +- name: tensorboard-data-server + version: 0.7.2 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/7a/13/e503968fefabd4c6b2650af21e110aa8466fe21432cd7c43a84577a89438/tensorboard_data_server-0.7.2-py3-none-any.whl + hash: + sha256: 7e0610d205889588983836ec05dc098e80f97b7e7bbff7e994ebb78f578d0ddb + category: main + optional: false +- name: tensorflow + version: 2.16.2 + manager: pip + platform: linux-64 + dependencies: + absl-py: '>=1.0.0' + astunparse: '>=1.6.0' + flatbuffers: '>=23.5.26' + gast: '>=0.2.1,<0.5.0 || >0.5.0,<0.5.1 || 
>0.5.1,<0.5.2 || >0.5.2' + google-pasta: '>=0.1.1' + h5py: '>=3.10.0' + libclang: '>=13.0.0' + ml-dtypes: '>=0.3.1,<0.4.0' + opt-einsum: '>=2.3.2' + packaging: '*' + protobuf: '>=3.20.3,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 + || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev' + requests: '>=2.21.0,<3' + six: '>=1.12.0' + termcolor: '>=1.1.0' + typing-extensions: '>=3.6.6' + wrapt: '>=1.11.0' + grpcio: '>=1.24.3,<2.0' + tensorboard: '>=2.16,<2.17' + keras: '>=3.0.0' + numpy: '>=1.26.0,<2.0.0' + url: https://files.pythonhosted.org/packages/d6/5c/691ab570c3637ba26d76f24d743a71f6afd952fc74e42243c108690d9f66/tensorflow-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 5badc6744672a3181c012b6ab2815975be34d0573db3b561383634acc0d46a55 + category: main + optional: false +- name: termcolor + version: 2.4.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/d9/5f/8c716e47b3a50cbd7c146f45881e11d9414def768b7cd9c5e6650ec2a80a/termcolor-2.4.0-py3-none-any.whl + hash: + sha256: 9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 + category: main + optional: false +- name: tf-keras + version: 2.16.0 + manager: pip + platform: linux-64 + dependencies: + tensorflow: '>=2.16,<2.17' + url: https://files.pythonhosted.org/packages/75/aa/cf09f8956d4f276f655b13674e15d8d6015fd832f9689aa9ff2a515781ab/tf_keras-2.16.0-py3-none-any.whl + hash: + sha256: b2ad0541fa7d9e92c4b7a1b96593377afb58aaff374299a6ca6be1a42f51d899 + category: main + optional: false +- name: werkzeug + version: 3.0.3 + manager: pip + platform: linux-64 + dependencies: + markupsafe: '>=2.1.1' + url: https://files.pythonhosted.org/packages/9d/6e/e792999e816d19d7fcbfa94c730936750036d65656a76a5a688b57a656c4/werkzeug-3.0.3-py3-none-any.whl + hash: + sha256: fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8 + category: main + optional: false +- name: wrapt + version: 1.16.0 + manager: pip + platform: linux-64 + dependencies: {} + url: https://files.pythonhosted.org/packages/62/62/30ca2405de6a20448ee557ab2cd61ab9c5900be7cbd18a2639db595f0b98/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b + category: main + optional: false diff --git a/machine-learning/export/ann/env.yaml b/machine-learning/export/ann/env.yaml index c5e656cd46..141ebff086 100644 --- a/machine-learning/export/ann/env.yaml +++ b/machine-learning/export/ann/env.yaml @@ -1,201 +1,21 @@ -name: annexport +name: onnx2ann channels: - - pytorch - - nvidia - conda-forge dependencies: - - _libgcc_mutex=0.1=conda_forge - - _openmp_mutex=4.5=2_kmp_llvm - - aiohttp=3.9.1=py310h2372a71_0 - - aiosignal=1.3.1=pyhd8ed1ab_0 - - arpack=3.8.0=nompi_h0baa96a_101 - - async-timeout=4.0.3=pyhd8ed1ab_0 - - attrs=23.1.0=pyh71513ae_1 - - aws-c-auth=0.7.3=h28f7589_1 - - aws-c-cal=0.6.1=hc309b26_1 - - aws-c-common=0.9.0=hd590300_0 - - aws-c-compression=0.2.17=h4d4d85c_2 - - aws-c-event-stream=0.3.1=h2e3709c_4 - - aws-c-http=0.7.11=h00aa349_4 - - aws-c-io=0.13.32=he9a53bd_1 - - aws-c-mqtt=0.9.3=hb447be9_1 - - aws-c-s3=0.3.14=hf3aad02_1 - - aws-c-sdkutils=0.1.12=h4d4d85c_1 - - aws-checksums=0.1.17=h4d4d85c_1 - - aws-crt-cpp=0.21.0=hb942446_5 - - aws-sdk-cpp=1.10.57=h85b1a90_19 - - blas=2.120=openblas - - blas-devel=3.9.0=20_linux64_openblas - - brotli-python=1.0.9=py310hd8f1fbe_9 - - bzip2=1.0.8=hd590300_5 - - 
c-ares=1.23.0=hd590300_0 - - ca-certificates=2023.11.17=hbcca054_0 - - certifi=2023.11.17=pyhd8ed1ab_0 - - charset-normalizer=3.3.2=pyhd8ed1ab_0 - - click=8.1.7=unix_pyh707e725_0 - - colorama=0.4.6=pyhd8ed1ab_0 - - coloredlogs=15.0.1=pyhd8ed1ab_3 - - cuda-cudart=11.7.99=0 - - cuda-cupti=11.7.101=0 - - cuda-libraries=11.7.1=0 - - cuda-nvrtc=11.7.99=0 - - cuda-nvtx=11.7.91=0 - - cuda-runtime=11.7.1=0 - - dataclasses=0.8=pyhc8e2a94_3 - - datasets=2.14.7=pyhd8ed1ab_0 - - dill=0.3.7=pyhd8ed1ab_0 - - filelock=3.13.1=pyhd8ed1ab_0 - - flatbuffers=23.5.26=h59595ed_1 - - freetype=2.12.1=h267a509_2 - - frozenlist=1.4.0=py310h2372a71_1 - - fsspec=2023.10.0=pyhca7485f_0 - - ftfy=6.1.3=pyhd8ed1ab_0 - - gflags=2.2.2=he1b5a44_1004 - - glog=0.6.0=h6f12383_0 - - glpk=5.0=h445213a_0 - - gmp=6.3.0=h59595ed_0 - - gmpy2=2.1.2=py310h3ec546c_1 - - huggingface_hub=0.17.3=pyhd8ed1ab_0 - - humanfriendly=10.0=pyhd8ed1ab_6 - - icu=73.2=h59595ed_0 - - idna=3.6=pyhd8ed1ab_0 - - importlib-metadata=7.0.0=pyha770c72_0 - - importlib_metadata=7.0.0=hd8ed1ab_0 - - joblib=1.3.2=pyhd8ed1ab_0 - - keyutils=1.6.1=h166bdaf_0 - - krb5=1.21.2=h659d440_0 - - lcms2=2.15=h7f713cb_2 - - ld_impl_linux-64=2.40=h41732ed_0 - - lerc=4.0.0=h27087fc_0 - - libabseil=20230125.3=cxx17_h59595ed_0 - - libarrow=12.0.1=hb87d912_8_cpu - - libblas=3.9.0=20_linux64_openblas - - libbrotlicommon=1.0.9=h166bdaf_9 - - libbrotlidec=1.0.9=h166bdaf_9 - - libbrotlienc=1.0.9=h166bdaf_9 - - libcblas=3.9.0=20_linux64_openblas - - libcrc32c=1.1.2=h9c3ff4c_0 - - libcublas=11.10.3.66=0 - - libcufft=10.7.2.124=h4fbf590_0 - - libcufile=1.8.1.2=0 - - libcurand=10.3.4.101=0 - - libcurl=8.5.0=hca28451_0 - - libcusolver=11.4.0.1=0 - - libcusparse=11.7.4.91=0 - - libdeflate=1.19=hd590300_0 - - libedit=3.1.20191231=he28a2e2_2 - - libev=4.33=hd590300_2 - - libevent=2.1.12=hf998b51_1 - - libffi=3.4.2=h7f98852_5 - - libgcc-ng=13.2.0=h807b86a_3 - - libgfortran-ng=13.2.0=h69a702a_3 - - libgfortran5=13.2.0=ha4646dd_3 - - libgoogle-cloud=2.12.0=hac9eb74_1 - - libgrpc=1.54.3=hb20ce57_0 - - libhwloc=2.9.3=default_h554bfaf_1009 - - libiconv=1.17=hd590300_1 - - libjpeg-turbo=2.1.5.1=hd590300_1 - - liblapack=3.9.0=20_linux64_openblas - - liblapacke=3.9.0=20_linux64_openblas - - libnghttp2=1.58.0=h47da74e_1 - - libnpp=11.7.4.75=0 - - libnsl=2.0.1=hd590300_0 - - libnuma=2.0.16=h0b41bf4_1 - - libnvjpeg=11.8.0.2=0 - - libopenblas=0.3.25=pthreads_h413a1c8_0 - - libpng=1.6.39=h753d276_0 - - libprotobuf=3.21.12=hfc55251_2 - - libsentencepiece=0.1.99=h180e1df_0 - - libsqlite=3.44.2=h2797004_0 - - libssh2=1.11.0=h0841786_0 - - libstdcxx-ng=13.2.0=h7e041cc_3 - - libthrift=0.18.1=h8fd135c_2 - - libtiff=4.6.0=h29866fb_1 - - libutf8proc=2.8.0=h166bdaf_0 - - libuuid=2.38.1=h0b41bf4_0 - - libwebp-base=1.3.2=hd590300_0 - - libxcb=1.15=h0b41bf4_0 - - libxml2=2.11.6=h232c23b_0 - - libzlib=1.2.13=hd590300_5 - - llvm-openmp=17.0.6=h4dfa4b3_0 - - lz4-c=1.9.4=hcb278e6_0 - - mkl=2022.2.1=h84fe81f_16997 - - mkl-devel=2022.2.1=ha770c72_16998 - - mkl-include=2022.2.1=h84fe81f_16997 - - mpc=1.3.1=hfe3b2da_0 - - mpfr=4.2.1=h9458935_0 - - mpmath=1.3.0=pyhd8ed1ab_0 - - multidict=6.0.4=py310h2372a71_1 - - multiprocess=0.70.15=py310h2372a71_1 - - ncurses=6.4=h59595ed_2 - - numpy=1.26.2=py310hb13e2d6_0 - - onnx=1.14.0=py310ha3deec4_1 - - onnx2torch=1.5.13=pyhd8ed1ab_0 - - onnxruntime=1.16.3=py310hd4b7fbc_1_cpu - - open-clip-torch=2.23.0=pyhd8ed1ab_1 - - openblas=0.3.25=pthreads_h7a3da1a_0 - - openjpeg=2.5.0=h488ebb8_3 - - openssl=3.2.0=hd590300_1 - - orc=1.9.0=h2f23424_1 - - packaging=23.2=pyhd8ed1ab_0 - - 
pandas=2.1.4=py310hcc13569_0 - - pillow=10.0.1=py310h29da1c1_1 - - pip=23.3.1=pyhd8ed1ab_0 - - protobuf=4.21.12=py310heca2aa9_0 - - pthread-stubs=0.4=h36c2ea0_1001 - - pyarrow=12.0.1=py310h0576679_8_cpu - - pyarrow-hotfix=0.6=pyhd8ed1ab_0 - - pysocks=1.7.1=pyha2e5f31_6 - - python=3.10.13=hd12c33a_0_cpython - - python-dateutil=2.8.2=pyhd8ed1ab_0 - - python-flatbuffers=23.5.26=pyhd8ed1ab_0 - - python-tzdata=2023.3=pyhd8ed1ab_0 - - python-xxhash=3.4.1=py310h2372a71_0 - - python_abi=3.10=4_cp310 - - pytorch=1.13.1=cpu_py310hd11e9c7_1 - - pytorch-cuda=11.7=h778d358_5 - - pytorch-mutex=1.0=cuda - - pytz=2023.3.post1=pyhd8ed1ab_0 - - pyyaml=6.0.1=py310h2372a71_1 - - rdma-core=28.9=h59595ed_1 - - re2=2023.03.02=h8c504da_0 - - readline=8.2=h8228510_1 - - regex=2023.10.3=py310h2372a71_0 - - requests=2.31.0=pyhd8ed1ab_0 - - s2n=1.3.49=h06160fa_0 - - sacremoses=0.0.53=pyhd8ed1ab_0 - - safetensors=0.3.3=py310hcb5633a_1 - - sentencepiece=0.1.99=hff52083_0 - - sentencepiece-python=0.1.99=py310hebdb9f0_0 - - sentencepiece-spm=0.1.99=h180e1df_0 - - setuptools=68.2.2=pyhd8ed1ab_0 - - six=1.16.0=pyh6c4a22f_0 - - sleef=3.5.1=h9b69904_2 - - snappy=1.1.10=h9fff704_0 - - sympy=1.12=pypyh9d50eac_103 - - tbb=2021.11.0=h00ab1b0_0 - - texttable=1.7.0=pyhd8ed1ab_0 - - timm=0.9.12=pyhd8ed1ab_0 - - tk=8.6.13=noxft_h4845f30_101 - - tokenizers=0.14.1=py310h320607d_2 - - torchvision=0.14.1=cpu_py310hd3d2ac3_1 - - tqdm=4.66.1=pyhd8ed1ab_0 - - transformers=4.35.2=pyhd8ed1ab_0 - - typing-extensions=4.9.0=hd8ed1ab_0 - - typing_extensions=4.9.0=pyha770c72_0 - - tzdata=2023c=h71feb2d_0 - - ucx=1.14.1=h64cca9d_5 - - urllib3=2.1.0=pyhd8ed1ab_0 - - wcwidth=0.2.12=pyhd8ed1ab_0 - - wheel=0.42.0=pyhd8ed1ab_0 - - xorg-libxau=1.0.11=hd590300_0 - - xorg-libxdmcp=1.1.3=h7f98852_0 - - xxhash=0.8.2=hd590300_0 - - xz=5.2.6=h166bdaf_0 - - yaml=0.2.5=h7f98852_2 - - yarl=1.9.3=py310h2372a71_0 - - zipp=3.17.0=pyhd8ed1ab_0 - - zlib=1.2.13=hd590300_5 - - zstd=1.5.5=hfc55251_0 + - python>=3.11,<4.0 + - onnx>=1.16.1 + # - onnxruntime>=1.18.1 # conda only has gpu version + - psutil>=6.0.0 + - flatbuffers>=24.3.25 + - ml_dtypes>=0.3.1 + - typer-slim>=0.12.3 + - huggingface_hub>=0.23.4 + - pip - pip: - - git+https://github.com/fyfrey/TinyNeuralNetwork.git + - onnxruntime>=1.18.1 # conda only has gpu version + - onnxsim>=0.4.36 + - onnx2tf>=1.24.1 + - onnx_graphsurgeon>=0.5.2 + - simple_onnx_processing_tools>=1.1.32 + - tf_keras>=2.16.0 + - git+https://github.com/microsoft/onnxconverter-common.git diff --git a/machine-learning/export/ann/onnx2ann/__init__.py b/machine-learning/export/ann/onnx2ann/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/machine-learning/export/ann/onnx2ann/__main__.py b/machine-learning/export/ann/onnx2ann/__main__.py new file mode 100644 index 0000000000..b59a21c964 --- /dev/null +++ b/machine-learning/export/ann/onnx2ann/__main__.py @@ -0,0 +1,99 @@ +import os +import platform +from typing import Annotated, Optional + +import typer + +from onnx2ann.export import Exporter, ModelType, Precision + +app = typer.Typer(add_completion=False, pretty_exceptions_show_locals=False) + + +@app.command() +def export( + model_name: Annotated[ + str, typer.Argument(..., help="The name of the model to be exported as it exists in Hugging Face.") + ], + model_type: Annotated[ModelType, typer.Option(..., "--type", "-t", help="The type of model to be exported.")], + input_shapes: Annotated[ + list[str], + typer.Option( + ..., + "--input-shape", + "-s", + help="The shape of an input tensor to the model, each dimension 
separated by commas. "
+            "Multiple shapes can be provided for multiple inputs.",
+        ),
+    ],
+    precision: Annotated[
+        Precision,
+        typer.Option(
+            ...,
+            "--precision",
+            "-p",
+            help="The precision of the exported model. `float16` requires a GPU.",
+        ),
+    ] = Precision.FLOAT32,
+    cache_dir: Annotated[
+        str,
+        typer.Option(
+            ...,
+            "--cache-dir",
+            "-c",
+            help="Directory where pre-export models will be stored.",
+            envvar="CACHE_DIR",
+            show_envvar=True,
+        ),
+    ] = "~/.cache/huggingface",
+    output_dir: Annotated[
+        str,
+        typer.Option(
+            ...,
+            "--output-dir",
+            "-o",
+            help="Directory where exported models will be stored.",
+        ),
+    ] = "output",
+    auth_token: Annotated[
+        Optional[str],
+        typer.Option(
+            ...,
+            "--auth-token",
+            help="If uploading models to Hugging Face, the auth token of the user or organisation.",
+            envvar="HF_AUTH_TOKEN",
+            show_envvar=True,
+        ),
+    ] = None,
+    force_export: Annotated[
+        bool,
+        typer.Option(
+            ...,
+            "--force-export",
+            "-f",
+            help="Export the model even if an exported model already exists in the output directory.",
+        ),
+    ] = False,
+) -> None:
+    if platform.machine() not in ("x86_64", "AMD64"):
+        msg = f"Can only run on x86_64 / AMD64, not {platform.machine()}"
+        raise RuntimeError(msg)
+    os.environ.setdefault("LD_LIBRARY_PATH", "armnn")
+    cache_dir = os.path.expanduser(cache_dir)
+    parsed_input_shapes = [tuple(map(int, shape.split(","))) for shape in input_shapes]
+    model = Exporter(
+        model_name, model_type, input_shapes=parsed_input_shapes, cache_dir=cache_dir, force_export=force_export
+    )
+    model_dir = os.path.join(output_dir, model_name)
+    export_dir = os.path.join(model_dir, model_type)
+    armnn_model = model.to_armnn(export_dir, precision)
+
+    if not auth_token:
+        return
+
+    from huggingface_hub import upload_file
+
+    relative_path = os.path.relpath(armnn_model, start=model_dir)
+    upload_file(path_or_fileobj=armnn_model, path_in_repo=relative_path, repo_id=model.repo_name, token=auth_token)
+
+
+app()
diff --git a/machine-learning/export/ann/onnx2ann/export.py b/machine-learning/export/ann/onnx2ann/export.py
new file mode 100644
index 0000000000..935d421208
--- /dev/null
+++ b/machine-learning/export/ann/onnx2ann/export.py
@@ -0,0 +1,129 @@
+import os
+import subprocess
+from enum import StrEnum
+
+from onnx2ann.helpers import onnx_make_armnn_compatible, onnx_make_inputs_fixed
+
+
+class ModelType(StrEnum):
+    VISUAL = "visual"
+    TEXTUAL = "textual"
+    RECOGNITION = "recognition"
+    DETECTION = "detection"
+
+
+class Precision(StrEnum):
+    FLOAT16 = "float16"
+    FLOAT32 = "float32"
+
+
+class Exporter:
+    def __init__(
+        self,
+        model_name: str,
+        model_type: str,
+        input_shapes: list[tuple[int, ...]],
+        optimization_level: int = 5,
+        cache_dir: str = os.environ.get("CACHE_DIR", "~/.cache/huggingface"),
+        force_export: bool = False,
+    ):
+        self.model_name = model_name.split("/")[-1]
+        self.model_type = model_type
+        self.optimize = optimization_level
+        self.input_shapes = input_shapes
+        self.cache_dir = os.path.join(cache_dir, self.repo_name)
+        self.force_export = force_export
+
+    def download(self) -> str:
+        model_path = os.path.join(self.cache_dir, self.model_type, "model.onnx")
+        if os.path.isfile(model_path):
+            print(f"Model is already downloaded at {model_path}")
+            return model_path
+        from huggingface_hub import snapshot_download
+
+        snapshot_download(
+            self.repo_name, cache_dir=self.cache_dir, local_dir=self.cache_dir, local_dir_use_symlinks=False
+        )
+        return model_path
+
+    def to_onnx_static(self, precision: Precision) -> str:
+        import onnx
+        from onnxconverter_common import float16
+        onnx_path_original = self.download()
+        static_dir = os.path.join(self.cache_dir, self.model_type, "static")
+
+        static_path = os.path.join(static_dir, "model.onnx")
+        if self.force_export or not os.path.isfile(static_path):
+            print(f"Making {self} static")
+            os.makedirs(static_dir, exist_ok=True)
+            onnx_make_inputs_fixed(onnx_path_original, static_path, self.input_shapes)
+            onnx_make_armnn_compatible(static_path)
+            print(f"Finished making {self} static")
+
+        model = onnx.load(static_path)
+        self.inputs = [input_.name for input_ in model.graph.input]
+        self.outputs = [output_.name for output_ in model.graph.output]
+        if precision == Precision.FLOAT16:
+            static_path = os.path.join(static_dir, f"model_{precision}.onnx")
+            print(f"Converting {self} to {precision} precision")
+            model = float16.convert_float_to_float16(model, keep_io_types=True, disable_shape_infer=True)
+            onnx.save(model, static_path)
+            print(f"Finished converting {self} to {precision} precision")
+        # self.inputs, self.outputs = onnx_get_inputs_outputs(static_path)
+        return static_path
+
+    def to_tflite(self, output_dir: str, precision: Precision) -> str:
+        onnx_model = self.to_onnx_static(precision)
+        tflite_dir = os.path.join(output_dir, precision)
+        tflite_model = os.path.join(tflite_dir, f"model_{precision}.tflite")
+        if self.force_export or not os.path.isfile(tflite_model):
+            import onnx2tf
+
+            print(f"Exporting {self} to TFLite with {precision} precision (this might take a few minutes)")
+            onnx2tf.convert(
+                input_onnx_file_path=onnx_model,
+                output_folder_path=tflite_dir,
+                keep_shape_absolutely_input_names=self.inputs,
+                # verbosity="warn",
+                copy_onnx_input_output_names_to_tflite=True,
+                output_signaturedefs=True,
+                not_use_onnxsim=True,
+            )
+            print(f"Finished exporting {self} to TFLite with {precision} precision")
+
+        return tflite_model
+
+    def to_armnn(self, output_dir: str, precision: Precision) -> str:
+        armnn_model = os.path.join(output_dir, "model.armnn")
+        if not self.force_export and os.path.isfile(armnn_model):
+            return armnn_model
+
+        tflite_model_dir = os.path.join(output_dir, "tflite")
+        tflite_model = self.to_tflite(tflite_model_dir, precision)
+
+        args = ["./armnnconverter", "-f", "tflite-binary", "-m", tflite_model, "-p", armnn_model]
+        args.append("-i")
+        args.extend(self.inputs)
+        args.append("-o")
+        args.extend(self.outputs)
+
+        print(f"Exporting {self} to ARM NN with {precision} precision")
+        try:
+            if (stdout := subprocess.check_output(args, stderr=subprocess.STDOUT).decode()):
+                print(stdout)
+            print(f"Finished exporting {self} to ARM NN with {precision} precision")
+        except subprocess.CalledProcessError as e:
+            print(e.output.decode())
+            try:
+                from shutil import rmtree
+
+                rmtree(tflite_model_dir, ignore_errors=True)
+            finally:
+                raise e
+
+        return armnn_model
+
+    @property
+    def repo_name(self) -> str:
+        return f"immich-app/{self.model_name}"
+
+    def __repr__(self) -> str:
+        return f"{self.model_name} ({self.model_type})"
diff --git a/machine-learning/export/ann/onnx2ann/helpers.py b/machine-learning/export/ann/onnx2ann/helpers.py
new file mode 100644
index 0000000000..4eafdfbc50
--- /dev/null
+++ b/machine-learning/export/ann/onnx2ann/helpers.py
@@ -0,0 +1,260 @@
+from typing import Any
+
+
+def onnx_make_armnn_compatible(model_path: str) -> None:
+    """
+    i can explain
+    armnn only supports up to 4d transposes, but the model has a 5d transpose due to a redundant unsqueeze
+    this function folds the unsqueeze+transpose+squeeze into a single 4d transpose
+    it also switches from gather ops to 
slices since armnn has different dimension semantics for gathers + also fixes batch normalization being in training mode + """ + + import numpy as np + import onnx + from onnx_graphsurgeon import Constant, Node, Variable, export_onnx, import_onnx + + proto = onnx.load(model_path) + graph = import_onnx(proto) + + gather_idx = 1 + squeeze_idx = 1 + for node in graph.nodes: + for link1 in node.outputs: + if "Unsqueeze" in link1.name: + for node1 in link1.outputs: + for link2 in node1.outputs: + if "Transpose" in link2.name: + for node2 in link2.outputs: + if node2.attrs.get("perm") == [3, 1, 2, 0, 4]: + node2.attrs["perm"] = [2, 0, 1, 3] + link2.shape = link1.shape + for link3 in node2.outputs: + if "Squeeze" in link3.name: + link3.shape = [link3.shape[x] for x in [0, 1, 2, 4]] + for node3 in link3.outputs: + for link4 in node3.outputs: + link4.shape = link3.shape + try: + idx = link2.inputs.index(node1) + link2.inputs[idx] = node + except ValueError: + pass + + node.outputs = [link2] + if "Gather" in link4.name: + for node4 in link4.outputs: + axis = node1.attrs.get("axis", 0) + index = node4.inputs[1].values + slice_link = Variable( + f"onnx::Slice_123{gather_idx}", + dtype=link4.dtype, + shape=[1] + link3.shape[1:], + ) + slice_node = Node( + op="Slice", + inputs=[ + link3, + Constant( + f"SliceStart_123{gather_idx}", + np.array([index]), + ), + Constant( + f"SliceEnd_123{gather_idx}", + np.array([index + 1]), + ), + Constant( + f"SliceAxis_123{gather_idx}", + np.array([axis]), + ), + ], + outputs=[slice_link], + name=f"Slice_123{gather_idx}", + ) + graph.nodes.append(slice_node) + gather_idx += 1 + + for link5 in node4.outputs: + for node5 in link5.outputs: + try: + idx = node5.inputs.index(link5) + node5.inputs[idx] = slice_link + except ValueError: + pass + elif node.op == "LayerNormalization": + for node1 in link1.outputs: + if node1.op == "Gather": + for link2 in node1.outputs: + for node2 in link2.outputs: + axis = node1.attrs.get("axis", 0) + index = node1.inputs[1].values + slice_link = Variable( + f"onnx::Slice_123{gather_idx}", + dtype=link2.dtype, + shape=[1, *link2.shape], + ) + slice_node = Node( + op="Slice", + inputs=[ + node1.inputs[0], + Constant( + f"SliceStart_123{gather_idx}", + np.array([index]), + ), + Constant( + f"SliceEnd_123{gather_idx}", + np.array([index + 1]), + ), + Constant( + f"SliceAxis_123{gather_idx}", + np.array([axis]), + ), + ], + outputs=[slice_link], + name=f"Slice_123{gather_idx}", + ) + graph.nodes.append(slice_node) + gather_idx += 1 + + squeeze_link = Variable( + f"onnx::Squeeze_123{squeeze_idx}", + dtype=link2.dtype, + shape=link2.shape, + ) + squeeze_node = Node( + op="Squeeze", + inputs=[ + slice_link, + Constant( + f"SqueezeAxis_123{squeeze_idx}", + np.array([0]), + ), + ], + outputs=[squeeze_link], + name=f"Squeeze_123{squeeze_idx}", + ) + graph.nodes.append(squeeze_node) + squeeze_idx += 1 + try: + idx = node2.inputs.index(link2) + node2.inputs[idx] = squeeze_link + except ValueError: + pass + elif node.op == "Reshape": + for node1 in link1.outputs: + if node1.op == "Gather": + node2s = [n for link in node1.outputs for n in link.outputs] + if any(n.op == "Abs" for n in node2s): + axis = node1.attrs.get("axis", 0) + index = node1.inputs[1].values + slice_link = Variable( + f"onnx::Slice_123{gather_idx}", + dtype=node1.outputs[0].dtype, + shape=[1, *node1.outputs[0].shape], + ) + slice_node = Node( + op="Slice", + inputs=[ + node1.inputs[0], + Constant( + f"SliceStart_123{gather_idx}", + np.array([index]), + ), + Constant( + 
f"SliceEnd_123{gather_idx}", + np.array([index + 1]), + ), + Constant( + f"SliceAxis_123{gather_idx}", + np.array([axis]), + ), + ], + outputs=[slice_link], + name=f"Slice_123{gather_idx}", + ) + graph.nodes.append(slice_node) + gather_idx += 1 + + squeeze_link = Variable( + f"onnx::Squeeze_123{squeeze_idx}", + dtype=node1.outputs[0].dtype, + shape=node1.outputs[0].shape, + ) + squeeze_node = Node( + op="Squeeze", + inputs=[ + slice_link, + Constant( + f"SqueezeAxis_123{squeeze_idx}", + np.array([0]), + ), + ], + outputs=[squeeze_link], + name=f"Squeeze_123{squeeze_idx}", + ) + graph.nodes.append(squeeze_node) + squeeze_idx += 1 + for node2 in node2s: + node2.inputs[0] = squeeze_link + elif node.op == "BatchNormalization" and node.attrs.get("training_mode") == 1: + node.attrs["training_mode"] = 0 + node.outputs = node.outputs[:1] + + graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True) + graph.toposort() + graph.fold_constants() + updated = export_onnx(graph) + onnx_save(updated, model_path) + + # for some reason, reloading the model is necessary to apply the correct shape + proto = onnx.load(model_path) + graph = import_onnx(proto) + for node in graph.nodes: + if node.op == "Slice": + for link in node.outputs: + if "Slice_123" in link.name and link.shape[0] == 3: # noqa: PLR2004 + link.shape[0] = 1 + + graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True) + graph.toposort() + graph.fold_constants() + updated = export_onnx(graph) + onnx_save(updated, model_path) + onnx.shape_inference.infer_shapes_path(model_path, check_type=True, strict_mode=True, data_prop=True) + + +def onnx_make_inputs_fixed(input_path: str, output_path: str, input_shapes: list[tuple[int, ...]]) -> None: + import onnx + import onnxsim + from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed + + model, success = onnxsim.simplify(input_path) + if not success: + msg = f"Failed to simplify {input_path}" + raise RuntimeError(msg) + onnx_save(model, output_path) + onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True) + model = onnx.load_model(output_path) + for input_node, shape in zip(model.graph.input, input_shapes, strict=False): + make_input_shape_fixed(model.graph, input_node.name, shape) + fix_output_shapes(model) + onnx_save(model, output_path) + onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True) + + +def onnx_get_inputs_outputs(model_path: str) -> tuple[list[str], list[str]]: + import onnx + + model = onnx.load(model_path) + inputs = [input_.name for input_ in model.graph.input] + outputs = [output_.name for output_ in model.graph.output] + return inputs, outputs + + +def onnx_save(model: Any, output_path: str) -> None: + import onnx + + try: + onnx.save(model, output_path) + except: + onnx.save(model, output_path, save_as_external_data=True, all_tensors_to_one_file=False, size_threshold=1_000_000) \ No newline at end of file diff --git a/machine-learning/export/ann/pyproject.toml b/machine-learning/export/ann/pyproject.toml new file mode 100644 index 0000000000..3d29bd8e84 --- /dev/null +++ b/machine-learning/export/ann/pyproject.toml @@ -0,0 +1,56 @@ +[project] +name = "onnx2ann" +version = "1.107.2" +dependencies = [ + "onnx>=1.16.1", + "psutil>=6.0.0", + "flatbuffers>=24.3.25", + "ml_dtypes>=0.3.1,<1.0.0", + "typer-slim>=0.12.3,<1.0.0", + "huggingface_hub>=0.23.4,<1.0.0", + "onnxruntime>=1.18.1", + 
"onnxsim>=0.4.36,<1.0.0", + "onnx2tf>=1.24.0", + "onnx_graphsurgeon>=0.5.2,<1.0.0", + "simple_onnx_processing_tools>=1.1.32", + "tf_keras>=2.16.0", + "onnxconverter-common @ git+https://github.com/microsoft/onnxconverter-common" +] +requires-python = ">=3.11" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.sdist] +only-include = ["onnx2ann"] + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.mypy] +python_version = "3.12" +follow_imports = "silent" +warn_redundant_casts = true +disallow_any_generics = true +check_untyped_defs = true +disallow_untyped_defs = true +ignore_missing_imports = true + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true +warn_untyped_fields = true + +[tool.ruff] +line-length = 120 +target-version = "py312" + +[tool.ruff.lint] +extend-select = ["E", "F", "I"] +extend-ignore = ["FBT001", "FBT002"] + +[tool.black] +line-length = 120 +target-version = ['py312'] diff --git a/machine-learning/export/ann/run.py b/machine-learning/export/ann/run.py deleted file mode 100644 index 5133e204ae..0000000000 --- a/machine-learning/export/ann/run.py +++ /dev/null @@ -1,475 +0,0 @@ -import os -import platform -import subprocess -from typing import Callable, ClassVar - -import onnx -from onnx_graphsurgeon import Constant, Node, Variable, import_onnx, export_onnx -from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed -from huggingface_hub import snapshot_download -from onnx.shape_inference import infer_shapes_path -from huggingface_hub import login, upload_file -import onnx2tf -import numpy as np -import onnxsim -from shutil import rmtree - -# hack: changed Mul op in onnx2tf to skip broadcast if graph_node.o().op == 'Sigmoid' - -# i can explain -# armnn only supports up to 4d tranposes, but the model has a 5d transpose due to a redundant unsqueeze -# this function folds the unsqueeze+transpose+squeeze into a single 4d transpose -# it also switches from gather ops to slices since armnn has different dimension semantics for gathers -# also fixes batch normalization being in training mode -def make_onnx_armnn_compatible(model_path: str): - proto = onnx.load(model_path) - graph = import_onnx(proto) - - gather_idx = 1 - squeeze_idx = 1 - for node in graph.nodes: - for link1 in node.outputs: - if "Unsqueeze" in link1.name: - for node1 in link1.outputs: - for link2 in node1.outputs: - if "Transpose" in link2.name: - for node2 in link2.outputs: - if node2.attrs.get("perm") == [3, 1, 2, 0, 4]: - node2.attrs["perm"] = [2, 0, 1, 3] - link2.shape = link1.shape - for link3 in node2.outputs: - if "Squeeze" in link3.name: - link3.shape = [link3.shape[x] for x in [0, 1, 2, 4]] - for node3 in link3.outputs: - for link4 in node3.outputs: - link4.shape = link3.shape - try: - idx = link2.inputs.index(node1) - link2.inputs[idx] = node - except ValueError: - pass - - node.outputs = [link2] - if "Gather" in link4.name: - for node4 in link4.outputs: - axis = node1.attrs.get("axis", 0) - index = node4.inputs[1].values - slice_link = Variable( - f"onnx::Slice_123{gather_idx}", - dtype=link4.dtype, - shape=[1] + link3.shape[1:], - ) - slice_node = Node( - op="Slice", - inputs=[ - link3, - Constant( - f"SliceStart_123{gather_idx}", - np.array([index]), - ), - Constant( - f"SliceEnd_123{gather_idx}", - np.array([index + 1]), - ), - Constant( - f"SliceAxis_123{gather_idx}", - np.array([axis]), - ), - ], - outputs=[slice_link], - 
name=f"Slice_123{gather_idx}", - ) - graph.nodes.append(slice_node) - gather_idx += 1 - - for link5 in node4.outputs: - for node5 in link5.outputs: - try: - idx = node5.inputs.index(link5) - node5.inputs[idx] = slice_link - except ValueError: - pass - elif node.op == "LayerNormalization": - for node1 in link1.outputs: - if node1.op == "Gather": - for link2 in node1.outputs: - for node2 in link2.outputs: - axis = node1.attrs.get("axis", 0) - index = node1.inputs[1].values - slice_link = Variable( - f"onnx::Slice_123{gather_idx}", - dtype=link2.dtype, - shape=[1] + link2.shape, - ) - slice_node = Node( - op="Slice", - inputs=[ - node1.inputs[0], - Constant( - f"SliceStart_123{gather_idx}", - np.array([index]), - ), - Constant( - f"SliceEnd_123{gather_idx}", - np.array([index + 1]), - ), - Constant( - f"SliceAxis_123{gather_idx}", - np.array([axis]), - ), - ], - outputs=[slice_link], - name=f"Slice_123{gather_idx}", - ) - graph.nodes.append(slice_node) - gather_idx += 1 - - squeeze_link = Variable( - f"onnx::Squeeze_123{squeeze_idx}", - dtype=link2.dtype, - shape=link2.shape, - ) - squeeze_node = Node( - op="Squeeze", - inputs=[slice_link, Constant(f"SqueezeAxis_123{squeeze_idx}",np.array([0]),)], - outputs=[squeeze_link], - name=f"Squeeze_123{squeeze_idx}", - ) - graph.nodes.append(squeeze_node) - squeeze_idx += 1 - try: - idx = node2.inputs.index(link2) - node2.inputs[idx] = squeeze_link - except ValueError: - pass - elif node.op == "Reshape": - for node1 in link1.outputs: - if node1.op == "Gather": - node2s = [n for l in node1.outputs for n in l.outputs] - if any(n.op == "Abs" for n in node2s): - axis = node1.attrs.get("axis", 0) - index = node1.inputs[1].values - slice_link = Variable( - f"onnx::Slice_123{gather_idx}", - dtype=node1.outputs[0].dtype, - shape=[1] + node1.outputs[0].shape, - ) - slice_node = Node( - op="Slice", - inputs=[ - node1.inputs[0], - Constant( - f"SliceStart_123{gather_idx}", - np.array([index]), - ), - Constant( - f"SliceEnd_123{gather_idx}", - np.array([index + 1]), - ), - Constant( - f"SliceAxis_123{gather_idx}", - np.array([axis]), - ), - ], - outputs=[slice_link], - name=f"Slice_123{gather_idx}", - ) - graph.nodes.append(slice_node) - gather_idx += 1 - - squeeze_link = Variable( - f"onnx::Squeeze_123{squeeze_idx}", - dtype=node1.outputs[0].dtype, - shape=node1.outputs[0].shape, - ) - squeeze_node = Node( - op="Squeeze", - inputs=[slice_link, Constant(f"SqueezeAxis_123{squeeze_idx}",np.array([0]),)], - outputs=[squeeze_link], - name=f"Squeeze_123{squeeze_idx}", - ) - graph.nodes.append(squeeze_node) - squeeze_idx += 1 - for node2 in node2s: - node2.inputs[0] = squeeze_link - elif node.op == "BatchNormalization": - if node.attrs.get("training_mode") == 1: - node.attrs["training_mode"] = 0 - node.outputs = node.outputs[:1] - - graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True) - graph.toposort() - graph.fold_constants() - updated = export_onnx(graph) - onnx.save(updated, model_path) - # infer_shapes_path(updated, check_type=True, strict_mode=False, data_prop=True) - - # for some reason, reloading the model is necessary to apply the correct shape - proto = onnx.load(model_path) - graph = import_onnx(proto) - for node in graph.nodes: - if node.op == "Slice": - for link in node.outputs: - if "Slice_123" in link.name and link.shape[0] == 3: - link.shape[0] = 1 - - graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True) - graph.toposort() - graph.fold_constants() - updated = 
export_onnx(graph) - onnx.save(updated, model_path) - infer_shapes_path(model_path, check_type=True, strict_mode=True, data_prop=True) - - -def onnx_make_fixed(input_path: str, output_path: str, input_shape: tuple[int, ...]): - simplified, success = onnxsim.simplify(input_path) - if not success: - raise RuntimeError(f"Failed to simplify {input_path}") - try: - onnx.save(simplified, output_path) - except: - onnx.save(simplified, output_path, save_as_external_data=True, all_tensors_to_one_file=False) - infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True) - model = onnx.load_model(output_path) - make_input_shape_fixed(model.graph, model.graph.input[0].name, input_shape) - fix_output_shapes(model) - try: - onnx.save(model, output_path) - except: - onnx.save(model, output_path, save_as_external_data=True, all_tensors_to_one_file=False) - onnx.save(model, output_path) - infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True) - - -class ExportBase: - task: ClassVar[str] - - def __init__( - self, - name: str, - input_shape: tuple[int, ...], - pretrained: str | None = None, - optimization_level: int = 5, - ): - super().__init__() - self.name = name - self.optimize = optimization_level - self.input_shape = input_shape - self.pretrained = pretrained - self.cache_dir = os.path.join(os.environ["CACHE_DIR"], self.model_name) - - def download(self) -> str: - model_path = os.path.join(self.cache_dir, self.task, "model.onnx") - if not os.path.isfile(model_path): - print(f"Downloading {self.model_name}...") - snapshot_download(self.repo_name, cache_dir=self.cache_dir, local_dir=self.cache_dir, local_dir_use_symlinks=False) - return model_path - - def to_onnx_static(self) -> str: - onnx_path_original = self.download() - static_dir = os.path.join(self.cache_dir, self.task, "static") - os.makedirs(static_dir, exist_ok=True) - - static_path = os.path.join(static_dir, "model.onnx") - if not os.path.isfile(static_path): - print(f"Making {self.model_name} ({self.task}) static") - onnx_make_fixed(onnx_path_original, static_path, self.input_shape) - make_onnx_armnn_compatible(static_path) - static_model = onnx.load_model(static_path) - self.inputs = [input_.name for input_ in static_model.graph.input] - self.outputs = [output_.name for output_ in static_model.graph.output] - return static_path - - def to_tflite(self, output_dir: str) -> tuple[str, str]: - input_path = self.to_onnx_static() - tflite_fp32 = os.path.join(output_dir, "model_float32.tflite") - tflite_fp16 = os.path.join(output_dir, "model_float16.tflite") - if not os.path.isfile(tflite_fp32) or not os.path.isfile(tflite_fp16): - print(f"Exporting {self.model_name} ({self.task}) to TFLite (this might take a few minutes)") - onnx2tf.convert( - input_onnx_file_path=input_path, - output_folder_path=output_dir, - keep_shape_absolutely_input_names=self.inputs, - verbosity="warn", - copy_onnx_input_output_names_to_tflite=True, - output_signaturedefs=True, - ) - - return tflite_fp32, tflite_fp16 - - def to_armnn(self, output_dir: str) -> tuple[str, str]: - output_dir = os.path.abspath(output_dir) - tflite_model_dir = os.path.join(output_dir, "tflite") - tflite_fp32, tflite_fp16 = self.to_tflite(tflite_model_dir) - - fp16_dir = os.path.join(output_dir, "fp16") - os.makedirs(fp16_dir, exist_ok=True) - armnn_fp32 = os.path.join(output_dir, "model.armnn") - armnn_fp16 = os.path.join(fp16_dir, "model.armnn") - - args = ["./armnnconverter", "-f", "tflite-binary"] - args.append("-i") - args.extend(self.inputs) - 
args.append("-o") - args.extend(self.outputs) - - fp32_args = args.copy() - fp32_args.extend(["-m", tflite_fp32, "-p", armnn_fp32]) - - print(f"Exporting {self.model_name} ({self.task}) to ARM NN with fp32 precision") - try: - print(subprocess.check_output(fp32_args, stderr=subprocess.STDOUT).decode()) - except subprocess.CalledProcessError as e: - print(e.output.decode()) - try: - rmtree(tflite_model_dir, ignore_errors=True) - finally: - raise e - print(f"Finished exporting {self.model_name} ({self.task}) with fp32 precision") - - fp16_args = args.copy() - fp16_args.extend(["-m", tflite_fp16, "-p", armnn_fp16]) - - print(f"Exporting {self.model_name} ({self.task}) to ARM NN with fp16 precision") - try: - print(subprocess.check_output(fp16_args, stderr=subprocess.STDOUT).decode()) - except subprocess.CalledProcessError as e: - print(e.output.decode()) - try: - rmtree(tflite_model_dir, ignore_errors=True) - finally: - raise e - print(f"Finished exporting {self.model_name} ({self.task}) with fp16 precision") - - return armnn_fp32, armnn_fp16 - - @property - def model_name(self) -> str: - return f"{self.name}__{self.pretrained}" if self.pretrained else self.name - - @property - def repo_name(self) -> str: - return f"immich-app/{self.model_name}" - -class ArcFace(ExportBase): - task = "recognition" - - -class RetinaFace(ExportBase): - task = "detection" - - -class OpenClipVisual(ExportBase): - task = "visual" - - -class OpenClipTextual(ExportBase): - task = "textual" - - -class MClipTextual(ExportBase): - task = "textual" - - -def main() -> None: - if platform.machine() not in ("x86_64", "AMD64"): - raise RuntimeError(f"Can only run on x86_64 / AMD64, not {platform.machine()}") - hf_token = os.environ.get("HF_AUTH_TOKEN") - if hf_token: - login(token=hf_token) - os.environ["LD_LIBRARY_PATH"] = "armnn" - failed: list[Callable[[], ExportBase]] = [ - lambda: OpenClipVisual("ViT-H-14-378-quickgelu", (1, 3, 378, 378), pretrained="dfn5b"), # flatbuffers: cannot grow buffer beyond 2 gigabytes (will probably work with fp16) - lambda: OpenClipVisual("ViT-H-14-quickgelu", (1, 3, 224, 224), pretrained="dfn5b"), # flatbuffers: cannot grow buffer beyond 2 gigabytes (will probably work with fp16) - lambda: OpenClipVisual("ViT-H-14", (1, 3, 224, 224), pretrained="laion2b-s32b-b79k"), - lambda: OpenClipTextual("ViT-H-14", (1, 77), pretrained="laion2b-s32b-b79k"), - lambda: OpenClipVisual("ViT-g-14", (1, 3, 224, 224), pretrained="laion2b-s12b-b42k"), - lambda: OpenClipTextual("ViT-g-14", (1, 77), pretrained="laion2b-s12b-b42k"), - lambda: OpenClipVisual("XLM-Roberta-Large-Vit-B-16Plus", (1, 3, 240, 240)), - lambda: OpenClipVisual("XLM-Roberta-Large-ViT-H-14", (1, 3, 224, 224), pretrained="frozen_laion5b_s13b_b90k"), - lambda: MClipTextual("XLM-Roberta-Large-Vit-L-14", (1, 77)), # Expected normalized_shape to be at least 1-dimensional, i.e., containing at least one element, but got normalized_shape = [] - lambda: MClipTextual("XLM-Roberta-Large-Vit-B-16Plus", (1, 77)), # Expected normalized_shape to be at least 1-dimensional, i.e., containing at least one element, but got normalized_shape = [] - lambda: MClipTextual("LABSE-Vit-L-14", (1, 77)), # Expected normalized_shape to be at least 1-dimensional, i.e., containing at least one element, but got normalized_shape = [] - lambda: OpenClipTextual("XLM-Roberta-Large-ViT-H-14", (1, 77), pretrained="frozen_laion5b_s13b_b90k"), # Expected normalized_shape to be at least 1-dimensional, i.e., containing at least one element, but got normalized_shape = [] - ] - - oom = [ 
- lambda: OpenClipVisual("nllb-clip-base-siglip", (1, 3, 384, 384), pretrained="v1"), - lambda: OpenClipTextual("nllb-clip-base-siglip", (1, 77), pretrained="v1"), - lambda: OpenClipVisual("nllb-clip-large-siglip", (1, 3, 384, 384), pretrained="v1"), - lambda: OpenClipTextual("nllb-clip-large-siglip", (1, 77), pretrained="v1"), # ERROR (tinynn.converter.base) Unsupported ops: aten::logical_not - # lambda: OpenClipTextual("ViT-H-14-quickgelu", (1, 77), pretrained="dfn5b"), - # lambda: OpenClipTextual("ViT-H-14-378-quickgelu", (1, 77), pretrained="dfn5b"), - # lambda: OpenClipVisual("XLM-Roberta-Large-Vit-L-14", (1, 3, 224, 224)), - ] - - succeeded: list[Callable[[], ExportBase]] = [ - # lambda: OpenClipVisual("ViT-B-32", (1, 3, 224, 224), pretrained="laion2b_e16"), - # lambda: OpenClipTextual("ViT-B-32", (1, 77), pretrained="laion2b_e16"), - # lambda: OpenClipVisual("ViT-B-32", (1, 3, 224, 224), pretrained="laion400m_e31"), - # lambda: OpenClipTextual("ViT-B-32", (1, 77), pretrained="laion400m_e31"), - # lambda: OpenClipVisual("ViT-B-32", (1, 3, 224, 224), pretrained="laion400m_e32"), - # lambda: OpenClipTextual("ViT-B-32", (1, 77), pretrained="laion400m_e32"), - # lambda: OpenClipVisual("ViT-B-32", (1, 3, 224, 224), pretrained="laion2b-s34b-b79k"), - # lambda: OpenClipTextual("ViT-B-32", (1, 77), pretrained="laion2b-s34b-b79k"), - # lambda: OpenClipVisual("ViT-B-16", (1, 3, 224, 224), pretrained="laion400m_e31"), - # lambda: OpenClipTextual("ViT-B-16", (1, 77), pretrained="laion400m_e31"), - # lambda: OpenClipVisual("ViT-B-16", (1, 3, 224, 224), pretrained="laion400m_e32"), - # lambda: OpenClipTextual("ViT-B-16", (1, 77), pretrained="laion400m_e32"), - # lambda: OpenClipVisual("ViT-B-16-plus-240", (1, 3, 240, 240), pretrained="laion400m_e31"), - # lambda: OpenClipTextual("ViT-B-16-plus-240", (1, 77), pretrained="laion400m_e31"), - # lambda: OpenClipVisual("ViT-B-32", (1, 3, 224, 224), pretrained="openai"), - # lambda: OpenClipTextual("ViT-B-32", (1, 77), pretrained="openai"), - # lambda: OpenClipVisual("ViT-B-16", (1, 3, 224, 224), pretrained="openai"), - # lambda: OpenClipTextual("ViT-B-16", (1, 77), pretrained="openai"), - # lambda: OpenClipVisual("RN50", (1, 3, 224, 224), pretrained="openai"), - # lambda: OpenClipTextual("RN50", (1, 77), pretrained="openai"), - # lambda: OpenClipVisual("RN50", (1, 3, 224, 224), pretrained="yfcc15m"), - # lambda: OpenClipTextual("RN50", (1, 77), pretrained="yfcc15m"), - # lambda: OpenClipVisual("RN50", (1, 3, 224, 224), pretrained="cc12m"), - # lambda: OpenClipTextual("RN50", (1, 77), pretrained="cc12m"), - # lambda: OpenClipVisual("XLM-Roberta-Large-Vit-B-32", (1, 3, 224, 224)), - # lambda: OpenClipVisual("ViT-L-14", (1, 3, 224, 224), pretrained="openai"), - # lambda: OpenClipTextual("ViT-L-14", (1, 77), pretrained="openai"), - lambda: OpenClipVisual("ViT-L-14", (1, 3, 224, 224), pretrained="laion400m_e31"), - lambda: OpenClipTextual("ViT-L-14", (1, 77), pretrained="laion400m_e31"), - lambda: OpenClipVisual("ViT-L-14", (1, 3, 224, 224), pretrained="laion400m_e32"), - lambda: OpenClipTextual("ViT-L-14", (1, 77), pretrained="laion400m_e32"), - lambda: OpenClipVisual("ViT-L-14", (1, 3, 224, 224), pretrained="laion2b-s32b-b82k"), - lambda: OpenClipTextual("ViT-L-14", (1, 77), pretrained="laion2b-s32b-b82k"), - # lambda: OpenClipVisual("ViT-L-14-336", (1, 3, 336, 336), pretrained="openai"), - # lambda: OpenClipTextual("ViT-L-14-336", (1, 77), pretrained="openai"), - # lambda: ArcFace("buffalo_s", (1, 3, 112, 112), optimization_level=3), - # lambda: 
RetinaFace("buffalo_s", (1, 3, 640, 640), optimization_level=3), - # lambda: ArcFace("buffalo_m", (1, 3, 112, 112), optimization_level=3), - # lambda: RetinaFace("buffalo_m", (1, 3, 640, 640), optimization_level=3), - # lambda: ArcFace("buffalo_l", (1, 3, 112, 112), optimization_level=3), - # lambda: RetinaFace("buffalo_l", (1, 3, 640, 640), optimization_level=3), - # lambda: ArcFace("antelopev2", (1, 3, 112, 112), optimization_level=3), - # lambda: RetinaFace("antelopev2", (1, 3, 640, 640), optimization_level=3), - ] - - models: list[Callable[[], ExportBase]] = [*failed, *succeeded] - for _model in succeeded: - model = _model() - try: - model_dir = os.path.join("output", model.model_name) - output_dir = os.path.join(model_dir, model.task) - armnn_fp32, armnn_fp16 = model.to_armnn(output_dir) - relative_fp32 = os.path.relpath(armnn_fp32, start=model_dir) - relative_fp16 = os.path.relpath(armnn_fp16, start=model_dir) - if hf_token and os.path.isfile(armnn_fp32): - print(f"Uploading {model.model_name} ({model.task}) ARM NN model with fp32 precision") - upload_file(path_or_fileobj=armnn_fp32, path_in_repo=relative_fp32, repo_id=model.repo_name) - print(f"Finished uploading {model.model_name} ({model.task}) ARM NN model with fp32 precision") - if hf_token and os.path.isfile(armnn_fp16): - print(f"Uploading {model.model_name} ({model.task}) ARM NN model with fp16 precision") - upload_file(path_or_fileobj=armnn_fp16, path_in_repo=relative_fp16, repo_id=model.repo_name) - print(f"Finished uploading {model.model_name} ({model.task}) ARM NN model with fp16 precision") - except Exception as exc: - print(f"Failed to export {model.model_name} ({model.task}): {exc}") - raise exc - - -if __name__ == "__main__": - main() diff --git a/machine-learning/export/ann/ann.cpp b/machine-learning/export/ann/scripts/ann.cpp similarity index 100% rename from machine-learning/export/ann/ann.cpp rename to machine-learning/export/ann/scripts/ann.cpp diff --git a/machine-learning/export/ann/build-converter.sh b/machine-learning/export/ann/scripts/build-converter.sh similarity index 100% rename from machine-learning/export/ann/build-converter.sh rename to machine-learning/export/ann/scripts/build-converter.sh diff --git a/machine-learning/export/ann/build.sh b/machine-learning/export/ann/scripts/build.sh similarity index 100% rename from machine-learning/export/ann/build.sh rename to machine-learning/export/ann/scripts/build.sh diff --git a/machine-learning/export/ann/download-armnn.sh b/machine-learning/export/ann/scripts/download-armnn.sh similarity index 100% rename from machine-learning/export/ann/download-armnn.sh rename to machine-learning/export/ann/scripts/download-armnn.sh