diff options
author | Yuri Victorovich <yuri@FreeBSD.org> | 2025-08-19 08:44:51 -0700 |
---|---|---|
committer | Yuri Victorovich <yuri@FreeBSD.org> | 2025-08-19 08:45:31 -0700 |
commit | ab960d61e3e214f5a892307745286b8ad812ace0 (patch) | |
tree | dd7479f11696311683bc6d77003de6e9bb107299 | |
parent | misc/py-torchvision: update 0.22.0 → 0.23.0 (diff) |
misc/py-transformers: New port: State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
-rw-r--r-- | misc/Makefile | 1 | ||||
-rw-r--r-- | misc/py-transformers/Makefile | 40 | ||||
-rw-r--r-- | misc/py-transformers/distinfo | 3 | ||||
-rw-r--r-- | misc/py-transformers/pkg-descr | 10 |
4 files changed, 54 insertions, 0 deletions
diff --git a/misc/Makefile b/misc/Makefile
index 4d3466e4b671..653832b25188 100644
--- a/misc/Makefile
+++ b/misc/Makefile
@@ -548,6 +548,7 @@
     SUBDIR += py-torchmetrics
     SUBDIR += py-torchvision
     SUBDIR += py-tqdm
+    SUBDIR += py-transformers
     SUBDIR += py-tvm
     SUBDIR += py-uhi
     SUBDIR += py-vaderSentiment
diff --git a/misc/py-transformers/Makefile b/misc/py-transformers/Makefile
new file mode 100644
index 000000000000..b84baaf3a945
--- /dev/null
+++ b/misc/py-transformers/Makefile
@@ -0,0 +1,40 @@
+PORTNAME=	transformers
+DISTVERSION=	4.55.2
+CATEGORIES=	misc python # machine-learning
+MASTER_SITES=	PYPI
+PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
+
+MAINTAINER=	yuri@FreeBSD.org
+COMMENT=	State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
+WWW=		https://huggingface.co/transformers
+
+LICENSE=	APACHE20
+LICENSE_FILE=	${WRKSRC}/LICENSE
+
+BUILD_DEPENDS=	${PY_SETUPTOOLS} \
+		${PYTHON_PKGNAMEPREFIX}wheel>0:devel/py-wheel@${PY_FLAVOR}
+RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}filelock>=0:sysutils/py-filelock@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}huggingface-hub>=0.26.0:misc/py-huggingface-hub@${PY_FLAVOR} \
+		${PYNUMPY} \
+		${PYTHON_PKGNAMEPREFIX}packaging>=20.0:devel/py-packaging@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}pyyaml>=5.1:devel/py-pyyaml@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}regex>=2019.12.17:textproc/py-regex@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}requests>0:www/py-requests@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}safetensors>=0.4.3:misc/py-safetensors@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}tokenizers>=0.21:textproc/py-tokenizers@${PY_FLAVOR} \
+		${PYTHON_PKGNAMEPREFIX}tqdm>=4.27:misc/py-tqdm@${PY_FLAVOR}
+RUN_DEPENDS+=	${PYTHON_PKGNAMEPREFIX}pytorch>=2.1:misc/py-pytorch@${PY_FLAVOR}
+		#${PYTHON_PKGNAMEPREFIX}torchaudio>0:audio/py-torchaudio@${PY_FLAVOR} \
+		#${PYTHON_PKGNAMEPREFIX}torchvision>0:misc/py-torchvision@${PY_FLAVOR}
+		# the last two dependencies have some issues with shared libs
+TEST_DEPENDS=	${PYTHON_PKGNAMEPREFIX}parameterized>0:devel/py-parameterized@${PY_FLAVOR}
+
+USES=		python
+USE_PYTHON=	pep517 autoplist pytest
+
+TEST_ENV=	${MAKE_ENV} PYTHONPATH=${STAGEDIR}${PYTHONPREFIX_SITELIBDIR}
+TEST_WRKSRC=	${WRKSRC}/tests
+
+NO_ARCH=	yes
+
+.include <bsd.port.mk>
diff --git a/misc/py-transformers/distinfo b/misc/py-transformers/distinfo
new file mode 100644
index 000000000000..09c6a3081067
--- /dev/null
+++ b/misc/py-transformers/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1755533766
+SHA256 (transformers-4.55.2.tar.gz) = a45ec60c03474fd67adbce5c434685051b7608b3f4f167c25aa6aeb1cad16d4f
+SIZE (transformers-4.55.2.tar.gz) = 9571466
diff --git a/misc/py-transformers/pkg-descr b/misc/py-transformers/pkg-descr
new file mode 100644
index 000000000000..df86f100b696
--- /dev/null
+++ b/misc/py-transformers/pkg-descr
@@ -0,0 +1,10 @@
+Transformers acts as the model-definition framework for state-of-the-art machine
+learning models in text, computer vision, audio, video, and multimodal model,
+for both inference and training.
+
+It centralizes the model definition so that this definition is agreed upon
+across the ecosystem. transformers is the pivot across frameworks: if a model
+definition is supported, it will be compatible with the majority of training
+frameworks (Axolotl, Unsloth, DeepSpeed, FSDP, PyTorch-Lightning, ...),
+inference engines (vLLM, SGLang, TGI, ...), and adjacent modeling libraries
+(llama.cpp, mlx, ...) which leverage the model definition from transformers.