summaryrefslogtreecommitdiff
path: root/misc/py-sagemaker-serve
diff options
context:
space:
mode:
Diffstat (limited to 'misc/py-sagemaker-serve')
-rw-r--r--misc/py-sagemaker-serve/Makefile29
-rw-r--r--misc/py-sagemaker-serve/distinfo3
-rw-r--r--misc/py-sagemaker-serve/files/patch-pyproject.toml44
-rw-r--r--misc/py-sagemaker-serve/pkg-descr11
4 files changed, 87 insertions, 0 deletions
diff --git a/misc/py-sagemaker-serve/Makefile b/misc/py-sagemaker-serve/Makefile
new file mode 100644
index 000000000000..cf7cb024e6ee
--- /dev/null
+++ b/misc/py-sagemaker-serve/Makefile
@@ -0,0 +1,29 @@
+PORTNAME= sagemaker-serve
+DISTVERSION= 1.0
+CATEGORIES= misc python # machine-learning
+MASTER_SITES= PYPI
+PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX}
+DISTNAME= ${PORTNAME:S/-/_/}-${PORTVERSION}
+
+MAINTAINER= yuri@FreeBSD.org
+COMMENT= SageMaker: Library for training & deploying models on Amazon SageMaker
+WWW= https://sagemaker.readthedocs.io/en/stable/ \
+ https://github.com/aws/sagemaker-python-sdk
+
+LICENSE= APACHE20
+
+BUILD_DEPENDS= ${PY_SETUPTOOLS} \
+ ${PYTHON_PKGNAMEPREFIX}wheel>0:devel/py-wheel@${PY_FLAVOR}
+RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}boto3>=1.35.75<2.0:www/py-boto3@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}botocore>=1.35.75<2.0:devel/py-botocore@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}psutil>=0:sysutils/py-psutil@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}sagemaker-core>=2.0.0:misc/py-sagemaker-core@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}sagemaker-train>=0.1.0:misc/py-sagemaker-train@${PY_FLAVOR} \
+ ${PYTHON_PKGNAMEPREFIX}tqdm>=0:misc/py-tqdm@${PY_FLAVOR}
+
+USES= python
+USE_PYTHON= pep517 autoplist
+
+NO_ARCH= yes
+
+.include <bsd.port.mk>
diff --git a/misc/py-sagemaker-serve/distinfo b/misc/py-sagemaker-serve/distinfo
new file mode 100644
index 000000000000..5af71bb8afb4
--- /dev/null
+++ b/misc/py-sagemaker-serve/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1764171850
+SHA256 (sagemaker_serve-1.0.tar.gz) = f5aeaf376e2cb41d476c5b1a9c06c347868acb0c5e5ffa67c2da373ac54f97d4
+SIZE (sagemaker_serve-1.0.tar.gz) = 148682
diff --git a/misc/py-sagemaker-serve/files/patch-pyproject.toml b/misc/py-sagemaker-serve/files/patch-pyproject.toml
new file mode 100644
index 000000000000..9ee202738eec
--- /dev/null
+++ b/misc/py-sagemaker-serve/files/patch-pyproject.toml
@@ -0,0 +1,44 @@
+Removed dependencies:
+- deepdiff: declared but never imported or used in the codebase
+- mlflow: optional, only used conditionally when sagemaker_mlflow is installed
+- sagemaker_schema_inference_artifacts: optional, used in try/except fallback
+- pytest: test-only dependency, not needed at runtime
+- tqdm, psutil: handled via RUN_DEPENDS in Makefile
+- tritonclient[http]: optional, used in try/except for Triton validation
+- onnx, onnxruntime, torch: not available on FreeBSD; torch has a module-level
+ import in app.py but that module is only used for specific serving scenarios
+
+--- pyproject.toml.orig 2025-11-20 20:42:14 UTC
++++ pyproject.toml
+@@ -1,5 +1,5 @@
+ [build-system]
+-requires = ["setuptools>=64", "wheel"]
++requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+@@ -23,16 +23,6 @@ dependencies = [
+ "sagemaker-train>=0.1.0",
+ "boto3>=1.35.75,<2.0",
+ "botocore>=1.35.75,<2.0",
+- "deepdiff",
+- "mlflow",
+- "sagemaker_schema_inference_artifacts",
+- "pytest",
+- "tqdm",
+- "psutil",
+- "tritonclient[http]",
+- "onnx",
+- "onnxruntime",
+- "torch>=2.0.0"
+ ]
+
+ [project.optional-dependencies]
+@@ -49,7 +39,6 @@ dev = [
+ ]
+
+ [tool.setuptools]
+-package-dir = {"" = "src"}
+ include-package-data = true
+
+ [tool.setuptools.packages.find]
diff --git a/misc/py-sagemaker-serve/pkg-descr b/misc/py-sagemaker-serve/pkg-descr
new file mode 100644
index 000000000000..16dad05472d1
--- /dev/null
+++ b/misc/py-sagemaker-serve/pkg-descr
@@ -0,0 +1,11 @@
+sagemaker-serve is a part of the SageMaker Python SDK.
+
+SageMaker Python SDK is an open source library for training and deploying
+machine learning models on Amazon SageMaker.
+
+With the SDK, you can train and deploy models using popular deep learning
+frameworks Apache MXNet and TensorFlow. You can also train and deploy
+models with Amazon algorithms, which are scalable implementations of core
+machine learning algorithms that are optimized for SageMaker and GPU training.
+If you have your own algorithms built into SageMaker compatible Docker
+containers, you can train and host models using these as well.