diff --git a/poetry.lock b/poetry.lock
index a857a9711..fe9e88ce9 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -3698,7 +3698,7 @@ url = "runtimes/catboost"
 
 [[package]]
 name = "mlserver-huggingface"
-version = "2.0.14"
+version = "2.0.16"
 description = "HuggingFace runtime for MLServer"
 optional = false
 python-versions = ">=3.9,<3.12"
@@ -3711,7 +3711,7 @@ bitsandbytes = "^0.42.0"
 mlserver = "*"
 optimum = {version = ">=1.4,<2.0", extras = ["onnxruntime"]}
 pillow = "*"
-pydantic = "2.7.1"
+pydantic = "^2.7.1"
 sentence-transformers = "2.5.1"
 tensorflow = "*"
 timm = "^1.0.13"
@@ -8614,4 +8614,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.12"
-content-hash = "d03b8d726210caa2afdd46f7341a81d2b494f4cf59af47f58c7796a9aed36ed4"
+content-hash = "34460288962c20fc619916f7d25a5309c53957f619a72f4a00c0cc37028fba85"
diff --git a/pyproject.toml b/pyproject.toml
index 52bf67b34..01dc738f4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,8 +62,8 @@ gevent = "*"
 aiofiles = "*"
 orjson = "*"
 uvloop = {version = "*", markers = "sys_platform != 'win32' and (sys_platform != 'cygwin' and platform_python_implementation != 'PyPy')"}
-pydantic = "2.7.1"
-pydantic-settings = "2.2.1"
+pydantic = "^2.7.1"
+pydantic-settings = "^2.2.1"
 python-multipart = "*"
 h11 = "0.16.0"
 
diff --git a/runtimes/huggingface/mlserver_huggingface/version.py b/runtimes/huggingface/mlserver_huggingface/version.py
index 2ae493360..9aa3f9036 100644
--- a/runtimes/huggingface/mlserver_huggingface/version.py
+++ b/runtimes/huggingface/mlserver_huggingface/version.py
@@ -1 +1 @@
-__version__ = "2.0.14"
+__version__ = "2.1.0"
diff --git a/runtimes/huggingface/poetry.lock b/runtimes/huggingface/poetry.lock
index 5261e9c29..78d635e4f 100644
--- a/runtimes/huggingface/poetry.lock
+++ b/runtimes/huggingface/poetry.lock
@@ -1301,13 +1301,13 @@ protobuf = ["grpcio-tools (>=1.64.1)"]
 
 [[package]]
 name = "h11"
-version = "0.14.0"
+version = "0.16.0"
 description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
-    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+    {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
+    {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
 ]
 
 [[package]]
@@ -1714,6 +1714,7 @@ fastapi = ">=0.88.0,!=0.89.0,<=0.110.0"
 gevent = "*"
 geventhttpclient = "*"
 grpcio = "*"
+h11 = "0.16.0"
 importlib-resources = ">=5.12,<7.0"
 numpy = "*"
 opentelemetry-exporter-otlp-proto-grpc = "^1.22.0"
@@ -4737,4 +4738,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.12"
-content-hash = "1fcd8127de0384b9c6be2767206e40e88737cb8b80a92153a33918c9479643dc"
+content-hash = "095e18b9d6074e2edbbcc75974358f60479d91b962be1a97f49aa2acce950475"
diff --git a/runtimes/huggingface/pyproject.toml b/runtimes/huggingface/pyproject.toml
index d9db173af..5e9c81dbb 100644
--- a/runtimes/huggingface/pyproject.toml
+++ b/runtimes/huggingface/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "mlserver-huggingface"
-version = "2.0.15"
+version = "2.1.0"
 description = "HuggingFace runtime for MLServer"
 authors = ["Seldon Technologies Ltd. "]
 license = "Apache-2.0"
@@ -13,7 +13,7 @@ mlserver = "*"
 tensorflow = "*"
 pillow = "*"
 optimum = {extras = ["onnxruntime"], version = ">=1.4,<2.0"}
-pydantic = "2.7.1"
+pydantic = "^2.7.1"
 accelerate = "^0.27.2"
 bitsandbytes = "^0.42.0"
 sentence-transformers = "2.5.1"