Skip to content

Commit 908cbda

Browse files
authored
Merge pull request #1321 from guardrails-ai/upgrade-transformers
More Version Updates
2 parents 724c7f0 + 636dd5c commit 908cbda

File tree

7 files changed

+97
-66
lines changed

7 files changed

+97
-66
lines changed

poetry.lock

Lines changed: 47 additions & 42 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ dependencies = [
2020
"rich>=13.6.0,<15.0.0",
2121
"pydantic>=2.0.0, <3.0",
2222
"typer>=0.9.0,<0.16",
23+
"click<8.2.0", # TODO: Upgrade typer range and test cli before allowing >=8.2.0
2324
"tenacity>=8.1.0,<10.0.0",
2425
"rstr>=3.2.2,<4.0.0",
2526
"typing-extensions>=4.8.0,<5.0.0",
@@ -96,8 +97,7 @@ docs-build = [
9697
"pydoc-markdown==4.8.2"
9798
]
9899
huggingface = [
99-
# Blocked from upgrading further by jsonformer
100-
"transformers>=4.38.0,<4.49.0",
100+
"transformers>=4.38.0,<5.0.0",
101101
"torch>=2.1.1,<3.0.0",
102102
"guardrails-jsonformer>=0.13.1,<1.0.0"
103103
]
@@ -206,4 +206,5 @@ dependencies = true # to load [tool.poetry.dependencies]
206206
[tool.liccheck.authorized_packages]
207207
aiocache = "0.12.3" # BSD 3-Clause
208208
aiohappyeyeballs = "2.4.3" # Python Software Foundation
209-
guardrails-api = "*" # Modified ELSSTIC
209+
guardrails-api = "*" # Modified ELSSTIC
210+
pondpond = "1.4.1" # Apache 2.0

server_ci/.dockerignore

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
.coverage
2+
.coverage.MacBook-Pro.local.75130.XUoRtLxx
3+
.dockerignore
4+
.docusaurus
5+
.git
6+
.github
7+
.gitignore
8+
.idea
9+
.pre-commit-config.yaml
10+
.pytest_cache
11+
.python-version
12+
.ruff_cache
13+
.venv
14+
.vscode
15+
build
16+
codecov.yml
17+
CONTRIBUTING.md
18+
docs
19+
docs-build
20+
docs-graveyard
21+
DOCS.md
22+
docusaurus
23+
htmlcov
24+
make.bat
25+
mlartifacts
26+
mlruns
27+
node_modules
28+
package-lock.json
29+
package.json
30+
tests

server_ci/Dockerfile.fastapi

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
FROM python:3.12-slim
1+
# New LiteLLM version has a dependency on madoka which requires g++ to build the wheel
2+
FROM python:3.12
23

34
ARG GUARDRAILS_TOKEN
45
ARG GUARDRAILS_TEMPLATE="guard-template.json"
@@ -19,9 +20,6 @@ RUN apt-get update && \
1920
apt-get clean && \
2021
rm -rf /var/lib/apt/lists/*
2122

22-
RUN pipx install poetry
23-
24-
# Ensure poetry is available in the PATH
2523
ENV PATH="/root/.local/bin:$PATH"
2624

2725
# Copy the entrypoint script
@@ -30,13 +28,9 @@ COPY ../ /app/guardrails
3028

3129
# Install guardrails, the guardrails API, and gunicorn
3230
# openai optional. only used for integration testing
33-
RUN pip install "uvicorn[standard]" "guardrails-api>=0.1.0a1" "guardrails-api-client>=0.4.0a2" --no-cache-dir
34-
35-
WORKDIR /app/guardrails
36-
37-
RUN poetry install
31+
RUN pip install "uvicorn[standard]" --no-cache-dir
3832

39-
RUN pip install ./
33+
RUN pip install "/app/guardrails[api]"
4034

4135
RUN guardrails configure --enable-metrics --enable-remote-inferencing --token $GUARDRAILS_TOKEN
4236

server_ci/Dockerfile.flask

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
FROM python:3.12-slim
1+
# New LiteLLM version has a dependency on madoka which requires g++ to build the wheel
2+
FROM python:3.12
23

34
ARG GUARDRAILS_TOKEN
45
ARG GUARDRAILS_TEMPLATE="guard-template.json"
@@ -19,17 +20,15 @@ RUN apt-get update && \
1920
apt-get clean && \
2021
rm -rf /var/lib/apt/lists/*
2122

22-
RUN pipx install poetry
23-
24-
# Ensure poetry is available in the PATH
2523
ENV PATH="/root/.local/bin:$PATH"
2624

2725
# Copy the entrypoint script
2826
COPY /server_ci/flask-entry.sh /app/flask-entry.sh
2927

3028
# Install guardrails, the guardrails API, and gunicorn
3129
# openai optional. only used for integration testing
32-
RUN pip install "gunicorn[gthread]>=22.0.0,<23" "guardrails-api>=0.0.5,<0.1.0" "guardrails-ai==0.5.15" "guardrails-api-client>=0.3.0,<0.4.0"
30+
# Lock click version to what's compatible with typer==0.12.5
31+
RUN pip install "click==8.1.8" "gunicorn[gthread]>=22.0.0,<23" "guardrails-ai[api]==0.5.15"
3332

3433
RUN guardrails configure --enable-metrics --enable-remote-inferencing --token $GUARDRAILS_TOKEN
3534

tests/integration_tests/test_formatters.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,12 +58,12 @@ class Foo(BaseModel):
5858
assert isinstance(validated_output["bez"][0], str)
5959

6060

61-
@pytest.mark.skip(reason="Random model infinitely recurses on complex struct. Use GPT2")
61+
@if_transformers_installed
6262
def test_hugging_face_pipeline_complex_schema():
6363
# NOTE: This is the real GPT-2 model.
6464
from transformers import pipeline
6565

66-
model = pipeline("text-generation", "gpt2")
66+
model = pipeline("text-generation", "distilgpt2")
6767

6868
class MultiNum(BaseModel):
6969
whole: int
@@ -73,10 +73,12 @@ class Tricky(BaseModel):
7373
foo: MultiNum
7474

7575
g = Guard.for_pydantic(Tricky, output_formatter="jsonformer")
76-
response = g(model, prompt="Sample:")
76+
response = g(model, messages=[{"content": "Sample:", "role": "user"}])
7777
out = response.validated_output
7878
assert isinstance(out, dict)
7979
assert "foo" in out
8080
assert isinstance(out["foo"], dict)
81-
assert isinstance(out["foo"]["whole"], int | float)
81+
assert isinstance(out["foo"]["whole"], int) or isinstance(
82+
out["foo"]["whole"], float
83+
)
8284
assert isinstance(out["foo"]["frac"], float)

tests/unit_tests/test_llm_providers.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -360,11 +360,11 @@ def mock_os_environ_get(key, *args):
360360
reason="transformers is not installed",
361361
)
362362
def test_get_llm_ask_hugging_face_model(mocker):
363-
from transformers import PreTrainedModel
363+
from transformers import PreTrainedModel, GenerationMixin
364364

365365
from guardrails.llm_providers import HuggingFaceModelCallable
366366

367-
class MockModel(PreTrainedModel):
367+
class MockModel(PreTrainedModel, GenerationMixin):
368368
_modules: Any
369369

370370
def __init__(self, *args, **kwargs):

0 commit comments

Comments (0)