27 changes: 19 additions & 8 deletions pyproject.toml
@@ -19,7 +19,6 @@ classifiers = [
]
requires-python = ">=3.8,<3.11"
dependencies = [
"transformers==4.55.0",
"huggingface-hub==0.34.0",
"hf_transfer==0.1.9",
"peft==0.13.2",
@@ -35,21 +34,33 @@ dependencies = [
"onnxscript==0.2.5",
"pillow===10.4.0",
"sympy",
"tensorboard",
"fire",
"py7zr",
"torchmetrics==1.7.0",
"torch==2.7.0; platform_machine=='aarch64'",
# Specifying torch cpu package URL per python version, update the list once pytorch releases whl for python>3.11
"torch@https://download.pytorch.org/whl/cpu/torch-2.4.1%2Bcpu-cp38-cp38-linux_x86_64.whl ; python_version=='3.8' and platform_machine=='x86_64'",
"torch@https://download.pytorch.org/whl/cpu/torch-2.7.0%2Bcpu-cp39-cp39-manylinux_2_28_x86_64.whl ; python_version=='3.9' and platform_machine=='x86_64'",
"torch@https://download.pytorch.org/whl/cpu/torch-2.7.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
]

[project.optional-dependencies]
test = ["pytest","pytest-mock"]
docs = ["Sphinx==7.1.2","sphinx-rtd-theme==2.0.0","myst-parser==3.0.1","sphinx-multiversion"]
quality = ["black", "ruff", "hf_doc_builder@git+https://github.com/huggingface/doc-builder.git"]
infer = [
Contributor
This will be the default; we just need to override it when we do pip install -e .[ft].

Contributor Author
No, having torch under the dependencies section and a different torch version under the "ft" optional dependencies creates conflicts. Hence, I am proposing to move the different torch versions under different sections. The same will be true for transformers: FT will soon have its own transformers version (either open source or based on a whl file that contains the qaic backend changes).
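
For orientation, here is a condensed sketch of the layout this thread converges on (package lists abbreviated, only the cp310 torch URLs shown; the full pins are in the diff below): torch and transformers leave the base dependencies, and each extra carries its own pins, so pip install -e .[ft] can resolve without clashing with a base torch pin.

[project]
dependencies = [
    # shared packages only (abbreviated); no torch and no transformers here
    "huggingface-hub==0.34.0",
    "peft==0.13.2",
    "onnxscript==0.2.5",
]

[project.optional-dependencies]
infer = [
    "transformers==4.55.0",
    # one marker-gated URL per supported Python version; only cp310 shown here
    "torch@https://download.pytorch.org/whl/cpu/torch-2.7.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
]
ft = [
    "transformers==4.55.0 ; python_version>='3.10' and python_version<'3.12' and platform_machine=='x86_64'",
    "torch@https://download.pytorch.org/whl/cpu/torch-2.9.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
]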

"transformers==4.55.0",
"torch==2.7.0; platform_machine=='aarch64'",
# Specifying torch cpu package URL per python version, update the list once pytorch releases whl for python>3.11
"torch@https://download.pytorch.org/whl/cpu/torch-2.4.1%2Bcpu-cp38-cp38-linux_x86_64.whl ; python_version=='3.8' and platform_machine=='x86_64'",
"torch@https://download.pytorch.org/whl/cpu/torch-2.7.0%2Bcpu-cp39-cp39-manylinux_2_28_x86_64.whl ; python_version=='3.9' and platform_machine=='x86_64'",
"torch@https://download.pytorch.org/whl/cpu/torch-2.7.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
]
ft = [
Contributor
What about the accelerate package?

Contributor
The whl file for that would come from the Eager team itself, just like the torch_qaic package.

Contributor
That would require explicit installation by the user again, right? If it can be included here, that would be good: when we do pip install .[ft], all the dependencies required for FT are installed in one go.

Contributor Author
Yes, the plan is to include the files here and have them installed from the /opt/ location.

"accelerate @ file:///opt/qti-aic/integrations/accelerate/py310/accelerate-1.10.1-py3-none-any.whl ; python_version=='3.10' and platform_machine=='x86_64'",
"accelerate @ file:///opt/qti-aic/integrations/accelerate/py311/accelerate-1.10.1-py3-none-any.whl ; python_version=='3.11' and platform_machine=='x86_64'",
"tensorboard ; python_version>='3.10' and python_version<'3.12' and platform_machine=='x86_64'",
"transformers==4.55.0 ; python_version>='3.10' and python_version<'3.12' and platform_machine=='x86_64'",
Contributor
The dependencies section already has the same version of transformers at line number 22 of this file. Why is it needed here?

Contributor
Please check the Python version comment for this line as well.

Contributor Author
Eager supports Python 3.10 and 3.12.
Transformers is explicitly added here to simplify future modifications to the FT transformers version.

Contributor
Transformers and torch should be handled uniformly. While both of them are added in the ft section, for inference one of them is kept in dependencies and the other in the infer section.

Contributor Author
Updated in the latest revision.
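
For illustration only: if FT later ships its own transformers build as a wheel with the qaic backend changes (as suggested earlier in this review), the PyPI pin above could be swapped for a direct file reference in the same style as the torch_qaic entries below. The path, version, and tags in this sketch are hypothetical placeholders, not an existing artifact.

ft = [
    # hypothetical FT-specific transformers wheel; path, version, and tags are placeholders
    "transformers @ file:///opt/qti-aic/integrations/transformers/py310/transformers-4.55.0-py3-none-any.whl ; python_version=='3.10' and platform_machine=='x86_64'",
]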

"torch@https://download.pytorch.org/whl/cpu/torch-2.9.0%2Bcpu-cp310-cp310-manylinux_2_28_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
"torch@https://download.pytorch.org/whl/cpu/torch-2.9.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl ; python_version=='3.11' and platform_machine=='x86_64'",
Contributor
Are we supporting Python 3.11? At line number 20, requires-python = ">=3.8,<3.11" is specified, and that will not let QEff install for Python 3.11.

Contributor Author
Yes, that is the global-level Python constraint. It will soon be updated to support Python 3.12 as well, and in that case this line still holds true. There is no harm in adding it here.
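
A minimal sketch of what that would look like, assuming requires-python is eventually widened (the upper bound below is a placeholder, not a value from this PR): the cp311 entry in this diff stays as-is, and its marker simply starts matching once the interpreter version is allowed.

[project]
# hypothetical future value; the file currently pins ">=3.8,<3.11"
requires-python = ">=3.8,<3.13"

[project.optional-dependencies]
ft = [
    # unchanged line from this diff; it only activates on a matching interpreter
    "torch@https://download.pytorch.org/whl/cpu/torch-2.9.0%2Bcpu-cp311-cp311-manylinux_2_28_x86_64.whl ; python_version=='3.11' and platform_machine=='x86_64'",
]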

"torchmetrics==1.7.0 ; python_version>='3.10' and python_version<'3.12' and platform_machine=='x86_64'",
"torch_qaic @ file:///opt/qti-aic/integrations/torch_qaic/py310/torch_qaic-0.1.0-cp310-cp310-linux_x86_64.whl ; python_version=='3.10' and platform_machine=='x86_64'",
"torch_qaic @ file:///opt/qti-aic/integrations/torch_qaic/py311/torch_qaic-0.1.0-cp310-cp310-linux_x86_64.whl ; python_version=='3.11' and platform_machine=='x86_64'",
]

[build-system]
requires = ["setuptools>=62.0.0"]