[project]
name = "sec-cybert-train"
version = "0.1.0"
description = "SEC-cyBERT training pipeline: DAPT, TAPT, fine-tuning, and evaluation"
readme = "README.md"
requires-python = ">=3.13,<3.14"
dependencies = [
    "torch>=2.11,<2.12",
    "torchao>=0.17,<0.18",
    "transformers>=5,<6",
    "datasets>=4,<5",
    "accelerate>=1,<2",
    "pyyaml>=6,<7",
    "flash-attn==2.6.3+cu130torch2.11",
    "unsloth==2026.3.11",
]

[project.scripts]
sec-cybert = "main:main"

[[tool.uv.index]]
name = "pytorch-cu130"
url = "https://download.pytorch.org/whl/cu130"
explicit = true

[[tool.uv.index]]
url = "https://pypi.org/simple/"
default = true

[tool.uv.sources]
torch = [{ index = "pytorch-cu130" }]
flash-attn = { url = "https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.9.4/flash_attn-2.6.3%2Bcu130torch2.11-cp313-cp313-linux_x86_64.whl" }