[project]
name = "sec-cybert-train"
version = "0.1.0"
description = "SEC-cyBERT training pipeline: DAPT, TAPT, fine-tuning, and evaluation"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "torch>=2.11",
    "transformers",
    "datasets",
    "accelerate",
    "pyyaml",
    "nvidia-nvshmem-cu13>=3.4.5",
    "nvidia-cuda-cccl>=13.2.27",
    "flash-attn",
    "unsloth",
]

[project.scripts]
sec-cybert = "main:main"

[[tool.uv.index]]
name = "pytorch-cu130"
url = "https://download.pytorch.org/whl/cu130"
explicit = true

[[tool.uv.index]]
url = "https://pypi.org/simple/"
default = true

[tool.uv.sources]
torch = [
    { index = "pytorch-cu130", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
]
flash-attn = { url = "https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.9.4/flash_attn-2.6.3%2Bcu130torch2.11-cp313-cp313-linux_x86_64.whl" }