[project]
name = "sec-cybert-train"
version = "0.1.0"
description = "SEC-cyBERT training pipeline: DAPT, TAPT, fine-tuning, and evaluation"
readme = "README.md"
# Pinned to a single minor: the prebuilt flash-attn wheel selected in
# [tool.uv.sources] is a cp313 build, so only CPython 3.13 can install it.
requires-python = ">=3.13,<3.14"
dependencies = [
    "accelerate>=1,<2",
    "coral-pytorch>=1.4.0",
    "datasets>=4,<5",
    # Must match the exact version tag of the prebuilt wheel pinned in
    # [tool.uv.sources] (2.6.3+cu130torch2.11).
    "flash-attn==2.6.3+cu130torch2.11",
    "krippendorff>=0.8.2",
    "matplotlib>=3.10.8",
    "onnx>=1.21.0",
    "onnxconverter-common>=1.16.0",
    # NOTE(review): both the CPU and GPU ONNX runtimes are listed — presumably
    # intentional (e.g. CPU-side export validation); confirm both are needed.
    "onnxruntime>=1.24.4",
    "onnxruntime-gpu>=1.24.4",
    "onnxscript>=0.6.2",
    "pyyaml>=6,<7",
    "scikit-learn>=1.8.0",
    "seaborn>=0.13.2",
    # torch is held to one minor so the CUDA 13.0 index below and the
    # flash-attn build (…torch2.11) stay in lockstep; torchao follows torch.
    "torch>=2.11,<2.12",
    "torchao>=0.17,<0.18",
    "transformers>=5,<6",
    "unsloth==2026.3.11",
]

[project.scripts]
sec-cybert = "main:main"

# CUDA 13.0 PyTorch wheel index. `explicit = true` means packages are resolved
# from this index only when pinned to it via [tool.uv.sources] (torch, below),
# never by default.
[[tool.uv.index]]
name = "pytorch-cu130"
url = "https://download.pytorch.org/whl/cu130"
explicit = true

# Default index for every other dependency.
[[tool.uv.index]]
url = "https://pypi.org/simple/"
default = true

[tool.uv.sources]
torch = [{ index = "pytorch-cu130" }]
# Prebuilt flash-attn wheel (CUDA 13.0, torch 2.11, CPython 3.13,
# linux x86_64) — avoids a long from-source CUDA compile.
flash-attn = { url = "https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.9.4/flash_attn-2.6.3%2Bcu130torch2.11-cp313-cp313-linux_x86_64.whl" }