
Exclude transformers 5.4.0 due to flash_attn KeyError

2026-04-03 13:32:11 +02:00
parent 4fe7762262
commit e767aca68c
2 changed files with 5 additions and 5 deletions


@@ -9,7 +9,7 @@ dependencies = [
"python-dotenv",
"elevenlabs",
"torch==2.5.1",
"transformers",
"transformers!=5.4.0",
"diffusers",
"accelerate",
"safetensors",