rahul7star commited on
Commit
9b7d954
·
verified ·
1 Parent(s): c9dbb34

Update app_exp.py

Browse files
Files changed (1) hide show
  1. app_exp.py +20 -0
app_exp.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import gradio as gr
2
  import torch
3
  import os
@@ -19,6 +20,25 @@ subprocess.run(["pip3", "install", "-U", "cache-dit"], check=True)
19
 
20
  import cache_dit
21
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  # ============================================================
23
  # 1️⃣ Repository & Weights
24
  # ============================================================
 
1
+ import spaces
2
  import gradio as gr
3
  import torch
4
  import os
 
20
 
21
  import cache_dit
22
 
23
# ------------------------------------------------------------
# Optional FlashAttention 3 (best-effort install).
# Downloads a prebuilt FA3 wheel from the HF Hub and installs it into the
# *current* interpreter's environment. On any failure the app continues
# with enable_fa3 = False rather than crashing at startup.
# NOTE(review): hf_hub_download, subprocess, site and importlib are assumed
# to be imported earlier in the file (not visible in this hunk) — confirm.
# ------------------------------------------------------------
enable_fa3 = False  # default if FA3 cannot be loaded

try:
    print("Installing FlashAttention 3...")
    flash_attention_wheel = hf_hub_download(
        repo_id="rahul7star/flash-attn-3",
        repo_type="model",
        filename="128/flash_attn_3-3.0.0b1-cp39-abi3-linux_x86_64.whl",
    )
    # Use this interpreter's own pip (``python -m pip``) so the wheel is
    # guaranteed to land in the environment this process imports from;
    # a bare "pip" on PATH may belong to a different Python.
    import sys
    subprocess.run(
        [sys.executable, "-m", "pip", "install", flash_attention_wheel],
        check=True,
    )
    # Make the freshly installed package importable without restarting:
    # re-register site-packages and drop stale import-finder caches.
    site.addsitedir(site.getsitepackages()[0])
    importlib.invalidate_caches()
    enable_fa3 = True
    print("✅ FlashAttention 3 installed and enabled")
except Exception as e:
    # Best-effort by design: log the reason and fall back to the default
    # attention path instead of aborting the Space.
    print(f"⚠️ Could not install FlashAttention 3: {e}")
    # enable_fa3 remains False
42
  # ============================================================
43
  # 1️⃣ Repository & Weights
44
  # ============================================================