Update app.py
app.py
CHANGED
@@ -14,8 +14,8 @@ import torch
 # Initialize model and tokenizer globally
 @st.cache_resource
 def load_model_and_tokenizer():
-    tokenizer = AutoTokenizer.from_pretrained("
-    model = AutoModelForCausalLM.from_pretrained("
+    tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-1.5B-Instruct")
+    model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-1.5B-Instruct")
     if torch.cuda.is_available():
         model = model.to("cuda")
     return model, tokenizer
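Only lines 14-21 of app.py appear in this hunk, so the rest of the file is not shown. Below is a minimal sketch of how the cached loader could be wired into the rest of a Streamlit app; the imports, the text-input UI, the chat-template prompt handling, and the generation parameters are assumptions for illustration, not the Space's actual code.

import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Loader as it appears in the diff: cached so the model is loaded only once per process.
@st.cache_resource
def load_model_and_tokenizer():
    tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-1.5B-Instruct")
    model = AutoModelForCausalLM.from_pretrained("Qwen/Qwen2.5-1.5B-Instruct")
    if torch.cuda.is_available():
        model = model.to("cuda")
    return model, tokenizer

model, tokenizer = load_model_and_tokenizer()

# Assumed UI: a single text box and one-shot generation (the real app.py may differ).
prompt = st.text_input("Prompt")
if prompt:
    # Qwen2.5-Instruct is a chat model, so build the input with the tokenizer's chat template.
    messages = [{"role": "user", "content": prompt}]
    text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer(text, return_tensors="pt").to(model.device)
    output_ids = model.generate(**inputs, max_new_tokens=256)
    # Decode only the newly generated tokens, skipping the prompt portion.
    reply = tokenizer.decode(output_ids[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)
    st.write(reply)

Because load_model_and_tokenizer() is decorated with @st.cache_resource, Streamlit loads the model and tokenizer once per server process and reuses them across reruns instead of reloading them on every user interaction.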