Change Model
app.py CHANGED
@@ -49,7 +49,7 @@ except:
 #---------------------------------------
 
 #-------LOAD HUGGINGFACE PIPELINE-------
-tokenizer = AutoTokenizer.from_pretrained("
+tokenizer = AutoTokenizer.from_pretrained("defog/llama-3-sqlcoder-8b")
 
 quantization_config = BitsAndBytesConfig(
     load_in_4bit=True,
@@ -57,7 +57,7 @@ quantization_config = BitsAndBytesConfig(
     bnb_4bit_use_double_quant=True,
     bnb_4bit_quant_type= "nf4")
 
-model = AutoModelForCausalLM.from_pretrained("
+model = AutoModelForCausalLM.from_pretrained("defog/llama-3-sqlcoder-8b", quantization_config=quantization_config,
     device_map="auto", torch_dtype=torch.bfloat16)
 
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=1024, return_full_text=False)
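For reference, below is a minimal, self-contained sketch of the pipeline this change sets up, followed by one illustrative call. Only the model id, quantization flags, and pipeline arguments come from the diff; the imports are the standard transformers/torch ones they imply, and the example schema, question, and chat-template prompt format are hypothetical rather than part of app.py.

# Sketch of the loading code after this change, plus a hypothetical usage example.
import torch
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          BitsAndBytesConfig, pipeline)

tokenizer = AutoTokenizer.from_pretrained("defog/llama-3-sqlcoder-8b")

quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,                # 4-bit weights via bitsandbytes
    bnb_4bit_use_double_quant=True,   # nested quantization of the quantization constants
    bnb_4bit_quant_type="nf4")        # NormalFloat4 quantization type

model = AutoModelForCausalLM.from_pretrained(
    "defog/llama-3-sqlcoder-8b",
    quantization_config=quantization_config,
    device_map="auto",
    torch_dtype=torch.bfloat16)

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer,
                max_new_tokens=1024, return_full_text=False)

# Hypothetical call (not in app.py): build a prompt with the model's chat template
# and generate SQL for an example schema and question.
schema = "CREATE TABLE orders (id INT, customer TEXT, total NUMERIC, created_at DATE);"
question = "What is the total revenue per customer?"
messages = [{"role": "user",
             "content": f"Generate a SQL query to answer this question: {question}\n\n"
                        f"DDL statements:\n{schema}"}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(pipe(prompt)[0]["generated_text"])  # return_full_text=False -> only the completion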