Update app.py
app.py
CHANGED
@@ -1,21 +1,22 @@
 from transformers import pipeline
 import gradio as gr
 
-#
+# Load the KoAlpaca model (balances speed and quality)
 generator = pipeline(
     "text-generation",
-    model="
-    tokenizer="
+    model="beomi/KoAlpaca-Polyglot-1.1B",
+    tokenizer="beomi/KoAlpaca-Polyglot-1.1B",
+    device_map="auto"
 )
 
 def answer_question(prompt):
     system_prompt = (
-        "You are a Korean
-        "
+        "You are an AI that provides Korean university admissions information. "
+        "Explain admission tracks such as the CSAT, comprehensive student records, essay exams, and regular admissions in an easy-to-understand way.\n\n"
     )
     response = generator(
         system_prompt + prompt,
-        max_new_tokens=
+        max_new_tokens=250,
         temperature=0.7,
         top_p=0.9,
         do_sample=True
@@ -27,11 +28,11 @@ app = gr.Interface(
     inputs=gr.Textbox(
         lines=2,
         label="Enter an admissions question",
-        placeholder="e.g.:
+        placeholder="e.g.: Gachon University essay track / 2025 CSAT schedule / Tech University of Korea admissions summary"
     ),
     outputs=gr.Textbox(label="AI answer"),
-    title="
-    description="
+    title="Admissions Expert AI (KoAlpaca 1.1B)",
+    description="A fast, smart admissions-information chatbot. It provides explanations grounded in actual admission-track information."
 )
 
 if __name__ == "__main__":
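
The diff cuts off before the end of answer_question and the final launch call, so for context, below is a minimal, self-contained sketch of how the rest of app.py typically looks with this pipeline setup. The prompt-stripping logic, the Interface wiring shown here, and the launch() call are assumptions based on the usual transformers/Gradio pattern, not part of the commit.

# Minimal sketch (assumption): how answer_question typically finishes and how
# the Gradio app is usually launched; these lines are not visible in the diff.
from transformers import pipeline
import gradio as gr

generator = pipeline(
    "text-generation",
    model="beomi/KoAlpaca-Polyglot-1.1B",
    tokenizer="beomi/KoAlpaca-Polyglot-1.1B",
    device_map="auto"
)

def answer_question(prompt):
    system_prompt = "You are an AI that provides Korean university admissions information.\n\n"
    full_prompt = system_prompt + prompt
    response = generator(
        full_prompt,
        max_new_tokens=250,
        temperature=0.7,
        top_p=0.9,
        do_sample=True
    )
    # The text-generation pipeline returns a list of dicts whose
    # "generated_text" echoes the prompt, so strip the prompt prefix
    # to return only the model's continuation.
    generated = response[0]["generated_text"]
    return generated[len(full_prompt):].strip()

app = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(lines=2, label="Enter an admissions question"),
    outputs=gr.Textbox(label="AI answer")
)

if __name__ == "__main__":
    app.launch()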