Sukuna01 committed on
Commit
0b8d2c0
·
verified ·
1 Parent(s): d47e4ca

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -10
app.py CHANGED
@@ -1,22 +1,25 @@
1
  import gradio as gr
2
  import json
3
 
4
- # Your existing interface
5
  def query_model(prompt):
6
- # This will use the featherless-ai provider
7
- return prompt # The actual model runs through gr.load
8
-
9
- # Create a JSON API endpoint
10
- def api_endpoint(prompt):
11
- response = query_model(prompt)
12
- return json.dumps({
 
 
 
13
  "status": "success",
14
  "prompt": prompt,
15
  "response": response,
16
  "model": "open-r1/OlympicCoder-7B"
17
  })
18
 
19
- # Modified version with API capability
20
  with gr.Blocks() as demo:
21
  with gr.Row():
22
  with gr.Column(scale=3):
@@ -40,7 +43,7 @@ with gr.Blocks() as demo:
40
  clear.click(lambda: None, None, chatbot, queue=False)
41
 
42
  # API endpoint
43
- api_button.click(fn=api_endpoint, inputs=[api_input], outputs=[api_output])
44
 
45
  # Launch with debug info
46
  demo.launch(
 
1
  import gradio as gr
2
  import json
3
 
4
# Function to query the model
def query_model(prompt):
    """Return chat history plus a JSON API payload for *prompt*.

    Parameters:
        prompt: The user's input string.

    Returns:
        A 2-tuple of:
        - chatbot_messages: conversation history in the Gradio Chatbot
          "tuples" format — a list of [user_message, bot_message] pairs.
        - A JSON string describing the exchange for the API endpoint.
    """
    # Simulate the response (replace with actual model logic)
    response = f"Python function for {prompt}"

    # Gradio's Chatbot (tuples format) expects [user_msg, bot_msg] pairs.
    # The previous [["user", prompt], ["assistant", response]] shape would
    # render the literal strings "user"/"assistant" as chat messages.
    # (If the Chatbot is created with type="messages", switch this to
    # [{"role": ..., "content": ...}] dicts instead — confirm against the
    # component's configuration.)
    chatbot_messages = [[prompt, response]]

    return chatbot_messages, json.dumps({
        "status": "success",
        "prompt": prompt,
        "response": response,
        "model": "open-r1/OlympicCoder-7B"
    })
21
 
22
+ # Create the Gradio interface
23
  with gr.Blocks() as demo:
24
  with gr.Row():
25
  with gr.Column(scale=3):
 
43
  clear.click(lambda: None, None, chatbot, queue=False)
44
 
45
  # API endpoint
46
+ api_button.click(fn=lambda x: query_model(x)[1], inputs=[api_input], outputs=[api_output])
47
 
48
  # Launch with debug info
49
  demo.launch(