import random

import gradio as gr
from transformers import AutoModelForCausalLM

def LMmodel(message, history):
    base_model_id = "LeoLM/leo-mistral-hessianai-7b-chat"
    try:
        # device_map="auto" spreads the weights over the available devices and
        # spills anything that does not fit into the "offload" folder.
        base_model = AutoModelForCausalLM.from_pretrained(
            base_model_id,
            device_map="auto",
            offload_folder="offload",
            trust_remote_code=True,
        )
    except Exception as e:
        # Surface the loading error in the chat window instead of crashing.
        return str(e)
    return "works"

gr.ChatInterface(LMmodel).launch()
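
# --- A minimal sketch (not from the original snippet) of how the handler could
# --- produce real replies once loading succeeds. Assumptions: the model and
# --- tokenizer load once at import time instead of on every message, the model
# --- ships a chat template usable via apply_chat_template, the default
# --- ChatInterface history format of [user, assistant] pairs is in use, and the
# --- function name chat_fn plus the generation settings are illustrative.
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "LeoLM/leo-mistral-hessianai-7b-chat"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    offload_folder="offload",
    trust_remote_code=True,
)

def chat_fn(message, history):
    # Rebuild the conversation from the [user, assistant] pairs Gradio passes in.
    chat = []
    for user_turn, assistant_turn in history:
        chat.append({"role": "user", "content": user_turn})
        chat.append({"role": "assistant", "content": assistant_turn})
    chat.append({"role": "user", "content": message})
    # Tokenize with the model's chat template (assumed to exist for this model).
    input_ids = tokenizer.apply_chat_template(
        chat, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    with torch.no_grad():
        output = model.generate(
            input_ids, max_new_tokens=256, do_sample=True, temperature=0.7
        )
    # Return only the newly generated tokens as the assistant's reply.
    return tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)

gr.ChatInterface(chat_fn).launch()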