Hugging Face Space (currently showing "No application file"); the Gradio app code for the Space:
import spaces
import torch

print('torch version:', torch.__version__)

import gradio as gr
from unsloth import FastLanguageModel

# Model loading settings: 4-bit quantization keeps the 9B model within GPU memory,
# and dtype=None lets Unsloth pick float16/bfloat16 for the available hardware.
max_seq_length = 2048
dtype = None
load_in_4bit = True

model, tokenizer = FastLanguageModel.from_pretrained(
    model_name = "ua-l/gemma-2-9b-legal-steps200-uk",  # the fine-tuned model to serve
    max_seq_length = max_seq_length,
    dtype = dtype,
    load_in_4bit = load_in_4bit,
)

# Enable Unsloth's optimized inference mode.
FastLanguageModel.for_inference(model)
# Build the prompt in the same "### Question: / ### Answer:" format used for
# fine-tuning, generate up to 128 new tokens, and return the decoded text.
@spaces.GPU  # requests a GPU for this call; the `spaces` import suggests a ZeroGPU Space
def predict(question):
    inputs = tokenizer(
        [f'''### Question:
{question}
### Answer:
'''], return_tensors = "pt").to("cuda")
    outputs = model.generate(**inputs, max_new_tokens = 128)
    # Note: batch_decode returns the prompt together with the generated answer.
    results = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    return results[0]
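
# A sketch (not part of the original app; the function name is hypothetical): a
# variant that decodes only the newly generated tokens, so the echoed prompt is
# not included in the returned text.
@spaces.GPU
def predict_answer_only(question):
    inputs = tokenizer(
        [f'''### Question:
{question}
### Answer:
'''], return_tensors = "pt").to("cuda")
    outputs = model.generate(**inputs, max_new_tokens = 128)
    # generate() returns prompt + completion token ids; slice off the prompt length.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)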
# Gradio UI: a two-line textbox for the question, Markdown output for the answer.
# The default value is Ukrainian for "How do I receive IDP (internally displaced person) payments?".
inputs = gr.Textbox(lines=2, label="Enter a question", value="Як отримати виплати ВПО?")
outputs = gr.Markdown(label="Answer")

demo = gr.Interface(fn=predict, inputs=inputs, outputs=outputs)
demo.launch()
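
Once the Space is running, the same endpoint can also be called programmatically with gradio_client. A minimal sketch, assuming the app is deployed under the hypothetical Space id used below (for a gr.Interface wrapping predict, the endpoint name defaults to "/predict"):

from gradio_client import Client

# Hypothetical Space id; replace with the owner/name this app is actually deployed under.
client = Client("ua-l/gemma-2-9b-legal-demo")

answer = client.predict("Як отримати виплати ВПО?", api_name="/predict")
print(answer)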