import os

import gradio as gr
from transformers import pipeline, set_seed
from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
# Model configuration: StarCoder checkpoint, run on CPU.
checkpoint = "bigcode/starcoder-3b"
device = "cpu"


# The tokenizer files are fetched anonymously; the model weights are gated
# on the Hugging Face Hub and require an access token supplied through the
# ACCESS_TOKEN environment variable (KeyError here means it is not set).
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(
    checkpoint, token=os.environ["ACCESS_TOKEN"]
).to(device)


# Fix the RNG seed so generations are reproducible across runs.
set_seed(42)
|
|
|
|
def Bemenet(bemenet):
    """Generate a model completion for the given prompt text.

    Args:
        bemenet: Prompt string entered in the Gradio textbox.

    Returns:
        The decoded generation for the first (only) sequence in the
        batch, including the prompt and any special tokens.
    """
    token_ids = tokenizer.encode(bemenet, return_tensors="pt").to(device)
    generated = model.generate(token_ids)
    completion = tokenizer.decode(generated[0])
    return completion
|
|
|
|
# Wire the generation function into a simple text-in / text-out web UI.
interface = gr.Interface(
    fn=Bemenet,
    inputs="text",
    outputs="text",
    title="Cím..",
    description="Leírás..",
)


# Start the Gradio server (blocks until the app is stopped).
interface.launch()