import gradio as gr
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer


# Fine-tuned Turkish sports GPT-2 checkpoint; the tokenizer is loaded from
# the base model it was fine-tuned from.
model_name = "eminAydin/turkish-gpt2-mini-M1-cleaned-sports720k-10ep"
tokenizer = GPT2Tokenizer.from_pretrained("ytu-ce-cosmos/turkish-gpt2")
# GPT-2 has no pad token by default; reuse EOS so generation/padding works.
tokenizer.pad_token = tokenizer.eos_token
model = GPT2LMHeadModel.from_pretrained(model_name)

# Sampling settings shared by every generate() call.
generation_config = {
    "do_sample": True,           # sample instead of greedy decoding
    "temperature": 0.7,          # soften the token distribution
    "top_p": 0.9,                # nucleus sampling cutoff
    "repetition_penalty": 1.3,   # discourage repeated phrases
    "max_new_tokens": 50,
    "eos_token_id": tokenizer.eos_token_id,
    "pad_token_id": tokenizer.pad_token_id,
}


def launch(input):
    """Generate Turkish text continuing the given prompt.

    Args:
        input: Prompt string typed by the user. (Name kept for
            backward compatibility even though it shadows the builtin.)

    Returns:
        The decoded generation — prompt included — with special tokens
        stripped.
    """
    # Tokenize via __call__ so we also get the attention mask; generate()
    # warns (and can mis-mask) when pad_token == eos_token and no
    # attention_mask is supplied.
    encoded = tokenizer(input, return_tensors="pt")
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        output_ids = model.generate(
            encoded["input_ids"],
            attention_mask=encoded["attention_mask"],
            num_return_sequences=1,
            **generation_config,
        )
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)


# Wire the generator into a minimal web UI: one text box in, one text box out.
iface = gr.Interface(
    fn=launch,
    inputs="text",
    outputs="text",
    title="Turkish Text Generation with GPT-2",
    description="Enter a Turkish prompt and generate text using GPT-2.",
    theme="default",
)

iface.launch()