# GPT-3 equivalent (GPT-Neo 125M used as a small stand-in model):
import torch
import torch.nn as nn
from torch.nn import functional as F
import torch.optim.lr_scheduler
from transformers import pipeline
# Simple hyperparameters
max_new_tokens = 200 # token budget for generation (passed to generate() below)
# Load the pre-trained model; NOTE: downloads the weights from the HF hub on first run
generator = pipeline('text-generation', model='EleutherAI/gpt-neo-125M')
# Genera texto
# Generate text
def generate(start, max_new_tokens):
    """Generate a sampled continuation of the prompt *start*.

    Args:
        start: Prompt string fed to the model.
        max_new_tokens: Number of new tokens to generate beyond the prompt.

    Returns:
        The generated text (prompt included) as a string.
    """
    # Request exactly `max_new_tokens` new tokens instead of approximating a
    # total budget with max_length = word-count + tokens: whitespace words and
    # model tokens are not 1:1, so the old formula over- or under-shot the
    # intended generation length.
    result = generator(
        start,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=0.8,
        num_return_sequences=1,
        truncation=True,
    )
    return result[0]['generated_text']
print(generate('then', max_new_tokens)) # Smoke test: sample a continuation of the prompt 'then'