original model: agemagician/mlong-t5-tglobal-large

adaptation guide: https://towardsdatascience.com/how-to-adapt-a-multilingual-t5-model-for-a-single-language-b9f94f3d9c90
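
The linked guide's core idea is to shrink the multilingual vocabulary to the tokens a single language actually uses and to cut the embedding matrix down to the matching rows (which is why the checkpoint is loaded with `ignore_mismatched_sizes=True` below). The sketch that follows only illustrates that idea; the placeholder corpus, variable names, and the direct trimming of `lm_head` are assumptions of this example, not the exact adaptation script used for this model:

```python
import torch
from transformers import MT5Tokenizer, LongT5ForConditionalGeneration

base_name = "agemagician/mlong-t5-tglobal-large"
tokenizer = MT5Tokenizer.from_pretrained(base_name)
model = LongT5ForConditionalGeneration.from_pretrained(base_name)

# Collect the token ids that actually occur in a target-language corpus
# (a tiny placeholder corpus here), plus the special tokens.
corpus = ["Пример русского текста.", "Ещё один пример."]
kept_ids = sorted(
    {tokenizer.pad_token_id, tokenizer.eos_token_id, tokenizer.unk_token_id}
    | {i for text in corpus for i in tokenizer(text).input_ids}
)

# Shrink the input embedding matrix to the kept rows.
old_emb = model.get_input_embeddings().weight.data
new_emb = torch.nn.Embedding(len(kept_ids), old_emb.shape[1])
new_emb.weight.data = old_emb[kept_ids]
model.set_input_embeddings(new_emb)

# LongT5 keeps a separate (untied) lm_head, so shrink it the same way.
old_head = model.lm_head.weight.data
new_head = torch.nn.Linear(old_head.shape[1], len(kept_ids), bias=False)
new_head.weight.data = old_head[kept_ids]
model.lm_head = new_head

model.config.vocab_size = len(kept_ids)
# The guide also rebuilds the sentencepiece vocabulary so that token ids are
# remapped to the new, smaller range; that step is omitted in this sketch.
```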

```python
import torch
from transformers import MT5Tokenizer, LongT5ForConditionalGeneration

model_name = "dantepalacio/ruLongT5-Large"
tokenizer = MT5Tokenizer.from_pretrained(model_name)
# ignore_mismatched_sizes=True lets the checkpoint load even though its
# (reduced) vocabulary size differs from the base LongT5 configuration.
model = LongT5ForConditionalGeneration.from_pretrained(model_name, ignore_mismatched_sizes=True)
```