from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the tokenizer and model from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained("svjack/bloom-daliy-dialogue-english")
model = AutoModelForCausalLM.from_pretrained("svjack/bloom-daliy-dialogue-english")

# Encode the prompt, generate up to 128 tokens, decode the output,
# and split it into dialogue turns on the "\n-----\n" separator.
tokenizer.decode(
    model.generate(
        tokenizer.encode(
            "Are you hungry?", return_tensors="pt", add_special_tokens=True
        ),
        max_length=128,
    )[0],
    skip_special_tokens=True,
).split("\n-----\n")

'''
['Are you hungry?ou already have breakfast? ',
 " Sorry, I didn't order my breakfast. ",
 ' You just have to put your breakfast in the oven. ',
 ' OK, I will get it for you. ',
 ' Is there anything I can do for you? ',
 ' I also have a suit and a woolen sweater. ',
 ' What do you want? ',
 " I'd like a Sprite, please. ",
 ' What kind of juice do you want? ',
 ' Is there any extra water? ',
 " No, that's all. ",
 ' What kind of drink do you']
'''
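
For repeated use, the nested call above can be wrapped in a small helper. The sketch below is a minimal example that reuses the same checkpoint and the "\n-----\n" turn separator shown above; the function name generate_dialogue and the sampling settings (do_sample, top_p, temperature) are illustrative assumptions, not values from the model card.

from transformers import AutoTokenizer, AutoModelForCausalLM

def generate_dialogue(prompt, tokenizer, model, max_length=128):
    # Encode the prompt exactly as in the snippet above.
    input_ids = tokenizer.encode(
        prompt, return_tensors="pt", add_special_tokens=True
    )
    # Sampling parameters here are illustrative assumptions,
    # not settings taken from the model card.
    output_ids = model.generate(
        input_ids,
        max_length=max_length,
        do_sample=True,
        top_p=0.95,
        temperature=0.7,
    )[0]
    text = tokenizer.decode(output_ids, skip_special_tokens=True)
    # The model separates dialogue turns with "\n-----\n".
    return text.split("\n-----\n")

tokenizer = AutoTokenizer.from_pretrained("svjack/bloom-daliy-dialogue-english")
model = AutoModelForCausalLM.from_pretrained("svjack/bloom-daliy-dialogue-english")
print(generate_dialogue("Are you hungry?", tokenizer, model))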