```python
from random import choice

from nltk.tokenize import sent_tokenize, word_tokenize

# Split the source text into one quote per sentence.
with open("dixit.txt") as article:
    quotes = sent_tokenize(article.read())

text_in = ""
while text_in != "stop":
    text_in = input("> ")
    text_out = "> "

    # Tokenize the input and try the longest words first,
    # on the assumption that they carry the most meaning.
    words_in = word_tokenize(text_in)
    words_in.sort(key=len, reverse=True)

    # Look for the first quote that contains one of the user's words.
    quote_found = False
    for word in words_in:
        for quote in quotes:
            if word in quote:
                text_out += 'Speaking of %s, Einstein once said: "%s".' % (word, quote)
                quote_found = True
                break
        if quote_found:
            break

    # Reply with the matching quote, or fall back to a random one.
    print(text_out if quote_found else choice(quotes))
```
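One setup detail worth noting: `sent_tokenize` and `word_tokenize` rely on NLTK's Punkt tokenizer models, which are not bundled with the library itself, so the script will raise a `LookupError` if they are missing. The snippet below is a minimal, assumed one-time setup step (the exact resource name depends on your NLTK release):

```python
import nltk

# One-time download of the tokenizer models used by sent_tokenize / word_tokenize.
nltk.download("punkt")        # classic NLTK releases
# nltk.download("punkt_tab")  # required instead on more recent NLTK releases
```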