#!/usr/local/bin/python3.9

import tweepy
import time
import os
import random
import math
from datetime import datetime, timedelta

from nltk.corpus import wordnet as wn

# Log the startup time so scheduled runs are easy to trace.
print(datetime.now().strftime("%H:%M:%S"))

callback_uri = 'oob'  # https://cfe.sh/twitter/callback

# OAuth 1.0a user-context authentication; all credentials are read from
# environment variables so no secrets live in the source.
auth = tweepy.OAuthHandler(os.getenv('API_TOKEN'), os.getenv('API_KEY_SECRET'), callback_uri)
auth.set_access_token(os.getenv('ACCESS_TOKEN'), os.getenv('ACCESS_TOKEN_SECRET'))

api = tweepy.API(auth)


def generate_words():
    # One word list file per syllable count, e.g. lists/2_syllable.txt.
    syllables = random.randint(1, len(os.listdir('lists')))
    w1 = get_word(syllables)
    w2 = get_word(syllables)
    w1_synset = wn.synsets(w1.replace(' ', '_'))[0]
    w2_synset = wn.synsets(w2.replace(' ', '_'))[0]
    i = 0
    # Re-draw the second word until it differs from the first and the pair is
    # semantically close (Wu-Palmer similarity >= 0.66), giving up after 100 tries.
    # wup_similarity() can return None for unrelated synsets, so treat that as 0.
    while w1 == w2 or ((w1_synset.wup_similarity(w2_synset) or 0) < 0.66 and i < 100):
        w2 = get_word(syllables)
        w2_synset = wn.synsets(w2.replace(' ', '_'))[0]
        i += 1
    return [w1, w2]


def get_word(syl):
    fileName = f'{syl}_syllable.txt'
    with open(f'lists/{fileName}', 'r') as wordList:
        # Count the lines, pick a random index, then rewind and scan to it.
        l = sum(1 for line in wordList) - 1
        c = random.randint(0, l)
        wordList.seek(0)
        for i, line in enumerate(wordList):
            if i == c:
                # The last line of each list has no trailing newline,
                # so only earlier lines need the newline stripped.
                if c == l:
                    return line
                else:
                    return line[:-1]
            elif i > c:
                break


def posting():
    words = generate_words()
    output = f'What is {words[0]} if not {words[1]} persevering?'
    print(output)
    api.update_status(output)
    # Keep a local, timestamped log of everything that was posted.
    with open('post_log.txt', 'a+') as logFile:
        logFile.write(f'{datetime.now().strftime("%Y-%m-%d-%X")}: {output}\n')


# Sleep until the top of the next hour so posts always land on the hour.
now = datetime.now()
nextHour = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
tdelta = nextHour - datetime.now()
sec = math.floor(tdelta.total_seconds())
print(f'Waiting until {nextHour} for first post, sleeping for {sec} seconds.')
time.sleep(sec)

while True:
    posting()
    time.sleep(3600)