Rebased to slim, reduced NLTK corpus dependencies
The image now has a much smaller footprint of ca. 300 MB rather than 9 GB.
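Most of that saving comes from fetching only the WordNet corpus instead of the full NLTK data collection pulled in by "python -m nltk.downloader all", with the slim base image accounting for the rest. As a quick sanity check that the one remaining corpus is enough for the bot, something like the following can be run inside the container (a sketch only, not part of this commit; it assumes nltk is installed via requirements.txt):

    import nltk
    from nltk.corpus import wordnet as wn

    # Mirrors what "RUN python -m nltk.downloader wordnet" does at build time
    nltk.download('wordnet', quiet=True)

    # The script only needs synset lookups, so one lookup confirms the corpus is usable
    print(wn.synsets('persevering'))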
Dockerfile
@@ -1,7 +1,7 @@
 FROM python:slim
 COPY . /usr/src/app
 WORKDIR /usr/src/app
-RUN pip install --upgrade pip
-RUN pip install -r requirements.txt
-RUN python -m nltk.downloader all
+RUN pip install --upgrade pip && pip install -r requirements.txt && \
+    apt-get autoremove -y
+RUN python -m nltk.downloader wordnet
 CMD python3 -u ./script.py
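For reference, the Dockerfile after this commit should read roughly as follows (reconstructed from the unchanged and added lines above, so the exact whitespace is an assumption):

    FROM python:slim
    COPY . /usr/src/app
    WORKDIR /usr/src/app
    RUN pip install --upgrade pip && pip install -r requirements.txt && \
        apt-get autoremove -y
    RUN python -m nltk.downloader wordnet
    CMD python3 -u ./script.py

Merging the separate pip steps into one RUN also leaves a single image layer for the Python dependencies instead of two.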
post_log.txt
@@ -1 +0,0 @@
-2021-03-19-00:28:14: What is artificial intelligence, if not mucopolysaccharidosis persevering?
script.py
@@ -17,7 +17,7 @@ auth.set_access_token(os.getenv('ACCESS_TOKEN'), os.getenv('ACCESS_TOKEN_SECRET'
 api = tweepy.API(auth)

 def generate_words():
-    syllables = random.randint(1,sum(1 for file in os.listdir('/usr/src/app/lists')))
+    syllables = random.randint(1,sum(1 for file in os.listdir('lists')))
     w1 = get_word(syllables)
     w2 = get_word(syllables)
     w1_synset = wn.synsets(w1.replace(' ', '_'))[0]
@@ -31,7 +31,7 @@ def generate_words():

 def get_word(syl):
     fileName = f'{syl}_syllable.txt'
-    with open(f'/usr/src/app/lists/{fileName}', 'r') as wordList:
+    with open(f'lists/{fileName}', 'r') as wordList:
         l = sum(1 for line in wordList) - 1
         c = random.randint(0,l)
         wordList.seek(0)
@@ -49,7 +49,7 @@ def posting():
     output = f'What is {words[0]} if not {words[1]} persevering?'
     print(output)
     api.update_status(output)
-    with open('/usr/src/app/post_log.txt', 'a') as logFile:
+    with open('post_log.txt', 'a+') as logFile:
         logFile.write(f'{datetime.now().strftime("%Y-%m-%d-%X")}: {output}\n')

     now = datetime.now()
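Dropping the absolute /usr/src/app/... prefixes is safe because the Dockerfile sets WORKDIR /usr/src/app, so the container already starts the script with the app root as its working directory. A small illustration (not part of the commit, purely to show why the relative paths resolve correctly):

    import os

    # Inside the container the WORKDIR directive makes the current directory /usr/src/app,
    # so the relative 'lists' path points at the same place '/usr/src/app/lists' did
    print(os.getcwd())
    print(sum(1 for f in os.listdir('lists')))  # the file count generate_words() draws from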