-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathController.py
More file actions
57 lines (52 loc) · 3.86 KB
/
Controller.py
File metadata and controls
57 lines (52 loc) · 3.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
from NLP import NLP
from TwitterAPI import TwitterAPI
import argparse
if __name__ == '__main__':
    # Entry point for Twitter Influencer AI.
    # Two modes:
    #   --stream_twitter            collect influencer tweets via the Twitter API
    #   --train_model N             train the text model for N epochs
    #   --generate_text L           generate L characters of text
    #                               (requires --checkpoint and --seed)
    print("\n> Welcome to Twitter Influencer AI\n")
    print( "████████▀▀░░░░░░░░░░░░░░░░░░░▀▀████████"+"\n"+
           "██████▀░░░░░░░░░░░░░░░░░░░░░░░░░▀██████"+"\n"+
           "█████░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█████"+"\n"+
           "████░░░░░▄▄▄▄▄▄▄░░░░░░░░▄▄▄▄▄▄░░░░░████"+"\n"+
           "████░░▄██████████░░░░░░██▀░░░▀██▄░░████"+"\n"+
           "████░░███████████░░░░░░█▄░░▀░░▄██░░████"+"\n"+
           "█████░░▀▀███████░░░██░░░██▄▄▄█▀▀░░█████"+"\n"+
           "██████░░░░░░▄▄▀░░░████░░░▀▄▄░░░░░██████"+"\n"+
           "█████░░░░░█▄░░░░░░▀▀▀▀░░░░░░░█▄░░░█████"+"\n"+
           "█████░░░▀▀█░█▀▄▄▄▄▄▄▄▄▄▄▄▄▄▀██▀▀░░█████"+"\n"+
           "██████░░░░░▀█▄░░█░░█░░░█░░█▄▀░░░░██▀▀▀▀"+"\n"+
           "▀░░░▀██▄░░░░░░▀▀█▄▄█▄▄▄█▄▀▀░░░░▄█▀░░░▄▄"+"\n"+
           "▄▄▄░░░▀▀██▄▄▄▄░░░░░░░░░░░░▄▄▄███░░░▄██▄"+"\n"+
           "██████▄▄░░▀█████▀█████▀██████▀▀░░▄█████"+"\n"+
           "██████████▄░░▀▀█▄░░░░░▄██▀▀▀░▄▄▄███▀▄██")

    parser = argparse.ArgumentParser()
    parser.add_argument("--stream_twitter", action="store_true",
                        help="stream influencer tweets for data collection")
    parser.add_argument("--train_model",
                        help="number of epochs to train for")
    parser.add_argument("--generate_text",
                        help="length of the text to generate")
    parser.add_argument("--checkpoint",
                        help="checkpoint file (under checkpoints/) to restore for generation")
    parser.add_argument("--seed",
                        help="seed string to prime text generation")
    args = parser.parse_args()

    if args.stream_twitter:
        # Twitter API credentials — placeholders, supply your own keys.
        access_token = "YOUR KEY"
        access_token_secret = "YOUR KEY"
        api_key = "YOUR KEY"
        api_secret_key = "YOUR KEY"
        # Data Collection
        twitter_api = TwitterAPI(access_token, access_token_secret, api_key, api_secret_key)
        twitter_api.streamTweets(
            ['#InfluencerLife', '#Influencer', '#beauty', '#makeup', '#lifestyle', '#fashion', '#instagram'])
    else:
        # Context manager closes the file handle (the original leaked it
        # via open(...).read()).
        with open('data/KyleJenner.txt', 'rb') as corpus_file:
            text = corpus_file.read().decode(encoding='utf-8')
        nlp = NLP()
        dataset, vocabulary = nlp.preprocess(text)
        dataset, vocabulary_size, embedding_dimension, rnn_nodes, batch_size = nlp.prepareSettings(dataset, vocabulary)
        if args.train_model:
            model = nlp.buildModel(vocabulary_size, embedding_dimension, batch_size, rnn_nodes)
            nlp.trainModel(dataset, model, 'checkpoints/', int(args.train_model))
        elif args.generate_text:
            # Original crashed with an opaque TypeError ('checkpoints/' + None)
            # when these flags were omitted; fail with a clear usage message.
            if args.checkpoint is None or args.seed is None:
                parser.error("--generate_text requires --checkpoint and --seed")
            # batch_size=1: generation feeds one sequence at a time.
            model = nlp.buildModel(vocabulary_size, embedding_dimension, 1, rnn_nodes)
            print(model.summary())
            generated_text = nlp.generateText(model, 'checkpoints/' + args.checkpoint, args.seed, int(args.generate_text))
            print(generated_text)
        else:
            print("\n > No such option \n")