-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbuild_dictionary.py
More file actions
67 lines (50 loc) · 1.49 KB
/
build_dictionary.py
File metadata and controls
67 lines (50 loc) · 1.49 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import sys
import fileinput
from collections import OrderedDict

import numpy
from nltk.tokenize import WordPunctTokenizer

# cPickle only exists on Python 2; fall back to Python 3's pickle
# (which uses the C implementation automatically).
try:
    import cPickle as pkl
except ImportError:
    import pickle as pkl
def main(tokenize=None):
    """Build frequency-ranked word and subreddit index dictionaries.

    For every .tsv file named on the command line (columns:
    ``subreddit<TAB>text``), count token and subreddit frequencies and
    pickle two lookup tables next to the input file:

    * ``<filename>_worddict.pkl`` -- token -> integer id; ids 0 and 1
      are reserved for ``'eos'`` and ``'UNK'``, and more frequent
      tokens get lower ids.
    * ``<filename>_srdict.pkl``   -- subreddit -> integer id; id 0 is
      reserved for ``'UNK'``.

    Parameters
    ----------
    tokenize : callable, optional
        A ``str -> list[str]`` tokenizer.  Defaults to nltk's
        ``WordPunctTokenizer().tokenize``, preserving the original
        behavior.
    """
    if tokenize is None:
        tokenize = WordPunctTokenizer().tokenize
    for filename in sys.argv[1:]:
        print('Processing %s' % filename)
        word_freqs = OrderedDict()
        sr_freqs = OrderedDict()
        with open(filename, 'r') as f:
            for line in f:
                # By convention the .tsv columns are: subreddit, text.
                parts = line.split('\t')
                if len(parts) < 2:
                    # Skip malformed lines instead of raising IndexError.
                    continue
                subreddit = parts[0]
                text = parts[1]
                for w in tokenize(text):
                    word_freqs[w] = word_freqs.get(w, 0) + 1
                sr_freqs[subreddit] = sr_freqs.get(subreddit, 0) + 1
        # Rank tokens by descending frequency; ids 0/1 are reserved.
        # sorted() replaces the numpy argsort over dict views, which
        # breaks under Python 3 (dict.keys() is not indexable there).
        worddict = OrderedDict()
        worddict['eos'] = 0
        worddict['UNK'] = 1
        ranked_words = sorted(word_freqs, key=word_freqs.get, reverse=True)
        for ii, ww in enumerate(ranked_words):
            worddict[ww] = ii + 2
        with open('%s_worddict.pkl' % filename, 'wb') as f:
            pkl.dump(worddict, f)
        # Same ranking for subreddits; id 0 is reserved for 'UNK'.
        srdict = OrderedDict()
        srdict['UNK'] = 0
        ranked_srs = sorted(sr_freqs, key=sr_freqs.get, reverse=True)
        for ii, sr in enumerate(ranked_srs):
            srdict[sr] = ii + 1
        with open('%s_srdict.pkl' % filename, 'wb') as f:
            pkl.dump(srdict, f)
        print('Done')
# Script entry point: build the pickled word/subreddit dictionaries for
# each .tsv file named on the command line.
if __name__ == '__main__':
    main()