1010from termcolor import colored
1111from bs4 import BeautifulSoup
1212
13+ # Globals
14+ WORD = ""
15+
1316# CLI Arguments
1417WORD_LIST = "word_lists/WORD-LIST-1"
1518OUTPUT = "AVAILABLE.txt"
1619
1720# Regex Patterns
21+ PLACEHOLDER = r"%word%"
1822URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)"
1923DOMAIN = r"https?:\/\/(\w*)(?(1)\.|(?(1)\w*))"
2024
@@ -55,25 +59,44 @@ def generate_pw(size=16, chars=string.ascii_uppercase + string.digits + string.a
5559
def replace(word):
    """Build the check URL for *word* on the configured site.

    Site 1 substitutes *word* into its URL template via the
    PLACEHOLDER regex; Instagram (site 4) is only announced here and
    handled elsewhere; every other site uses %-interpolation on its
    URLS template. Returns the URL string (None for Instagram).
    """
    site = int(SITE)
    if site == 1:
        url = re.sub(PLACEHOLDER, word, URLS[1])
        print(url)
        return url
    if site != 4:  # everything except Instagram is a plain template fill
        return URLS[site] % word
    print("instagram")  # Instagram path: no URL built here, returns None
6270
def get_proxy_list():
    """Read the configured proxy-list file and return its entries.

    Returns a list of non-empty proxy address strings (one per line
    of the PROXYLIST file), or None when proxy support is disabled or
    no list path is configured (a notice is printed in those cases).
    """
    if PROXY and PROXYLIST is not None:
        # Context manager guarantees the file is closed even if read() raises.
        with open(PROXYLIST, 'r') as fx:
            # Filter blank lines: a trailing newline would otherwise yield an
            # empty-string "proxy" that breaks the request later.
            return [line for line in fx.read().split('\n') if line]
    if not PROXY:
        print("Proxy support is disabled. Please enable it in the config.")
    else:
        # PROXYLIST is None here; the old `elif PROXYLIST == []` could never
        # match it, so this message was silently skipped.
        print("No proxies available to use.")
    return None
7683
def select_random_proxy(plist):
    """Pick one random entry from *plist* and install it in proxyDict.

    Mutates the module-level proxyDict, mapping the configured
    PROTOCOL key to an "http://" URL for the chosen proxy. Raises
    ValueError (via randrange) when *plist* is empty, same as before.
    """
    # len() over plist.__len__(); randrange keeps the original
    # empty-list exception type.
    chosen = plist[random.randrange(len(plist))]
    proxyDict[PROTOCOL] = "http://" + str(chosen)
87+
def check_proxy():
    """Probe the currently selected proxy with a lightweight GET.

    Returns True when a request through proxyDict succeeds, False
    (after printing a notice) when it fails. A timeout is required
    here: without one, a dead proxy — the very thing this check
    exists to detect — blocks the whole scan indefinitely.
    """
    try:
        requests.get(
            "https://google.com",
            proxies=proxyDict,
            # requests.exceptions.Timeout subclasses IOError, so the
            # handler below catches timed-out proxies too.
            timeout=10,
        )
    except IOError:
        print("Proxy failed, trying another...")
        return False
    return True
99+
77100def taken (word , service , error = None ):
78101 if error != None :
79102 print (str (word ) + " is " + colored ('TAKEN' , 'red' , attrs = ['bold' ]) + " on " + str (service ) + " because " + str (error ))
@@ -172,20 +195,34 @@ def prepare_headers(cookie):
172195
def send_get(words):
    """GET the check URL for every word and log each result.

    For each entry in *words*, builds the URL via replace() and
    fetches it through the shared session `s`, optionally through a
    random proxy. Each response goes to log_result().
    """
    # Iterate the list directly instead of range(len(...)). The old
    # `WORD = words[w]` only created a local that shadowed the module
    # global WORD (no `global` statement), so it is dropped.
    for word in words:
        link = replace(word)
        if PROXY:
            # Install a random proxy; if it fails the health check,
            # re-roll once and use the replacement unconditionally
            # (same single-retry behavior as before, without the
            # duplicated request call).
            # NOTE(review): get_proxy_list() may return None when
            # PROXYLIST is unset — confirm upstream config guards this.
            select_random_proxy(get_proxy_list())
            if not check_proxy():
                select_random_proxy(get_proxy_list())
            r = s.get(link, proxies=proxyDict)
        else:
            r = s.get(link)
        log_result(r, word, link)
182212
183213def parse_page (words ):
184214 for w in range (words .__len__ ()):
185- link = replace (words [w ])
215+ WORD = words [w ]
216+ link = replace (WORD )
186217 if PROXY :
187- proxyDict [PROTOCOL ] = get_proxy ()
188- r = s .get (link , proxies = proxyDict )
218+ pl = get_proxy_list ()
219+ select_random_proxy (pl )
220+ if check_proxy ():
221+ r = s .get (link , proxies = proxyDict )
222+ else :
223+ pl = get_proxy_list ()
224+ select_random_proxy (pl )
225+ r = s .get (link , proxies = proxyDict )
189226 else :
190227 r = s .get (link )
191228 page = r .content
@@ -209,22 +246,29 @@ def parse_page(words):
209246 matches = [match1 , match2 , match3 ]
210247 else :
211248 print ("Wrong site!" )
212- log_result (r , words [ w ] , link , matches = matches )
249+ log_result (r , WORD , link , matches = matches )
213250
def send_post(words):
    """POST a check payload for every word and log each result.

    Cookie, headers and the target URL are prepared once; each
    word's payload is built by ready_payload() and submitted through
    the shared session `s`, optionally via a random proxy.
    """
    cookie = get_cookie()
    header = prepare_headers(cookie)
    link = URLS[int(SITE)]  # computed once; reused everywhere below
    for word in words:
        payload = ready_payload(word)
        if PROXY:
            # Install a random proxy; on a failed health check,
            # re-roll once and use the replacement unconditionally
            # (same single-retry behavior, without duplicating the
            # post call).
            select_random_proxy(get_proxy_list())
            if not check_proxy():
                select_random_proxy(get_proxy_list())
            r = s.post(link, data=payload, headers=header,
                       cookies=cookie, proxies=proxyDict)
        else:
            # Previously recomputed URLS[int(SITE)] here; reuse link
            # for consistency with the proxied branch.
            r = s.post(link, data=payload, headers=header, cookies=cookie)
        log_result(r, word, link)
228272
229273def main ():
230274 # Reads word list from file and adds each name to array words[]
0 commit comments