Skip to content

Commit eef51c4

Browse files
committed
Bug fixes, added proxy checker, sample proxy lists
1 parent 4bb57fe commit eef51c4

7 files changed

Lines changed: 108 additions & 522 deletions

File tree

AVAILABLE.txt

Lines changed: 0 additions & 17 deletions
This file was deleted.

UsernameChecker.py

Lines changed: 61 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,15 @@
1010
from termcolor import colored
1111
from bs4 import BeautifulSoup
1212

13+
# Globals
14+
WORD = ""
15+
1316
# CLI Arguments
1417
WORD_LIST = "word_lists/WORD-LIST-1"
1518
OUTPUT = "AVAILABLE.txt"
1619

1720
# Regex Patterns
21+
PLACEHOLDER = r"%word%"
1822
URLPATT = r"(^https?:\/\/[-.a-zA-Z0-9]+)"
1923
DOMAIN = r"https?:\/\/(\w*)(?(1)\.|(?(1)\w*))"
2024

@@ -55,25 +59,44 @@ def generate_pw(size=16, chars=string.ascii_uppercase + string.digits + string.a
5559

5660
def replace(word):
5761
# Finds and replaces matches of the name variable with the actual word to insert in URL
58-
if int(SITE) != 4: # if not Instagram
62+
if int(SITE) == 1:
63+
x = re.sub(PLACEHOLDER, word, URLS[1])
64+
print(x)
65+
return x
66+
elif int(SITE) != 4: # if not Instagram
5967
return URLS[int(SITE)] % word
6068
else:
6169
print("instagram")
6270

63-
def get_proxy():
64-
if PROXY and (PROXYLIST != []):
71+
def get_proxy_list():
72+
if PROXY and (PROXYLIST != None):
6573
fx = open(PROXYLIST, 'r')
6674
proxies = fx.read().split('\n')
6775
fx.close()
6876

69-
i = random.randrange(0, proxies.__len__())
70-
return str(proxies[i])
77+
return proxies
7178
else:
7279
if not PROXY:
7380
print("Proxy support is disabled. Please enable it in the config.")
7481
elif PROXYLIST == []:
7582
print("No proxies available to use.")
7683

84+
def select_random_proxy(plist):
85+
i = random.randrange(0, plist.__len__())
86+
proxyDict[PROTOCOL] = "http://" + str(plist[i])
87+
88+
def check_proxy():
89+
try:
90+
requests.get(
91+
"https://google.com",
92+
proxies=proxyDict
93+
)
94+
except IOError:
95+
print("Proxy failed, trying another...")
96+
return False
97+
else:
98+
return True
99+
77100
def taken(word, service, error=None):
78101
if error != None:
79102
print(str(word) + " is " + colored('TAKEN', 'red', attrs=['bold']) + " on " + str(service) + " because " + str(error))
@@ -172,20 +195,34 @@ def prepare_headers(cookie):
172195

173196
def send_get(words):
174197
for w in range(words.__len__()):
175-
link = replace(words[w])
198+
WORD = words[w]
199+
link = replace(WORD)
176200
if PROXY:
177-
proxyDict[PROTOCOL] = get_proxy()
178-
r = s.get(link, proxies=proxyDict)
201+
pl = get_proxy_list()
202+
select_random_proxy(pl)
203+
if check_proxy():
204+
r = s.get(link, proxies=proxyDict)
205+
else:
206+
pl = get_proxy_list()
207+
select_random_proxy(pl)
208+
r = s.get(link, proxies=proxyDict)
179209
else:
180210
r = s.get(link)
181-
log_result(r, words[w], link)
211+
log_result(r, WORD, link)
182212

183213
def parse_page(words):
184214
for w in range(words.__len__()):
185-
link = replace(words[w])
215+
WORD = words[w]
216+
link = replace(WORD)
186217
if PROXY:
187-
proxyDict[PROTOCOL] = get_proxy()
188-
r = s.get(link, proxies=proxyDict)
218+
pl = get_proxy_list()
219+
select_random_proxy(pl)
220+
if check_proxy():
221+
r = s.get(link, proxies=proxyDict)
222+
else:
223+
pl = get_proxy_list()
224+
select_random_proxy(pl)
225+
r = s.get(link, proxies=proxyDict)
189226
else:
190227
r = s.get(link)
191228
page = r.content
@@ -209,22 +246,29 @@ def parse_page(words):
209246
matches = [match1, match2, match3]
210247
else:
211248
print("Wrong site!")
212-
log_result(r, words[w], link, matches=matches)
249+
log_result(r, WORD, link, matches=matches)
213250

214251
def send_post(words):
215252
cookie = get_cookie()
216253
header = prepare_headers(cookie)
217254
link = URLS[int(SITE)]
218255
for w in range(words.__len__()):
219-
payload = ready_payload(words[w])
256+
WORD = words[w]
257+
payload = ready_payload(WORD)
220258
r = None
221259
if PROXY:
222-
proxyDict[PROTOCOL] = get_proxy()
223-
r = s.post(URLS[int(SITE)], data=payload, headers=header, cookies=cookie, proxies=proxyDict)
260+
pl = get_proxy_list()
261+
select_random_proxy(pl)
262+
if check_proxy():
263+
r = s.post(link, data=payload, headers=header, cookies=cookie, proxies=proxyDict)
264+
else:
265+
pl = get_proxy_list()
266+
select_random_proxy(pl)
267+
r = s.post(link, data=payload, headers=header, cookies=cookie, proxies=proxyDict)
224268
else:
225269
r = s.post(URLS[int(SITE)], data=payload, headers=header, cookies=cookie)
226270

227-
log_result(r, words[w], link)
271+
log_result(r, WORD, link)
228272

229273
def main():
230274
# Reads word list from file and adds each name to array words[]

config.ini

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -20,25 +20,25 @@
2020
; Place the number from the table above that corresponds
2121
; with the site you want to check available names for.
2222
; If your target site is not listed, put "1" for CUSTOM (without the quotes).
23-
siteNum = 5
23+
siteNum = 3
2424
; Fill in the option below with the profile URL of the service you want to check available names for.
25-
; Use %s as the placeholder for the username to check.
25+
; Use %%word%% as the placeholder for the username to check.
2626
; customSite is only for sites not specifically listed in the chart above, but please be aware
2727
; that not every site will work this way. If there is a service you would like to see support for, please
2828
don't hesitate to let Croc know.
29-
customSite = https://example.com/%s
29+
customSite = "https://example.com/%%word%%"
3030

3131
[lists]
3232
; Be sure to include the file extension if it has one
3333
output = AVAILABLE.txt
3434
; Place all word lists in the word_lists directory
35-
wordList = word_lists/WORD-LIST-1
35+
wordList = word_lists/WORD-LIST-2
3636

3737
[proxy]
3838
; To enable proxy support, put True. To disable, put False
39-
enableProxy = False
39+
enableProxy = True
4040
; If proxy support is enabled, put http or https below depending on what type of proxies you are using.
41-
proxyProtocol = https
41+
proxyProtocol = http
4242
; If proxy support is enabled, you must specify the path to the proxy list you want to use here
4343
; Place all proxy lists in the proxy_lists directory
4444
; Place one proxy per line in this format "###.###.###.###:####"

proxy_lists/http_proxies

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
92.38.47.239:80
2+
93.90.220.11:80
3+
93.91.112.185:80
4+
45.55.27.246:80
5+
115.124.73.122:80
6+
178.251.142.178:8081
7+
82.149.207.86:8081
8+
185.148.220.11:8081
9+
203.146.82.253:80
10+
217.107.197.174:8081
11+
77.73.95.51:8081
12+
93.90.220.11:880
13+
188.32.118.182:8081
14+
193.24.196.152:8081
15+
81.211.43.102:8081
16+
62.176.5.93:8080
17+
92.38.47.226:80
18+
80.252.148.148:8080

proxy_lists/https_proxies.txt

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
45.55.27.246:80
2+
115.124.73.122:80
3+
62.176.5.93:8080
4+
176.106.145.122:8080
5+
149.56.201.254:3128
6+
144.217.96.213:3128
7+
149.56.64.58:80
8+
167.114.218.7:8080
9+
80.252.148.148:8080
10+
144.217.117.194:3128
11+
144.217.195.72:3128
12+
144.217.117.194:8080

proxy_lists/proxies.txt

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
62.250.84.188:80
2+
115.238.229.36:80
3+
45.55.27.246:80
4+
93.91.112.185:80
5+
93.90.220.11:80
6+
1.9.78.19:80
7+
1.9.78.30:80
8+
203.146.82.253:80
9+
202.201.64.112:80
10+
116.213.102.189:80
11+
116.213.102.189:80

0 commit comments

Comments
 (0)