diff --git a/deepsearch.py b/deepsearch.py index 5d2f8ad..0e68828 100644 --- a/deepsearch.py +++ b/deepsearch.py @@ -9,10 +9,8 @@ term = input("Enter Search: ") results = searchFTS(term,con) -print("Found " + str(len(results)) + " Results.") +print(f"Found {len(results)} Results.") for i in results: - string = "" - for j in i: - string += j + string = "".join(i) print(string) diff --git a/grime.py b/grime.py index 19e93da..eb0a292 100644 --- a/grime.py +++ b/grime.py @@ -12,6 +12,7 @@ titlePrinter() check = rootcheck() masterList = [] +blacklist = ('http://76qugh5bey5gum7l.onion',) while len(inputList) > 0: if not os.path.exists("../output/deepminer.db"): deepminerDB = createDB() @@ -21,7 +22,6 @@ url = random.choice(inputList) torstatus() extensions = ('.jpg', 'jpeg', '.mp4', '.png', '.gif') - blacklist = ('http://76qugh5bey5gum7l.onion') if url not in masterList and not url.endswith(extensions) and not url.startswith(blacklist): print(Y +""" [GRIME] New Iteration:""") diff --git a/modules/deephelpers.py b/modules/deephelpers.py index 2b5d9c3..430a1b1 100644 --- a/modules/deephelpers.py +++ b/modules/deephelpers.py @@ -22,8 +22,13 @@ def onionHTML(url): proxy = httplib2.ProxyInfo(proxy_type=socks.PROXY_TYPE_SOCKS5, proxy_host='localhost', proxy_port=9050) http = httplib2.Http(proxy_info=proxy, timeout=30) content = http.request(url, headers={'Connection': 'close', 'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36'})[1] - html = str(content,'utf-8').replace('\t',' ').replace('\n',' ').replace('\r',' ').replace('\"','') - return html + return ( + str(content, 'utf-8') + .replace('\t', ' ') + .replace('\n', ' ') + .replace('\r', ' ') + .replace('\"', '') + ) except: return "None" @@ -56,12 +61,9 @@ def ahmia(): for matchNum, match in enumerate(matches, start=1): url = (match.group()) results.append(url) - ahmia = list(set(results)) - return ahmia + return list(set(results)) def 
redditOnions(): - results = [] - regex = r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion\/?[-a-zA-Z0-9@:%._\/+~#=]{1,256}" url = "https://www.reddit.com/r/onions/new.json?limit=10000000000000000000000000000000" req = request.Request(url, data=None, headers={'Connection': 'close', 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36'}) @@ -72,12 +74,13 @@ def redditOnions(): if "Traceback (most recent call last):" in dataString: redditOnions() else: + regex = r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion\/?[-a-zA-Z0-9@:%._\/+~#=]{1,256}" matches = re.finditer(regex, dataString, re.MULTILINE) + results = [] for matchNum, match in enumerate(matches, start=1): url = (match.group()) results.append(url) - reddit = list(set(results)) - return reddit + return list(set(results)) def torstatus(): torstatus = subprocess.getoutput("Service Tor Status | grep Active") @@ -104,7 +107,7 @@ def urlSplitter(url): directory = str(url.split(".org")[1]) url = str(url.split(".org")[0]) + ".org" else: - print(Y +"Unknown URL " + str(url)) + print(f"{Y}Unknown URL {url}") exit() if directory == "": directory = "/" @@ -116,8 +119,7 @@ def removeDuplicates(listOne, listTwo): - results = listOne + list(set(listTwo) - set(listOne)) - return results + return listOne + list(set(listTwo) - set(listOne)) def aTag(inputURL,html): if inputURL[-1] == "/": @@ -129,25 +131,21 @@ url = (match.group()) results.append(url) onions = list(set(results)) - for i in onions: - temp.append((i.replace("",""))) + temp.extend(onions) for i in temp: if "http" in i: - if ".onion" not in i: - pass - else: + if ".onion" in i: temp2.append(i) elif "mailto:" in i: pass elif i.startswith("../"): - i = i.replace("../",inputURL+"/") + i = i.replace("../", f"{inputURL}/") temp2.append(i) elif i.startswith("/"): temp2.append(inputURL+i) else: - 
temp2.append(inputURL + "/" + i) - aTag = list(set(temp2)) - return aTag + temp2.append(f"{inputURL}/{i}") + return list(set(temp2)) def inputAdder(newInput, input): for i in input: diff --git a/modules/deepsqlite.py b/modules/deepsqlite.py index fc35525..e0ffccb 100644 --- a/modules/deepsqlite.py +++ b/modules/deepsqlite.py @@ -2,12 +2,10 @@ import sqlite3 def createDB(): - con = sqlite3.connect('output/deepminer.db') - return con + return sqlite3.connect('output/deepminer.db') def connectDB(): - con = sqlite3.connect('output/deepminer.db', timeout=30) - return con + return sqlite3.connect('output/deepminer.db', timeout=30) def createTables(con): cur = con.cursor() @@ -66,8 +64,7 @@ def searchDB(term,con): cur = con.cursor() query = "SELECT URL,Directory FROM Deepdata WHERE HTML LIKE \'%" + term + "%\' ORDER BY URL;" cur.execute(query) - results = cur.fetchall() - return results + return cur.fetchall() def createFTStable(con): cur = con.cursor() @@ -83,5 +80,4 @@ def searchFTS(term,con): cur = con.cursor() query = "SELECT URL,Directory FROM Deepsearch WHERE HTML MATCH \'" + term + "\' ORDER BY rank;" cur.execute(query) - results = cur.fetchall() - return results + return cur.fetchall()