-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathreddit_scrape.py
More file actions
32 lines (27 loc) · 977 Bytes
/
reddit_scrape.py
File metadata and controls
32 lines (27 loc) · 977 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
"""Scrape post titles from r/travel by repeatedly scrolling the Reddit feed.

Scrolls the page SCROLL_ROUNDS times so Reddit's infinite scroll lazy-loads
more posts, then collects every post-title <h2> and writes one title per
line to OUTPUT_PATH. Requires Safari's WebDriver (safaridriver) enabled.
"""
from time import sleep

from selenium import webdriver
from selenium.webdriver.common.by import By

# XPath to the post-title <h2> elements. Must select element nodes, not
# text() nodes: Selenium's find_elements can only return elements and
# raises InvalidSelectorException on a text-node selector. The visible
# title text is read via .text on each element below.
TITLE_XPATH = "//span/a/h2"

SCROLL_ROUNDS = 500     # scroll-to-bottom passes (more passes -> more posts)
SCROLL_PAUSE = 0.3      # seconds between scrolls; lets new content start loading
FINAL_WAIT = 10         # seconds for the last lazy-loaded batch to render
OUTPUT_PATH = "data/travel/reddit.txt"

driver = webdriver.Safari()
driver.get("https://www.reddit.com/r/travel")

# Initial title count, before any scrolling (progress/sanity output).
print(len(driver.find_elements(By.XPATH, TITLE_XPATH)))

try:
    for i in range(SCROLL_ROUNDS):
        print(i)
        driver.execute_script('window.scrollTo(0, document.body.scrollHeight);')
        sleep(SCROLL_PAUSE)
    # Give the final batch of posts time to finish rendering before harvesting.
    sleep(FINAL_WAIT)

    found_questions = driver.find_elements(By.XPATH, TITLE_XPATH)
    print(len(found_questions))

    # Explicit UTF-8: Reddit titles are frequently non-ASCII.
    with open(OUTPUT_PATH, 'w', encoding='utf-8') as f:
        for item in found_questions:
            f.write("%s\n" % item.text)
finally:
    # quit() ends the whole WebDriver session; close() alone would leave
    # the safaridriver process (and browser) running on failure.
    driver.quit()