2 changes: 2 additions & 0 deletions Projects/Password_Validator/Password_Validator.py
@@ -1,4 +1,6 @@
import re
# re is a built-in Python module that provides support for regular expressions
# regular expressions are used to check for the presence of a certain pattern in a string.

# function to check whether the password is valid or not.
def validator(password):
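For context, a minimal sketch of what a regex-based validator like this one might check (hypothetical rules; the function body is elided from this diff):

import re

def validator(password):
    # hypothetical policy: at least 8 characters, with a lowercase letter,
    # an uppercase letter, a digit, and a special character
    if len(password) < 8:
        return False
    for pattern in (r"[a-z]", r"[A-Z]", r"[0-9]", r"[!@#$%^&*]"):
        if not re.search(pattern, password):
            return False
    return True

print(validator("Abcdef1!"))  # True
print(validator("short"))     # False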
7 changes: 5 additions & 2 deletions Projects/ROCK PAPER SCISSORS/RockPaperScissors.py
@@ -8,8 +8,11 @@

#INPUT;
games=int(input("\nEnter the number of games you want to play: "))
game_counter=0

while(comp_count+user_count<games):
# a game counter variable keeps track of the number of matches played; the earlier loop could run forever if ties kept occurring, so this ensures the number of matches played equals the number the user asked for

while(game_counter<games):
#WHILE LOOP STARTS;

flag=0
@@ -42,7 +45,7 @@

print("\nSCORE:")
print("User Score:",user_count,"\tComputer Score:",comp_count,"\n")

game_counter+=1
#LOOP ENDS;

print("\n\t\tFINAL SCORE:")
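To make the fix concrete, a minimal sketch of the counter-based loop (hypothetical surrounding code; only the counter lines appear in this diff):

import random

games = int(input("\nEnter the number of games you want to play: "))
game_counter = 0
user_count = comp_count = 0

while game_counter < games:
    user = input("rock/paper/scissors: ").strip().lower()
    comp = random.choice(["rock", "paper", "scissors"])
    if user == comp:
        pass  # a tie changes no score, but the round still counts
    elif (user, comp) in [("rock", "scissors"), ("paper", "rock"), ("scissors", "paper")]:
        user_count += 1
    else:
        comp_count += 1
    game_counter += 1  # counting rounds, not wins, so repeated ties cannot stall the loop

print("User:", user_count, "Computer:", comp_count)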
18 changes: 10 additions & 8 deletions Projects/Reddit Scraper/reddit_scraper.py
@@ -1,16 +1,18 @@
import requests
import csv
import time
from bs4 import BeautifulSoup
import requests # to fetch the HTML data of the webpage
import csv # to read from and write to CSV files
import time # to add a delay between requests; this avoids overwhelming the server with too many requests in a short period, which could get the scraper blocked or flagged as a bot


from bs4 import BeautifulSoup # It is used for parsing and navigating the HTML content of the webpage.


class HaikuScraper:
"""
This scraper is designed with the purpose of scraping Haikus (Japanese poems) from Reddit.
"""
This scraper is designed with the purpose of scraping Haikus (Japanese poems) from Reddit. The resultant data is stored in a CSV file.
"""
def __init__(self, url: str, headers: dict):
self.url = url
self.headers = headers

def make_request(self):
time.sleep(3)
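A minimal sketch of how make_request might continue and feed the CSV step (assumed details; this diff shows only the constructor and the sleep call):

    def make_request(self):
        time.sleep(3)  # throttle requests so the server is not overwhelmed
        response = requests.get(self.url, headers=self.headers)
        return BeautifulSoup(response.text, "html.parser")

Usage could then look like this (hypothetical selectors and filename):

scraper = HaikuScraper("https://old.reddit.com/r/haiku/", {"User-Agent": "Mozilla/5.0"})
soup = scraper.make_request()
with open("haikus.csv", "w", newline="") as f:
    writer = csv.writer(f)
    for title in soup.find_all("a", class_="title"):
        writer.writerow([title.get_text()])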
13 changes: 8 additions & 5 deletions Projects/send email from csv/Sending_mail.py
@@ -50,11 +50,14 @@ def Send_mail():
message = f"Subject : {subject} \n\n {body}"

with open("emails.csv", newline="") as csvfile:
spamreader = csv.reader(csvfile, delimiter=" ", quotechar="|")
for email in spamreader:
s.sendmail(Email_Address, email[0], message)
print("Send To " + email[0])

# a try/except block handles any exceptions that occur while sending the email
try:
spamreader = csv.reader(csvfile, delimiter=" ", quotechar="|")
for email in spamreader:
s.sendmail(Email_Address, email[0], message)
print("Send To " + email[0])
except Exception as e:
print(e)
# terminating the session
s.quit()
print("sent")
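For reference, a self-contained sketch of the pattern this hunk introduces (placeholder credentials and server; only the try/except portion reflects the PR's actual change):

import csv
import smtplib

Email_Address = "sender@example.com"  # placeholder
message = "Subject : Test \n\n Hello from the script"

s = smtplib.SMTP("smtp.gmail.com", 587)
s.starttls()
s.login(Email_Address, "app-password")  # placeholder credentials

with open("emails.csv", newline="") as csvfile:
    try:
        spamreader = csv.reader(csvfile, delimiter=" ", quotechar="|")
        for email in spamreader:
            s.sendmail(Email_Address, email[0], message)
            print("Sent to " + email[0])
    except Exception as e:
        print(e)  # report the failure instead of crashing mid-run

s.quit()  # terminate the SMTP session either way
print("sent")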