Shammer/main.py

import requests
import queue
import re

# Init static vars
INDEX_URL = "https://bitscuit.be/"

# Create session
session = requests.Session()

# Create website queue
urls = queue.Queue()
urls.put(INDEX_URL)

# Create set to store visited sites
visited = set([INDEX_URL])
# Loop
while not urls.empty():
    url = urls.get()

    # Perform request
    print("Fetching url '%s'..."%url, end="")
    r = None
    try:
        r = session.get(url)
        print("\tdone")
    except Exception as e:
        # Request failed, skip this url
        print("\tfailed")
        print(e)
        continue
    finally:
        # Only close the response if the request actually returned one
        if r is not None:
            r.close()

    # Read response
    if r.status_code != 200:
        print("returned %d"%r.status_code)
        continue
    # Filter page for hrefs
    hrefs = [res[0] for res in re.findall(r"(https?://([a-z0-9]+\.)*[a-z0-9]{3,}\.[a-z0-9]{2,}/)", r.text)]
    print(hrefs)
    # Add to queue
    print("found %d urls"%len(hrefs), end="")
    numAdded = 0
    for href in hrefs:
        if href not in visited:
            urls.put(href)
            visited.add(href)
            numAdded += 1
    print(", of which %d new"%numAdded)