"""Breadth-first web crawler that fetches pages starting from a seed URL
and runs a placeholder vulnerability scan on each page it visits."""

from collections import deque

import requests
from bs4 import BeautifulSoup

# Per-request network timeout (seconds) so one slow host cannot hang the crawl.
REQUEST_TIMEOUT = 10


def get_links(url):
    """Return all absolute http(s) links found in the anchors of *url*.

    Relative hrefs (e.g. "/about") are intentionally skipped, matching the
    original behavior. Returns an empty list when the page cannot be
    fetched or parsed.
    """
    try:
        response = requests.get(url, timeout=REQUEST_TIMEOUT)
    except requests.exceptions.RequestException as e:
        print(f"Error fetching links: {e}")
        return []
    soup = BeautifulSoup(response.text, 'html.parser')
    links = []
    for anchor in soup.find_all('a'):
        href = anchor.get('href')
        # Keep only absolute links so the queue never holds relative paths.
        if href and href.startswith('http'):
            links.append(href)
    return links


def scan_webpage(url):
    """Fetch *url* and run the (placeholder) vulnerability checks on it.

    Prints a status line; errors are reported, never raised, so a single
    bad page does not abort the crawl.
    """
    try:
        response = requests.get(url, timeout=REQUEST_TIMEOUT)
    except requests.exceptions.RequestException as e:
        print(f"Error scanning {url}: {e}")
        return
    if response.status_code == 200:
        print(f"Scanning: {url}")
        # Add your vulnerability scanning logic here
        # For example, check for common security headers or known vulnerabilities
    else:
        print(f"Failed to fetch {url}, Status code: {response.status_code}")


def main(start_url, max_pages=100):
    """Breadth-first crawl starting at *start_url*, scanning each page once.

    *max_pages* bounds how many distinct pages are visited so the crawl
    cannot wander the whole web; pass None for unbounded crawling
    (the original behavior).
    """
    visited_urls = set()
    # deque.popleft() is O(1) and gives true FIFO/BFS order;
    # the original list.pop() silently produced depth-first order.
    queue = deque([start_url])
    while queue:
        if max_pages is not None and len(visited_urls) >= max_pages:
            break
        url = queue.popleft()
        if url in visited_urls:
            continue
        visited_urls.add(url)
        scan_webpage(url)
        # NOTE(review): each page is fetched twice (scan + link extraction);
        # passing the response through would halve traffic but change the API.
        for link in get_links(url):
            # Skip already-visited links at enqueue time to keep the queue small.
            if link not in visited_urls:
                queue.append(link)


if __name__ == "__main__":
    start_url = ""  # Replace with your website URL
    if not start_url:
        print("Please set start_url to the website you want to crawl.")
    else:
        main(start_url)