import time
import urllib.parse

import requests
from bs4 import BeautifulSoup

# Google serves a blocking or consent page to the default requests
# User-Agent, so identify as a regular browser.
HEADERS = {
    "User-Agent": (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
        "AppleWebKit/537.36 (KHTML, like Gecko) "
        "Chrome/120.0 Safari/537.36"
    )
}


def is_indexed(url):
    """Return True if a Google site: search for the URL returns results."""
    # Perform a Google search for the URL. Quote the query so special
    # characters survive the query string, and force English results
    # (hl=en) so the "no results" message checked below is predictable.
    query = urllib.parse.quote(f"site:{url}")
    google_search_url = f"https://www.google.com/search?q={query}&hl=en"
    response = requests.get(google_search_url, headers=HEADERS, timeout=10)
    response.raise_for_status()

    # Parse the HTML response. Searching the raw text for the URL would
    # always match, because Google echoes the query (and the URL inside
    # it) back into the page; instead, treat the absence of Google's
    # "no results" message as evidence that the site is indexed.
    soup = BeautifulSoup(response.text, "html.parser")
    return "did not match any documents" not in soup.get_text()


def main(input_file):
    indexed_links = []
    non_indexed_links = []

    # Read URLs from the input file, one per line, skipping blank lines.
    with open(input_file, "r") as file:
        for line in file:
            url = line.strip()
            if not url:
                continue
            if is_indexed(url):
                indexed_links.append(url)
            else:
                non_indexed_links.append(url)
            # Space out requests; Google throttles rapid automated queries.
            time.sleep(2)

    # Write indexed links to one file...
    with open("indexed_links_check.txt", "w") as file:
        for link in indexed_links:
            file.write(link + "\n")

    # ...and non-indexed links to another.
    with open("non_indexed_links_check.txt", "w") as file:
        for link in non_indexed_links:
            file.write(link + "\n")


if __name__ == "__main__":
    input_file = input("Enter the path to the text file containing URLs: ")
    main(input_file)
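
# Usage note (a sketch; "urls.txt" is only an example file name): put one
# URL per line in a text file, run the script, and enter the file's path
# at the prompt. Indexed URLs are written to indexed_links_check.txt and
# the rest to non_indexed_links_check.txt.
#
# For a quick single-URL sanity check before running a whole batch (the
# URL below is a placeholder; substitute a page you control):
#
#     if is_indexed("https://example.com/"):
#         print("indexed")
#
# Bear in mind that scraping search results is a heuristic: if Google
# answers with a CAPTCHA or rate-limit page, URLs can be misclassified.
# The Search Console URL Inspection API is a more reliable way to check
# indexing status for sites you own.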