import asyncio
import os
import time

import aiohttp
from aiohttp_socks import SocksConnector
from selenium import webdriver
from selenium.webdriver.chrome.options import Options

# Root directory under which all downloaded files are stored.
ROOT_DOWNLOAD_DIR = "./downloads"  # Change this to your preferred root directory


async def download_file(session, url, path):
    """Asynchronously download a file and save it to the specified path.

    Creates the parent directory if needed. Errors are logged and
    swallowed deliberately so one failed file does not abort the batch.
    """
    try:
        os.makedirs(os.path.dirname(path), exist_ok=True)
        async with session.get(url) as response:
            if response.status == 200:
                content = await response.read()
                with open(path, 'wb') as f:
                    f.write(content)
                print(f"Downloaded {url} to {path}")
            else:
                print(f"Failed to download {url}: Status {response.status}")
    except Exception as e:
        print(f"Error downloading {url}: {str(e)}")


async def download_all(download_queue):
    """Process all downloads in the queue asynchronously.

    Each queue entry is a (url, maindir_num, subdir_num) tuple; files are
    saved under ROOT_DOWNLOAD_DIR/maindir_N/subdir_M/<basename>. All
    requests are routed through the local Tor SOCKS5 proxy.
    """
    # NOTE(review): SocksConnector is deprecated in recent aiohttp_socks
    # releases in favour of ProxyConnector.from_url — confirm the pinned
    # aiohttp_socks version before upgrading.
    connector = SocksConnector.from_url('socks5://localhost:9050')  # Tor proxy
    async with aiohttp.ClientSession(connector=connector) as session:
        tasks = []
        for url, maindir_num, subdir_num in download_queue:
            filename = os.path.basename(url)
            dir_path = os.path.join(
                ROOT_DOWNLOAD_DIR,
                f"maindir_{maindir_num}",
                f"subdir_{subdir_num}"
            )
            file_path = os.path.join(dir_path, filename)
            tasks.append(download_file(session, url, file_path))
        await asyncio.gather(*tasks)


def setup_driver():
    """Configure and return the Chrome WebDriver."""
    chrome_options = Options()
    chrome_options.add_argument("--window-size=768x1024")
    return webdriver.Chrome(options=chrome_options)


def handle_bot_check(driver):
    """Wait for the site's anti-bot check to complete.

    Polls the document title until it matches the leak site's home page.
    """
    print("Waiting on bot check...")
    while True:
        if driver.execute_script("return document.title") == "HOME | CL0P^_- LEAKS":
            return
        # BUG FIX: the original called driver.implicitly_wait(2) here, which
        # only sets Selenium's element-lookup timeout and does NOT pause —
        # the loop was a hot busy-spin. Sleep between polls instead.
        time.sleep(2)


def main():
    """Main scraping and download coordination function."""
    driver = setup_driver()
    driver.get("http://santat7kpllt6iyvqbr7q4amdv6dzrh6paatvyrzl7ry3zm72zigf4ad.onion")
    handle_bot_check(driver)
    # NOTE(review): implicitly_wait only affects find_element timeouts; it
    # does not pause execution or wait for page load. The calls below are
    # effectively no-ops for execute_script — confirm intent before relying
    # on them as delays.
    driver.implicitly_wait(20)
    download_queue = []
    maindir = 0  # Main directory counter (1-based after first increment)
    try:
        # The loop has no break: it terminates when the main-directory link
        # lookup raises (index out of range once directories are exhausted)
        # or the user interrupts; the finally block then runs the downloads.
        while True:
            # Navigate to main directory
            backto = driver.execute_script(
                'return document.body.getElementsByClassName("g-menu-parent-indicator")[1].parentElement.href'
            )
            maindir += 1
            driver.get(backto)
            driver.implicitly_wait(20)
            # Process subdirectories
            subitems = driver.execute_script(
                'return document.body.getElementsByClassName("read-more").length'
            )
            for subdir in range(subitems):
                driver.get(backto)
                driver.implicitly_wait(20)
                driver.execute_script(
                    f'document.body.getElementsByClassName("read-more")[{subdir}].click()'
                )
                driver.implicitly_wait(20)
                # Collect download links
                total_items = driver.execute_script(
                    'return document.getElementById("g-container-main").getElementsByTagName("a").length'
                )
                for item in range(1, total_items):  # Skip first item if needed
                    url = driver.execute_script(
                        f'return document.getElementById("g-container-main").getElementsByTagName("a")[{item}].href'
                    )
                    download_queue.append((url, maindir, subdir))
                    print(f"Queued: {url}")
    finally:
        driver.quit()
        print("Starting asynchronous downloads...")
        asyncio.run(download_all(download_queue))


if __name__ == "__main__":
    main()