import csv
import time

import requests
from bs4 import BeautifulSoup


def parse_draw(draw_number):
    """Fetch a single Russkoe Loto draw page and extract its date and numbers."""
    url = f"https://lotocafe.ru/archive-russkoe-loto/{draw_number}-tirazh"
    try:
        # A timeout keeps the scraper from hanging on a stalled connection.
        response = requests.get(url, timeout=10)
    except requests.RequestException:
        return None
    if response.status_code != 200:
        return None
    soup = BeautifulSoup(response.text, "html.parser")

    # The <h1> heading is used as the draw's date/label.
    date_elem = soup.find("h1")
    date_text = date_elem.text.strip() if date_elem else ""

    # The first table on the page is expected to list the drawn numbers,
    # one per row, with the number in the second cell; skip the header row.
    table = soup.find("table")
    if not table:
        return None
    numbers = []
    rows = table.find_all("tr")[1:]
    for row in rows:
        cells = row.find_all("td")
        if len(cells) >= 2:
            text = cells[1].get_text(strip=True)
            if text.isdigit():
                numbers.append(int(text))
    return {"draw_number": draw_number, "date": date_text, "numbers": numbers}


def main():
    start_draw = 1665  # most recent draw number to start from
    count = 100        # how many draws to collect, counting backwards
    results = []
    for draw in range(start_draw, start_draw - count, -1):
        data = parse_draw(draw)
        if data:
            results.append(data)
        time.sleep(1)  # pause between requests so we don't overload the server

    # Save the collected data to CSV.
    with open("lotto_results.csv", "w", newline="", encoding="utf-8") as f:
        writer = csv.writer(f)
        writer.writerow(["draw_number", "date", "numbers"])
        for row in results:
            writer.writerow([row["draw_number"], row["date"], ",".join(map(str, row["numbers"]))])


if __name__ == "__main__":
    main()