padel/padel/tennis.py

import requests
from bs4 import BeautifulSoup
from datetime import date, time, timedelta, datetime
import asyncio as aio

BASE_URL = "https://www.tennisvlaanderen.be/terreinreservatie-dagplanning"


def extract_timeslots(tbody, column_headers):
    """Parse one day's planning table body into (court, status, start, length) tuples.

    Status codes: 0 = free, 1 = regular reservation, 2 = not available for hire.
    """
    counters = [0] * len(column_headers)
    timeslots = []
    for tr in tbody.find_all("tr"):
        # Determine the start time for this row
        start_time = time.fromisoformat(tr.find("th").text)
        # Iterate over each column
        for td in tr.find_all("td"):
            # Find the first column whose previous block has run out
            counter_index = next(
                (i for i in range(len(counters)) if counters[i] <= 0), None
            )
            # No empty counter means every column is still covered by an earlier rowspan
            if counter_index is None:
                break
            block_length = int(td["rowspan"])
            counters[counter_index] += block_length
            # By default, a slot is simply not available for hire
            code = 2
            length = timedelta(minutes=15 * block_length)
            if td.find("div", class_="reservation-detail free"):
                code = 0
            elif td.find("div", class_="reservation-detail regular-reservation"):
                code = 1
            timeslots.append((column_headers[counter_index], code, start_time, length))
        # Each row represents 15 minutes, so every open counter shrinks by one
        counters = [i - 1 for i in counters]
    return timeslots


def extract_calendar(soup: BeautifulSoup, reservation_date):
    """Extract the day's timeslots from a parsed planning page, anchored to a concrete date."""
    reservation_date = reservation_date or date.today()
    reservation_t = soup.find("div", class_="reservation-table")
    # Get court names from the table header
    header_ths = reservation_t.find("thead").find("tr").find_all("th")
    court_names = [th.text.strip() for th in header_ths if th.text.strip()]
    # Parse the actual slots from the table body
    tbody = reservation_t.find("tbody")
    timeslots = extract_timeslots(tbody, court_names)
    # Combine the bare start times with the reservation date into datetimes
    return [
        (col, status, datetime.combine(reservation_date, start), duration)
        for col, status, start, duration in timeslots
    ]


async def get_time_slots(club_id: int, days=1):
    """Fetch the planning pages for the next `days` days (starting today) and return all timeslots."""
    dates = [date.today() + timedelta(days=i) for i in range(days)]

    async def get_calendar(date_obj):
        # Note: requests is a blocking client, so the event loop is not actually
        # freed while each page downloads.
        r = requests.get(
            BASE_URL,
            params={"clubId": club_id, "planningDay": date_obj.strftime("%d-%m-%Y")},
        )
        soup = BeautifulSoup(r.content, "html.parser")
        return extract_calendar(soup, date_obj)

    output = []
    for coro in aio.as_completed([get_calendar(date_obj) for date_obj in dates]):
        res = await coro
        output.extend(res)
    return output


async def get_club_address(club_id: int):
    """Return the club's main address from its club tab, or None if it is not listed."""
    r = requests.get(
        BASE_URL,
        params={
            "clubId": club_id,
            "tab": "club",
        },
    )
    soup = BeautifulSoup(r.content, "html.parser")
    tab_div = soup.find("div", id="club")
    info_ul = tab_div.find("ul")
    for li in info_ul.find_all("li"):
        if li.find("span", text="Adres (hoofdlocatie)"):
            return li.find("span", class_="list-value").text.strip()
    return None
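

# Usage sketch: one possible way to drive these coroutines from a script.
# The club id 1234 is a placeholder, not a known club id.
if __name__ == "__main__":
    slots = aio.run(get_time_slots(1234, days=2))
    for court, status, start, duration in slots:
        if status == 0:  # status 0 marks a free, bookable slot
            print(court, start.isoformat(), duration)
    print(aio.run(get_club_address(1234)))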