mirror of https://github.com/stijndcl/didier
Merge pull request #85 from stijndcl/more_tests
Add test for jpl scraper, fix random import, fix reminders
commit
9561b98f98
@@ -2,7 +2,7 @@ import discord
 from discord.ext import commands
 from decorators import help
 from enums.help_categories import Category
-from functions.scraping import google_search
+from functions.scrapers.google import google_search


 class Google(commands.Cog):
@@ -5,7 +5,7 @@ from enums.numbers import Numbers
 from functions import timeFormatters
 from functions.config import config
 from functions.database import currency, poke, prison, birthdays, stats
-from functions.scraping import getMatchweek
+from functions.scrapers.sporza import getMatchweek
 from functions import ufora_notifications
 import json
 import random
@@ -202,6 +202,12 @@ class Tasks(commands.Cog):
             if (not category["weekends"]) and weekday > 4:
                 continue

+            # Create embed once because this can be heavy
+            if "embed" in category:
+                embed = category["embed"]()
+            else:
+                embed = None
+
             for user in category["users"]:
                 userInstance = self.client.get_user(user)

@@ -213,7 +219,7 @@
                 if "embed" not in category:
                     await userInstance.send(random.choice(category["messages"]))
                 else:
-                    await userInstance.send(random.choice(category["messages"]), embed=category["embed"])
+                    await userInstance.send(random.choice(category["messages"]), embed=embed)

         with open("files/lastTasks.json", "w") as fp:
             lastTasks["remind"] = round(time.time())
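Note on the two hunks above: `category["embed"]` now holds a zero-argument callable rather than a pre-built embed, so the (potentially heavy) embed is constructed once per category and then reused for every user. A minimal sketch of the pattern; the names `make_embed` and the sample dict values are illustrative, not part of the diff:

    def make_embed():
        # Stand-in for Reminders.lesEmbed; building the embed can be expensive
        return ...

    category = {"users": [1, 2, 3], "messages": ["hello"], "embed": make_embed}

    embed = category["embed"]() if "embed" in category else None
    for user in category["users"]:
        pass  # each send reuses the same `embed` instead of rebuilding it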
@@ -228,6 +234,10 @@
         """
         Task that checks the current JPL matchweek & changes the dict value
         """
+        # Don't run this when testing
+        if self.client.user.id != int(constants.didierId):
+            return
+
         matchweek = getMatchweek()

         if matchweek is None:
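The new guard compares the ID of the account the bot is currently logged in as against the production bot's ID from `constants`, so the scheduled scrape exits early when a test bot loads the cog. A hedged restatement of the idea (how `constants.didierId` is stored is an assumption here):

    # constants.didierId is assumed to hold the production bot's user ID
    if self.client.user.id != int(constants.didierId):
        return  # running under a different (test) account: skip the task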
@@ -1,4 +1,5 @@
-from functions import les
+from data import schedule
+from functions import les, config
 from functions.database import remind


@@ -12,13 +13,11 @@ class Reminders:
         self._les = [int(user[0]) for user in rows if user[2]]
         self._lesMessages = ["Lessenrooster voor vandaag:"]
-        self.les = {"users": self._les, "messages": self._lesMessages, "embed": self.lesEmbed(), "weekends": False, "disabled": True}
+        self.les = {"users": self._les, "messages": self._lesMessages, "embed": self.lesEmbed, "weekends": False, "disabled": True}

         self.categories = [self.nightly, self.les]

     def lesEmbed(self):
-        day, dayDatetime, semester, year = les.parseArgs([])[1:]
-
-        schedule = les.getSchedule(semester, year)
-
-        return les.createEmbed(day, dayDatetime, semester, year, schedule)
+        dt = les.find_target_date()
+        s = schedule.Schedule(dt, int(config.get("year")), int(config.get("semester")))
+        return s.create_schedule().to_embed()

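The key change in the first hunk is dropping the parentheses: the dict stores the bound method `self.lesEmbed` instead of its result, so nothing is scraped while `Reminders` is being constructed. A hedged sketch of the difference:

    reminders = Reminders()           # constructor arguments omitted for brevity
    factory = reminders.les["embed"]  # the bound method itself, not an Embed
    embed = factory()                 # the schedule is only built here, on demand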
@@ -1,7 +1,7 @@
 from enum import Enum
 from attr import dataclass, field
 from functions.timeFormatters import fromString
-from functions.scraping import getJPLMatches, getJPLTable
+from functions.scrapers.sporza import getJPLMatches, getJPLTable
 from functions.stringFormatters import leadingZero
 from datetime import datetime
 import tabulate
@@ -3,7 +3,7 @@ from functions.timeFormatters import dateTimeNow, weekdayToInt, forward_to_weekday
 from typing import Optional


-def find_target_date(arg: Optional[str]) -> datetime:
+def find_target_date(arg: Optional[str] = None) -> datetime:
     """
     Find the requested date out of the user's arguments
     """
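Giving `arg` a default of `None` is what lets the rewritten `Reminders.lesEmbed` above call `les.find_target_date()` with no arguments. Both call styles now work; the explicit argument value below is illustrative:

    find_target_date()          # falls back to the default target date
    find_target_date("morgen")  # explicit user argument, as before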
@@ -0,0 +1,38 @@
+from bs4 import BeautifulSoup
+from requests import get
+from urllib.parse import urlencode
+
+
+def google_search(query):
+    """
+    Function to get Google search results
+    """
+    headers = {
+        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
+    }
+
+    query = urlencode({"q": query})
+
+    # Get 20 results in case some of them are None
+    resp = get("https://www.google.com/search?{}&num=20&hl=en".format(query), headers=headers)
+
+    if resp.status_code != 200:
+        return None, resp.status_code
+
+    bs = BeautifulSoup(resp.text, "html.parser")
+
+    def getContent(element):
+        """
+        Function to find links & titles in the HTML of a <div> element
+        """
+        link = element.find("a", href=True)
+        title = element.find("h3")
+
+        if link is None or title is None:
+            return None
+
+        return link["href"], title.text
+
+    divs = bs.find_all("div", attrs={"class": "g"})
+
+    return list(getContent(d) for d in divs), 200
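For reference, a usage sketch of the relocated helper; the query string is illustrative. `google_search` returns a `(results, status)` pair, where each result is an `(href, title)` tuple or `None` when a result block lacked a link or title:

    from functions.scrapers.google import google_search

    results, status = google_search("python unittest")
    if status == 200:
        for href, title in filter(None, results):
            print(title, href)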
@@ -1,45 +1,6 @@
-import re
-
-from requests import get
-from urllib.parse import urlencode
-from bs4 import BeautifulSoup
-
-# TODO add Football requests in here as well
-
-
-def google_search(query):
-    """
-    Function to get Google search results
-    """
-    headers = {
-        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
-    }
-
-    query = urlencode({"q": query})
-
-    # Get 20 results in case some of them are None
-    resp = get("https://www.google.com/search?{}&num=20&hl=en".format(query), headers=headers)
-
-    if resp.status_code != 200:
-        return None, resp.status_code
-
-    bs = BeautifulSoup(resp.text, "html.parser")
-
-    def getContent(element):
-        """
-        Function to find links & titles in the HTML of a <div> element
-        """
-        link = element.find("a", href=True)
-        title = element.find("h3")
-
-        if link is None or title is None:
-            return None
-
-        return link["href"], title.text
-
-    divs = bs.find_all("div", attrs={"class": "g"})
-
-    return list(getContent(d) for d in divs), 200
+import re
+from requests import get


 def getMatchweek():
@@ -0,0 +1,13 @@
+from functions.scrapers import sporza
+import unittest
+
+
+class TestSporzaScraper(unittest.TestCase):
+    def test_find_matchweek(self):
+        """
+        This tests if the structure of the HTML is still what we expect it to be,
+        as Sporza changes it from time to time.
+        """
+        # This will throw an error if the argument was not a proper integer
+        week = int(sporza.getMatchweek())
+        self.assertGreater(week, 0)
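The test hits the live Sporza page, so it doubles as a canary for markup changes. Assuming the file lives in a `tests/` package (the path is an assumption, not stated in the diff), it can be run with the standard library runner:

    import unittest

    # Discover and run everything under tests/ (the directory name is an assumption)
    suite = unittest.defaultTestLoader.discover("tests")
    unittest.TextTestRunner(verbosity=2).run(suite)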