Fix typing & linting

pull/119/head
stijndcl 2022-07-18 23:39:14 +02:00
parent c9dd275860
commit f4056d8af6
3 changed files with 20 additions and 8 deletions

View File

@@ -1,6 +1,7 @@
 from http import HTTPStatus
 import discord
+from overrides import overrides
 from didier.data.embeds.base import EmbedBaseModel
 from didier.data.scrapers.google import SearchData
@@ -31,6 +32,7 @@ class GoogleSearch(EmbedBaseModel):
         return embed
+    @overrides
     def to_embed(self) -> discord.Embed:
         if not self.data.results or self.data.status_code != HTTPStatus.OK:
             return self._error_embed()

View File

@@ -1,4 +1,5 @@
 import http
+import typing
 from dataclasses import dataclass, field
 from typing import Optional
 from urllib.parse import unquote_plus, urlencode
@@ -28,19 +29,25 @@ def get_result_stats(bs: BeautifulSoup) -> Optional[str]:
     Example result: "About 16.570.000 results (0,84 seconds)"
     """
-    stats = bs.find("div", id="result-stats").text
-    return stats and stats.removesuffix("\xa0")
+    stats = bs.find("div", id="result-stats")
+    if stats is None:
+        return None
+    return stats.text.removesuffix("\xa0")

 def parse_result(element: Tag) -> Optional[str]:
     """Parse 1 wrapper into a link"""
     a_tag = element.find("a", href=True)
-    url = a_tag["href"]
-    title = a_tag.find("h3")
+    if a_tag is None:
+        return None
+    url = a_tag["href"]  # type: ignore
+    title = typing.cast(Tag, a_tag.find("h3"))
     if (
         url is None
-        or not url.startswith(
+        or not str(url).startswith(
             (
                 "http://",
                 "https://",
@@ -57,7 +64,8 @@ def parse_result(element: Tag) -> Optional[str]:
 def get_search_results(bs: BeautifulSoup) -> list[str]:
     """Parse the search results"""
     result_wrappers = bs.find_all("div", class_="g")
-    results = filter(lambda x: x is not None, map(parse_result, result_wrappers))
+    results: list[str] = list(result for result in map(parse_result, result_wrappers) if result is not None)

     # Remove duplicates
     # (sets don't preserve the order!)
@@ -67,7 +75,8 @@ def get_search_results(bs: BeautifulSoup) -> list[str]:
 async def google_search(http_client: ClientSession, query: str):
     """Get the first 10 Google search results"""
     headers = {
-        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36"
+        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) "
+        "Chrome/83.0.4103.97 Safari/537.36"
     }
     query = urlencode({"q": query})
@@ -82,4 +91,4 @@ async def google_search(http_client: ClientSession, query: str):
     result_stats = get_result_stats(bs)
     results = get_search_results(bs)
-    return SearchData(query, 200, results[:10], result_stats)
+    return SearchData(query, 200, results[:10], result_stats or "")

View File

@@ -6,6 +6,7 @@ pytest==7.1.2
 pytest-asyncio==0.18.3
 pytest-env==0.6.2
 sqlalchemy2-stubs==0.0.2a23
+types-beautifulsoup4==4.11.3
 types-pytz==2021.3.8

 # Flake8 + plugins