Compare commits

...

7 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| Stijn De Clercq | 016d87bcea | Merge pull request #121 from stijndcl/pre-commit: Add pre-commit hooks | 2022-07-19 21:39:39 +02:00 |
| stijndcl | bb903fdad5 | Update readme & config files | 2022-07-19 21:33:18 +02:00 |
| stijndcl | e371e2cc5c | Add pre-commit config, make tests a requirement for quality checks | 2022-07-19 21:12:04 +02:00 |
| Stijn De Clercq | 3057222607 | Merge pull request #120 from stijndcl/pytest-migrations: Small cleanup in migrations fix | 2022-07-19 19:00:57 +02:00 |
| stijndcl | 8bd4495016 | Small cleanup | 2022-07-19 18:57:24 +02:00 |
| Stijn De Clercq | 6c225bacc1 | Merge pull request #119 from stijndcl/pytest-migrations: Use migrations in tests | 2022-07-19 18:53:07 +02:00 |
| stijndcl | 9401111bee | Try to use migrations in tests | 2022-07-19 18:49:22 +02:00 |
9 changed files with 183 additions and 137 deletions

View File

@@ -62,7 +62,7 @@ jobs:
with:
token: ${{ secrets.CODECOV }}
linting:
needs: [dependencies]
needs: [tests]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -82,7 +82,7 @@ jobs:
- name: Linting
run: flake8
typing:
needs: [dependencies]
needs: [tests]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
@@ -100,9 +100,9 @@ jobs:
- name: Install dependencies
run: pip3 install -r requirements.txt -r requirements-dev.txt
- name: Typing
run: mypy didier database
run: mypy
formatting:
needs: [dependencies]
needs: [tests]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

View File

@@ -0,0 +1,44 @@
default_language_version:
python: python3.9.5
repos:
- repo: https://github.com/ambv/black
rev: 22.3.0
hooks:
- id: black
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: check-json
- id: end-of-file-fixer
- id: pretty-format-json
- id: trailing-whitespace
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
- repo: https://github.com/PyCQA/autoflake
rev: v1.4
hooks:
- id: autoflake
name: autoflake (python)
args:
- "--remove-all-unused-imports"
- "--in-place"
- "--ignore-init-module-imports"
- repo: https://github.com/PyCQA/flake8
rev: 4.0.1
hooks:
- id: flake8
additional_dependencies:
- "flake8-bandit"
- "flake8-bugbear"
- "flake8-docstrings"
- "flake8-dunder-all"
- "flake8-eradicate"
- "flake8-isort"
- "flake8-simplify"

View File

@@ -1,8 +1,9 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy.ext.asyncio import AsyncEngine
from alembic import context
from database.engine import engine
from database.models import Base
@@ -18,31 +19,6 @@ if config.config_file_name is not None:
target_metadata = Base.metadata
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
render_as_batch=True,
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection):
context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True)
@@ -50,22 +26,26 @@ def do_run_migrations(connection):
context.run_migrations()
async def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine
async def run_async_migrations(connectable: AsyncEngine):
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
if context.is_offline_mode():
run_migrations_offline()
else:
asyncio.run(run_migrations_online())
def run_migrations_online() -> None:
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = context.config.attributes.get("connection", None) or engine
if isinstance(connectable, AsyncEngine):
asyncio.run(run_async_migrations(connectable))
else:
do_run_migrations(connectable)
run_migrations_online()
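
The reworked `env.py` follows Alembic's connection-sharing pattern: a caller can place an existing connection in `config.attributes`, in which case migrations run over that connection, while a plain `alembic upgrade head` from the CLI still falls back to the module-level `engine`. Below is a minimal sketch of the calling side, with hypothetical helper names; the repository's own `database/migrations.py`, shown further down in this compare, does essentially the same thing.

```python
import asyncio

from alembic import command
from alembic.config import Config

from database.engine import engine  # the same async engine that env.py imports


def _run_upgrade(sync_connection, cfg: Config) -> None:
    # env.py reads this attribute and reuses the connection instead of the engine
    cfg.attributes["connection"] = sync_connection
    command.upgrade(cfg, "head")


async def upgrade_to_head() -> None:
    """Hypothetical helper: run all migrations over one async connection."""
    cfg = Config("alembic.ini")
    async with engine.begin() as connection:
        await connection.run_sync(_run_upgrade, cfg)


if __name__ == "__main__":
    asyncio.run(upgrade_to_head())
```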

View File

@@ -5,47 +5,52 @@ Revises: b2d511552a1f
Create Date: 2022-06-30 20:02:27.284759
"""
from alembic import op
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '0d03c226d881'
down_revision = 'b2d511552a1f'
revision = "0d03c226d881"
down_revision = "b2d511552a1f"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('user_id', sa.BigInteger(), nullable=False),
sa.PrimaryKeyConstraint('user_id')
op.create_table("users", sa.Column("user_id", sa.BigInteger(), nullable=False), sa.PrimaryKeyConstraint("user_id"))
op.create_table(
"bank",
sa.Column("bank_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("dinks", sa.BigInteger(), server_default="0", nullable=False),
sa.Column("interest_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("capacity_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("rob_level", sa.Integer(), server_default="1", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("bank_id"),
)
op.create_table('bank',
sa.Column('bank_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.BigInteger(), nullable=True),
sa.Column('dinks', sa.BigInteger(), nullable=False),
sa.Column('interest_level', sa.Integer(), nullable=False),
sa.Column('capacity_level', sa.Integer(), nullable=False),
sa.Column('rob_level', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ),
sa.PrimaryKeyConstraint('bank_id')
)
op.create_table('nightly_data',
sa.Column('nightly_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.BigInteger(), nullable=True),
sa.Column('last_nightly', sa.DateTime(timezone=True), nullable=True),
sa.Column('count', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], ),
sa.PrimaryKeyConstraint('nightly_id')
op.create_table(
"nightly_data",
sa.Column("nightly_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("last_nightly", sa.DateTime(timezone=True), nullable=True),
sa.Column("count", sa.Integer(), server_default="0", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("nightly_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('nightly_data')
op.drop_table('bank')
op.drop_table('users')
op.drop_table("nightly_data")
op.drop_table("bank")
op.drop_table("users")
# ### end Alembic commands ###
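
The reformatted migration also gains `server_default` values, presumably so that a schema built purely from migrations fills in the same defaults the models rely on during tests. A hedged sketch of the kind of model declaration such a default corresponds to follows; the real definitions live in `database/models.py` and may differ.

```python
from sqlalchemy import BigInteger, Column, ForeignKey, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Bank(Base):
    """Illustrative model mirroring the 'bank' table from the migration above."""

    __tablename__ = "bank"

    bank_id = Column(Integer, primary_key=True)
    user_id = Column(BigInteger, ForeignKey("users.user_id"), nullable=True)
    # server_default lets the database itself fill in 0/1 for rows inserted
    # without an explicit value, matching the migration's server_default arguments
    dinks = Column(BigInteger, server_default="0", nullable=False)
    interest_level = Column(Integer, server_default="1", nullable=False)
    capacity_level = Column(Integer, server_default="1", nullable=False)
    rob_level = Column(Integer, server_default="1", nullable=False)
```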

View File

@@ -7,6 +7,7 @@ from sqlalchemy.orm import sessionmaker
import settings
encoded_password = quote_plus(settings.DB_PASSWORD)
engine = create_async_engine(
URL.create(
drivername="postgresql+asyncpg",

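Only the first lines of the engine setup are visible above. For context, a `create_async_engine` call built with `URL.create` and a `quote_plus`-encoded password generally looks like the sketch below; every value other than the driver name is an assumption, since the hunk is truncated.

```python
from urllib.parse import quote_plus

from sqlalchemy.engine import URL
from sqlalchemy.ext.asyncio import create_async_engine

# Placeholder values; the real ones come from the repository's settings module
DB_USERNAME = "didier"
DB_PASSWORD = "p@ssw/rd"
DB_HOST = "localhost"
DB_PORT = 5432
DB_NAME = "didier"

# quote_plus escapes characters such as '@' or '/' that would otherwise
# be interpreted as URL syntax
encoded_password = quote_plus(DB_PASSWORD)

engine = create_async_engine(
    URL.create(
        drivername="postgresql+asyncpg",
        username=DB_USERNAME,
        password=encoded_password,
        host=DB_HOST,
        port=DB_PORT,
        database=DB_NAME,
    )
)
```
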
View File

@@ -1,16 +1,26 @@
import logging
from alembic import config, script
from sqlalchemy.orm import Session
from alembic import command, script
from alembic.config import Config
from alembic.runtime import migration
from database.engine import engine
__all__ = ["ensure_latest_migration"]
__config_path__ = "alembic.ini"
__migrations_path__ = "alembic/"
cfg = Config(__config_path__)
cfg.set_main_option("script_location", __migrations_path__)
__all__ = ["ensure_latest_migration", "migrate"]
async def ensure_latest_migration():
"""Make sure we are currently on the latest revision, otherwise raise an exception"""
alembic_config = config.Config("alembic.ini")
alembic_script = script.ScriptDirectory.from_config(alembic_config)
alembic_script = script.ScriptDirectory.from_config(cfg)
async with engine.begin() as connection:
current_revision = await connection.run_sync(
@@ -25,3 +35,19 @@ async def ensure_latest_migration():
)
logging.error(error_message)
raise RuntimeError(error_message)
def __execute_upgrade(connection: Session):
cfg.attributes["connection"] = connection
command.upgrade(cfg, "head")
def __execute_downgrade(connection: Session):
cfg.attributes["connection"] = connection
command.downgrade(cfg, "base")
async def migrate(up: bool):
"""Migrate the database upwards or downwards"""
async with engine.begin() as connection:
await connection.run_sync(__execute_upgrade if up else __execute_downgrade)
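
With `migrate` and `ensure_latest_migration` both exported, test setup no longer needs `Base.metadata.create_all`; the conftest diff at the bottom of this compare wires them into a session-scoped fixture. A minimal standalone sketch of the same flow, using a hypothetical helper name:

```python
import asyncio

from database.migrations import ensure_latest_migration, migrate


async def rebuild_schema() -> None:
    """Hypothetical helper: rebuild the schema purely through Alembic migrations."""
    await migrate(up=False)          # downgrade everything back to base
    await migrate(up=True)           # upgrade to head again
    await ensure_latest_migration()  # raises if we somehow are not at head


if __name__ == "__main__":
    asyncio.run(rebuild_schema())
```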

View File

@@ -14,7 +14,9 @@ omit = [
"./database/migrations.py",
"./didier/cogs/*",
"./didier/didier.py",
"./didier/data/*",
"./didier/data/constants.py",
"./didier/data/embeds/*",
"./didier/data/flags/*",
"./didier/utils/discord/colours.py",
"./didier/utils/discord/constants.py"
]
@@ -23,6 +25,11 @@ omit = [
profile = "black"
[tool.mypy]
files = [
"database/**/*.py",
"didier/**/*.py",
"main.py"
]
plugins = [
"pydantic.mypy",
"sqlalchemy.ext.mypy.plugin"

View File

@@ -4,77 +4,54 @@
You bet. The time has come.
### Discord Documentation
## Development
[Link to the Discord API docs](https://discordpy.readthedocs.io/en/latest/index.html). When making a command, make sure to check to docs to see if what you're doing is even possible, and if you're providing the right (amount of) parameters. Ask questions in De Zandbak Discord all you want, but at least make an attempt at looking it up first.
Didier uses `Python 3.9.5`, as specified in the [`.python-version`](.python-version)-file. This file will cause [`pyenv`](https://github.com/pyenv/pyenv) to automatically use the correct version when you're working on Didier.
### Running Didier
```shell
# Installing Python 3.9.5 through pyenv
pyenv install 3.9.5
In order to run Didier, simply run `python3 didier.py` in your terminal, or click `run` in PyCharm (green arrow @ top-right, or right-click the file). Make sure you have installed all the required packages in `requirements.txt`.
# Creating a Virtual Environment and activate it
# PyCharm will automatically activate your venv
python3 -m venv venv
source venv/bin/activate
### Databases
# Installing dependencies + development dependencies
pip3 install -r requirements.txt -r requirements-dev.txt
`databases.md` contains info on every database. Using this file you can find out which tables exist, which columns those tables have, and which types of values those columns contain. This should be enough for you to set up a local Postgresql database in order to mess around with & test functions before committing them (guilty).
### Cog Template
When using PyCharm, you can configure [file templates](https://www.jetbrains.com/help/pycharm/using-file-and-code-templates.html) to create blueprints for files. This speeds up the process of creating `Cogs` as you don't have to write the same exact code every single time.
Below is the Cog template you are expected to use when creating new Cogs for Didier, providing both essential and commonly-used imports & functionality. It's possible that you don't need all of the imports, in which case you can obviously remove the obsolete ones.
```python
from data import constants
import discord
from discord.ext import commands
from decorators import help
from enums.help_categories import Category
from functions import checks
class Cog(commands.Cog):
def __init__(self, client):
self.client = client
# Don't allow any commands to work when locked
def cog_check(self, ctx):
return not self.client.locked
def setup(client):
client.add_cog(Cog(client))
# Installing pre-commit hooks
pre-commit install
```
Replacing the classname `Cog` with the name of your cog.
The database can be managed easily using `Docker Compose`. If you want to, however, you can run a regular PostgreSQL server and connect to that instead.
### Help Categories
A separate database is used in the tests, as it would obviously not be ideal when tests randomly wipe your database.
Didier uses a custom help command, which classifies certain commands into categories. Discord's default help does this based on `Cogs`, but this would mean some Cogs would be thousands of lines long, defeating the purpose of using them in the first place.
```shell
# Starting the database
docker-compose up -d db
When creating a new Didier command, you can add it to a `Category` by adding a decorator above the function. The example below shows how to add a command to the `Currency` category.
```python
from decorators import help
from discord.ext import commands
from enums.help_categories import Category
from functions import checks
@commands.command(name="Command Name", aliases=["Cn"])
@commands.check(checks.allowedChannels)
@help.Category(Category.Currency)
async def command_name(self, ctx):
# Command code
await ctx.send("Command response")
# Starting the database used in tests
docker-compose up -d db-pytest
```
This allows commands across multiple Cogs to be classified under the same category in the help page.
### Commands
### Python Version
Didier uses `Python 3.9.5`, the most recent one as of the writing of this sentence. The reasoning behind this is that I hope it'll last for a while so I don't have to update it all the time.
```shell
# Starting Didier
python3 main.py
### Ignored Files
`ignored.md` contains a list of all ignored files, and what they look like. This way, you can recreate these files locally to test commands that use them. API keys should be stored in `environment variables`. To do so, create a file called `.env` in the root of this repository (which has already been added to `.gitignore`) and make sure the names match.
# Running tests
pytest
### FAQ
`faq.md` is always worth taking a look at when you've got a question. Right now this doesn't contain much, as I don't have any questions for myself, but over time this will be expanded upon.
# Running tests with Coverage
coverage run -m pytest
# Generating code coverage report
coverage html
### Useful Links
- [Embed Visualizer](https://leovoel.github.io/embed-visualizer/): Allows for easy creation of embeds, and can generate the code for them (bottom: "Generate Code" -> "discord.py (Python)"). This is helpful when starting out so you can see what you're doing, when you don't really know how Embeds work yet.
# Running code quality checks
black
flake8
mypy
```

View File

@@ -6,11 +6,11 @@ import pytest
from sqlalchemy.ext.asyncio import AsyncSession
from database.engine import engine
from database.models import Base
from database.migrations import ensure_latest_migration, migrate
from didier import Didier
@pytest.fixture(scope="session")
@pytest.fixture(scope="session", autouse=True)
def event_loop() -> Generator:
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
@@ -19,9 +19,15 @@ def event_loop() -> Generator:
@pytest.fixture(scope="session")
async def tables():
"""Initialize a database before the tests, and then tear it down again"""
async with engine.begin() as connection:
await connection.run_sync(Base.metadata.create_all)
"""Initialize a database before the tests, and then tear it down again
Checks that the migrations were successful by asserting that we are currently
on the latest migration
"""
await migrate(up=True)
await ensure_latest_migration()
yield
await migrate(up=False)
@pytest.fixture