Merge pull request #131 from stijndcl/schedules

Re-write schedules to be auto-generated
pull/132/head
Stijn De Clercq 2022-09-18 17:50:38 +02:00 committed by GitHub
commit bf28611ddc
38 changed files with 787 additions and 576 deletions

.gitignore

@ -157,3 +157,6 @@ cython_debug/
# Debugging files
debug.py
# Schedule .ics files
/files/schedules/


@ -1,39 +0,0 @@
"""Deadlines
Revision ID: 08d21b2d1a0a
Revises: 3962636f3a3d
Create Date: 2022-08-12 23:44:13.947011
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "08d21b2d1a0a"
down_revision = "3962636f3a3d"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"deadlines",
sa.Column("deadline_id", sa.Integer(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("deadline", sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("deadline_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("deadlines")
# ### end Alembic commands ###


@ -1,56 +0,0 @@
"""Initial currency models
Revision ID: 0d03c226d881
Revises: b2d511552a1f
Create Date: 2022-06-30 20:02:27.284759
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "0d03c226d881"
down_revision = "b2d511552a1f"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table("users", sa.Column("user_id", sa.BigInteger(), nullable=False), sa.PrimaryKeyConstraint("user_id"))
op.create_table(
"bank",
sa.Column("bank_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("dinks", sa.BigInteger(), server_default="0", nullable=False),
sa.Column("interest_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("capacity_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("rob_level", sa.Integer(), server_default="1", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("bank_id"),
)
op.create_table(
"nightly_data",
sa.Column("nightly_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("last_nightly", sa.Date, nullable=True),
sa.Column("count", sa.Integer(), server_default="0", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("nightly_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("nightly_data")
op.drop_table("bank")
op.drop_table("users")
# ### end Alembic commands ###


@ -1,38 +0,0 @@
"""Add birthdays
Revision ID: 1716bfecf684
Revises: 581ae6511b98
Create Date: 2022-07-19 21:46:42.796349
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "1716bfecf684"
down_revision = "581ae6511b98"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"birthdays",
sa.Column("birthday_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("birthday", sa.Date, nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("birthday_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("birthdays")
# ### end Alembic commands ###


@ -1,36 +0,0 @@
"""Create tasks
Revision ID: 346b408c362a
Revises: 1716bfecf684
Create Date: 2022-07-23 19:41:07.029482
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "346b408c362a"
down_revision = "1716bfecf684"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"tasks",
sa.Column("task_id", sa.Integer(), nullable=False),
sa.Column("task", sa.Enum("BIRTHDAYS", "UFORA_ANNOUNCEMENTS", name="tasktype"), nullable=False),
sa.Column("previous_run", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("task_id"),
sa.UniqueConstraint("task"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("tasks")
sa.Enum("BIRTHDAYS", "UFORA_ANNOUNCEMENTS", name="tasktype").drop(op.get_bind())
# ### end Alembic commands ###


@ -1,37 +0,0 @@
"""Meme templates
Revision ID: 36300b558ef1
Revises: 08d21b2d1a0a
Create Date: 2022-08-25 01:34:22.845955
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "36300b558ef1"
down_revision = "08d21b2d1a0a"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"meme",
sa.Column("meme_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("template_id", sa.Integer(), nullable=False),
sa.Column("field_count", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("meme_id"),
sa.UniqueConstraint("name"),
sa.UniqueConstraint("template_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("meme")
# ### end Alembic commands ###


@ -1,63 +0,0 @@
"""Wordle
Revision ID: 38b7c29f10ee
Revises: 36300b558ef1
Create Date: 2022-08-29 20:21:02.413631
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "38b7c29f10ee"
down_revision = "36300b558ef1"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"wordle_word",
sa.Column("word_id", sa.Integer(), nullable=False),
sa.Column("word", sa.Text(), nullable=False),
sa.Column("day", sa.Date(), nullable=False),
sa.PrimaryKeyConstraint("word_id"),
sa.UniqueConstraint("day"),
)
op.create_table(
"wordle_guesses",
sa.Column("wordle_guess_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("guess", sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("wordle_guess_id"),
)
op.create_table(
"wordle_stats",
sa.Column("wordle_stats_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("last_win", sa.Date(), nullable=True),
sa.Column("games", sa.Integer(), server_default="0", nullable=False),
sa.Column("wins", sa.Integer(), server_default="0", nullable=False),
sa.Column("current_streak", sa.Integer(), server_default="0", nullable=False),
sa.Column("highest_streak", sa.Integer(), server_default="0", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("wordle_stats_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("wordle_stats")
op.drop_table("wordle_guesses")
op.drop_table("wordle_word")
# ### end Alembic commands ###


@ -1,35 +0,0 @@
"""Add custom links
Revision ID: 3962636f3a3d
Revises: 346b408c362a
Create Date: 2022-08-10 00:54:05.668255
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "3962636f3a3d"
down_revision = "346b408c362a"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"links",
sa.Column("link_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("url", sa.Text(), nullable=False),
sa.PrimaryKeyConstraint("link_id"),
sa.UniqueConstraint("name"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("links")
# ### end Alembic commands ###


@ -1,63 +0,0 @@
"""Initial migration
Revision ID: 4ec79dd5b191
Revises:
Create Date: 2022-06-19 00:31:58.384360
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "4ec79dd5b191"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"ufora_courses",
sa.Column("course_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("code", sa.Text(), nullable=False),
sa.Column("year", sa.Integer(), nullable=False),
sa.Column("log_announcements", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("course_id"),
sa.UniqueConstraint("code"),
sa.UniqueConstraint("name"),
)
op.create_table(
"ufora_announcements",
sa.Column("announcement_id", sa.Integer(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.Column("publication_date", sa.Date, nullable=True),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("announcement_id"),
)
op.create_table(
"ufora_course_aliases",
sa.Column("alias_id", sa.Integer(), nullable=False),
sa.Column("alias", sa.Text(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("alias_id"),
sa.UniqueConstraint("alias"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("ufora_course_aliases")
op.drop_table("ufora_announcements")
op.drop_table("ufora_courses")
# ### end Alembic commands ###


@ -0,0 +1,246 @@
"""Initial migration
Revision ID: 515dc3f52c6d
Revises:
Create Date: 2022-09-18 00:30:56.348634
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "515dc3f52c6d"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"custom_commands",
sa.Column("command_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("indexed_name", sa.Text(), nullable=False),
sa.Column("response", sa.Text(), nullable=False),
sa.PrimaryKeyConstraint("command_id"),
sa.UniqueConstraint("name"),
)
with op.batch_alter_table("custom_commands", schema=None) as batch_op:
batch_op.create_index(batch_op.f("ix_custom_commands_indexed_name"), ["indexed_name"], unique=False)
op.create_table(
"dad_jokes",
sa.Column("dad_joke_id", sa.Integer(), nullable=False),
sa.Column("joke", sa.Text(), nullable=False),
sa.PrimaryKeyConstraint("dad_joke_id"),
)
op.create_table(
"links",
sa.Column("link_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("url", sa.Text(), nullable=False),
sa.PrimaryKeyConstraint("link_id"),
sa.UniqueConstraint("name"),
)
op.create_table(
"meme",
sa.Column("meme_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("template_id", sa.Integer(), nullable=False),
sa.Column("field_count", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("meme_id"),
sa.UniqueConstraint("name"),
sa.UniqueConstraint("template_id"),
)
op.create_table(
"tasks",
sa.Column("task_id", sa.Integer(), nullable=False),
sa.Column("task", sa.Enum("BIRTHDAYS", "SCHEDULES", "UFORA_ANNOUNCEMENTS", name="tasktype"), nullable=False),
sa.Column("previous_run", sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("task_id"),
sa.UniqueConstraint("task"),
)
op.create_table(
"ufora_courses",
sa.Column("course_id", sa.Integer(), nullable=False),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("code", sa.Text(), nullable=False),
sa.Column("year", sa.Integer(), nullable=False),
sa.Column("compulsory", sa.Boolean(), server_default="1", nullable=False),
sa.Column("role_id", sa.BigInteger(), nullable=True),
sa.Column("overarching_role_id", sa.BigInteger(), nullable=True),
sa.Column("log_announcements", sa.Boolean(), server_default="0", nullable=False),
sa.PrimaryKeyConstraint("course_id"),
sa.UniqueConstraint("code"),
sa.UniqueConstraint("name"),
)
op.create_table("users", sa.Column("user_id", sa.BigInteger(), nullable=False), sa.PrimaryKeyConstraint("user_id"))
op.create_table(
"wordle_word",
sa.Column("word_id", sa.Integer(), nullable=False),
sa.Column("word", sa.Text(), nullable=False),
sa.Column("day", sa.Date(), nullable=False),
sa.PrimaryKeyConstraint("word_id"),
sa.UniqueConstraint("day"),
)
op.create_table(
"bank",
sa.Column("bank_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("dinks", sa.BigInteger(), server_default="0", nullable=False),
sa.Column("invested", sa.BigInteger(), server_default="0", nullable=False),
sa.Column("interest_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("capacity_level", sa.Integer(), server_default="1", nullable=False),
sa.Column("rob_level", sa.Integer(), server_default="1", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("bank_id"),
)
op.create_table(
"birthdays",
sa.Column("birthday_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("birthday", sa.Date(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("birthday_id"),
)
op.create_table(
"bookmarks",
sa.Column("bookmark_id", sa.Integer(), nullable=False),
sa.Column("label", sa.Text(), nullable=False),
sa.Column("jump_url", sa.Text(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("bookmark_id"),
sa.UniqueConstraint("user_id", "label"),
)
op.create_table(
"custom_command_aliases",
sa.Column("alias_id", sa.Integer(), nullable=False),
sa.Column("alias", sa.Text(), nullable=False),
sa.Column("indexed_alias", sa.Text(), nullable=False),
sa.Column("command_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["command_id"],
["custom_commands.command_id"],
),
sa.PrimaryKeyConstraint("alias_id"),
sa.UniqueConstraint("alias"),
)
with op.batch_alter_table("custom_command_aliases", schema=None) as batch_op:
batch_op.create_index(batch_op.f("ix_custom_command_aliases_indexed_alias"), ["indexed_alias"], unique=False)
op.create_table(
"deadlines",
sa.Column("deadline_id", sa.Integer(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.Column("name", sa.Text(), nullable=False),
sa.Column("deadline", sa.DateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("deadline_id"),
)
op.create_table(
"nightly_data",
sa.Column("nightly_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("last_nightly", sa.Date(), nullable=True),
sa.Column("count", sa.Integer(), server_default="0", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("nightly_id"),
)
op.create_table(
"ufora_announcements",
sa.Column("announcement_id", sa.Integer(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.Column("publication_date", sa.Date(), nullable=True),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("announcement_id"),
)
op.create_table(
"ufora_course_aliases",
sa.Column("alias_id", sa.Integer(), nullable=False),
sa.Column("alias", sa.Text(), nullable=False),
sa.Column("course_id", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["course_id"],
["ufora_courses.course_id"],
),
sa.PrimaryKeyConstraint("alias_id"),
sa.UniqueConstraint("alias"),
)
op.create_table(
"wordle_guesses",
sa.Column("wordle_guess_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("guess", sa.Text(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("wordle_guess_id"),
)
op.create_table(
"wordle_stats",
sa.Column("wordle_stats_id", sa.Integer(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.Column("last_win", sa.Date(), nullable=True),
sa.Column("games", sa.Integer(), server_default="0", nullable=False),
sa.Column("wins", sa.Integer(), server_default="0", nullable=False),
sa.Column("current_streak", sa.Integer(), server_default="0", nullable=False),
sa.Column("highest_streak", sa.Integer(), server_default="0", nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("wordle_stats_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("wordle_stats")
op.drop_table("wordle_guesses")
op.drop_table("ufora_course_aliases")
op.drop_table("ufora_announcements")
op.drop_table("nightly_data")
op.drop_table("deadlines")
with op.batch_alter_table("custom_command_aliases", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_custom_command_aliases_indexed_alias"))
op.drop_table("custom_command_aliases")
op.drop_table("bookmarks")
op.drop_table("birthdays")
op.drop_table("bank")
op.drop_table("wordle_word")
op.drop_table("users")
op.drop_table("ufora_courses")
op.drop_table("tasks")
op.drop_table("meme")
op.drop_table("links")
op.drop_table("dad_jokes")
with op.batch_alter_table("custom_commands", schema=None) as batch_op:
batch_op.drop_index(batch_op.f("ix_custom_commands_indexed_name"))
op.drop_table("custom_commands")
sa.Enum("BIRTHDAYS", "SCHEDULES", "UFORA_ANNOUNCEMENTS", name="tasktype").drop(op.get_bind())
# ### end Alembic commands ###


@ -1,33 +0,0 @@
"""Add dad jokes
Revision ID: 581ae6511b98
Revises: 632b69cdadde
Create Date: 2022-07-15 23:37:08.147611
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "581ae6511b98"
down_revision = "632b69cdadde"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"dad_jokes",
sa.Column("dad_joke_id", sa.Integer(), nullable=False),
sa.Column("joke", sa.Text(), nullable=False),
sa.PrimaryKeyConstraint("dad_joke_id"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("dad_jokes")
# ### end Alembic commands ###


@ -1,28 +0,0 @@
"""Add missing defaults
Revision ID: 632b69cdadde
Revises: 8c4ad0a1d699
Create Date: 2022-07-03 16:29:07.387011
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '632b69cdadde'
down_revision = '8c4ad0a1d699'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###


@ -1,32 +0,0 @@
"""Move dinks over to Bank & add invested amount
Revision ID: 8c4ad0a1d699
Revises: 0d03c226d881
Create Date: 2022-07-03 16:27:11.330746
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8c4ad0a1d699'
down_revision = '0d03c226d881'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('bank', schema=None) as batch_op:
batch_op.add_column(sa.Column('invested', sa.BigInteger(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('bank', schema=None) as batch_op:
batch_op.drop_column('invested')
# ### end Alembic commands ###


@ -1,57 +0,0 @@
"""Add custom commands
Revision ID: b2d511552a1f
Revises: 4ec79dd5b191
Create Date: 2022-06-21 22:10:05.590846
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b2d511552a1f'
down_revision = '4ec79dd5b191'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('custom_commands',
sa.Column('command_id', sa.Integer(), nullable=False),
sa.Column('name', sa.Text(), nullable=False),
sa.Column('indexed_name', sa.Text(), nullable=False),
sa.Column('response', sa.Text(), nullable=False),
sa.PrimaryKeyConstraint('command_id'),
sa.UniqueConstraint('name')
)
with op.batch_alter_table('custom_commands', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_custom_commands_indexed_name'), ['indexed_name'], unique=False)
op.create_table('custom_command_aliases',
sa.Column('alias_id', sa.Integer(), nullable=False),
sa.Column('alias', sa.Text(), nullable=False),
sa.Column('indexed_alias', sa.Text(), nullable=False),
sa.Column('command_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['command_id'], ['custom_commands.command_id'], ),
sa.PrimaryKeyConstraint('alias_id'),
sa.UniqueConstraint('alias')
)
with op.batch_alter_table('custom_command_aliases', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_custom_command_aliases_indexed_alias'), ['indexed_alias'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('custom_command_aliases', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_custom_command_aliases_indexed_alias'))
op.drop_table('custom_command_aliases')
with op.batch_alter_table('custom_commands', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_custom_commands_indexed_name'))
op.drop_table('custom_commands')
# ### end Alembic commands ###


@ -1,40 +0,0 @@
"""Bookmarks
Revision ID: f5da771a155d
Revises: 38b7c29f10ee
Create Date: 2022-08-30 01:08:54.323883
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "f5da771a155d"
down_revision = "38b7c29f10ee"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"bookmarks",
sa.Column("bookmark_id", sa.Integer(), nullable=False),
sa.Column("label", sa.Text(), nullable=False),
sa.Column("jump_url", sa.Text(), nullable=False),
sa.Column("user_id", sa.BigInteger(), nullable=True),
sa.ForeignKeyConstraint(
["user_id"],
["users.user_id"],
),
sa.PrimaryKeyConstraint("bookmark_id"),
sa.UniqueConstraint("user_id", "label"),
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("bookmarks")
# ### end Alembic commands ###


@ -5,7 +5,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from database.schemas import UforaCourse, UforaCourseAlias
__all__ = ["get_all_courses", "get_course_by_code", "get_course_by_name"]
async def get_all_courses(session: AsyncSession) -> list[UforaCourse]:
@ -14,6 +14,12 @@ async def get_all_courses(session: AsyncSession) -> list[UforaCourse]:
return list((await session.execute(statement)).scalars().all())
async def get_course_by_code(session: AsyncSession, code: str) -> Optional[UforaCourse]:
"""Try to find a course by its code"""
statement = select(UforaCourse).where(UforaCourse.code == code)
return (await session.execute(statement)).scalar_one_or_none()
async def get_course_by_name(session: AsyncSession, query: str) -> Optional[UforaCourse]:
"""Try to find a course by its name


@ -10,4 +10,5 @@ class TaskType(enum.IntEnum):
"""Enum for the different types of tasks""" """Enum for the different types of tasks"""
BIRTHDAYS = enum.auto() BIRTHDAYS = enum.auto()
SCHEDULES = enum.auto()
UFORA_ANNOUNCEMENTS = enum.auto()


@ -197,6 +197,9 @@ class UforaCourse(Base):
name: str = Column(Text, nullable=False, unique=True)
code: str = Column(Text, nullable=False, unique=True)
year: int = Column(Integer, nullable=False)
compulsory: bool = Column(Boolean, server_default="1", nullable=False)
role_id: Optional[int] = Column(BigInteger, nullable=True, unique=False)
overarching_role_id: Optional[int] = Column(BigInteger, nullable=True, unique=False)
log_announcements: bool = Column(Boolean, server_default="0", nullable=False)
announcements: list[UforaAnnouncement] = relationship(



@ -0,0 +1,23 @@
from sqlalchemy.ext.asyncio import AsyncSession
from database.engine import DBSession
from database.schemas import UforaCourse
__all__ = ["main"]
async def main():
"""Example script: add a Ufora course"""
session: AsyncSession
async with DBSession() as session:
modsim = UforaCourse(
course_id=439235,
code="C003786",
name="Modelleren en Simuleren",
year=3,
compulsory=False,
role_id=785577582561067028,
)
session.add_all([modsim])
await session.commit()


@ -53,6 +53,15 @@ class Owner(commands.Cog):
"""Raise an exception for debugging purposes""" """Raise an exception for debugging purposes"""
raise Exception(message) raise Exception(message)
@commands.command(name="Reload")
async def reload(self, ctx: commands.Context, *cogs: str):
"""Reload the cogs passed as an argument"""
for cog in cogs:
await self.client.reload_extension(f"didier.cogs.{cog}")
await self.client.confirm_message(ctx.message)
return await ctx.reply(f"Successfully reloaded {', '.join(cogs)}.", mention_author=False)
@commands.command(name="Sync") @commands.command(name="Sync")
async def sync( async def sync(
self, self,


@ -11,9 +11,12 @@ from didier import Didier
from didier.data.apis.hydra import fetch_menu
from didier.data.embeds.deadlines import Deadlines
from didier.data.embeds.hydra import no_menu_found
from didier.data.embeds.schedules import Schedule, get_schedule_for_user
from didier.exceptions import HTTPException, NotInMainGuildException
from didier.utils.discord.converters.time import DateTransformer
from didier.utils.discord.flags.school import StudyGuideFlags
from didier.utils.discord.users import to_main_guild_member
from didier.utils.types.datetime import skip_weekends
class School(commands.Cog):
@ -33,6 +36,30 @@ class School(commands.Cog):
embed = Deadlines(deadlines).to_embed()
await ctx.reply(embed=embed, mention_author=False, ephemeral=False)
@commands.hybrid_command(
name="les", description="Show your personalized schedule for a given day.", aliases=["Sched", "Schedule"]
)
@app_commands.rename(day_dt="date")
async def les(self, ctx: commands.Context, day_dt: Optional[app_commands.Transform[date, DateTransformer]] = None):
"""Show your personalized schedule for a given day."""
if day_dt is None:
day_dt = date.today()
day_dt = skip_weekends(day_dt)
async with ctx.typing():
try:
member_instance = to_main_guild_member(self.client, ctx.author)
# Always make sure there is at least one schedule in case it returns None
# this allows proper error messages
schedule = get_schedule_for_user(self.client, member_instance, day_dt) or Schedule()
return await ctx.reply(embed=schedule.to_embed(day=day_dt), mention_author=False)
except NotInMainGuildException:
return await ctx.reply(f"You are not a member of {self.client.main_guild.name}.", mention_author=False)
@commands.hybrid_command(
name="menu",
description="Show the menu in the Ghent University restaurants.",


@ -11,6 +11,7 @@ from database.crud.birthdays import get_birthdays_on_day
from database.crud.ufora_announcements import remove_old_announcements
from database.crud.wordle import set_daily_word
from didier import Didier
from didier.data.embeds.schedules import Schedule, parse_schedule_from_content
from didier.data.embeds.ufora.announcements import fetch_ufora_announcements
from didier.decorators.tasks import timed_task
from didier.utils.discord.checks import is_owner
@ -41,6 +42,7 @@ class Tasks(commands.Cog):
self._tasks = {
"birthdays": self.check_birthdays,
"schedules": self.pull_schedules,
"ufora": self.pull_ufora_announcements, "ufora": self.pull_ufora_announcements,
"remove_ufora": self.remove_old_ufora_announcements, "remove_ufora": self.remove_old_ufora_announcements,
"wordle": self.reset_wordle_word, "wordle": self.reset_wordle_word,
@ -59,6 +61,7 @@ class Tasks(commands.Cog):
# Start other tasks
self.reset_wordle_word.start()
self.pull_schedules.start()
@overrides
def cog_unload(self) -> None:
@ -110,6 +113,44 @@ class Tasks(commands.Cog):
async def _before_check_birthdays(self):
await self.client.wait_until_ready()
@tasks.loop(time=DAILY_RESET_TIME)
@timed_task(enums.TaskType.SCHEDULES)
async def pull_schedules(self, **kwargs):
"""Task that pulls the schedules & saves the files locally
Schedules are then parsed & cached in memory
"""
_ = kwargs
new_schedules: dict[settings.ScheduleType, Schedule] = {}
async with self.client.postgres_session as session:
for data in settings.SCHEDULE_DATA:
if data.schedule_url is None:
return
async with self.client.http_session.get(data.schedule_url) as response:
# If a schedule couldn't be fetched, log it and move on
if response.status != 200:
await self.client.log_warning(
f"Unable to fetch schedule {data.name} (status {response.status}).", log_to_discord=False
)
continue
# Write the content to a file
content = await response.text()
with open(f"files/schedules/{data.name}.ics", "w+") as fp:
fp.write(content)
schedule = await parse_schedule_from_content(content, database_session=session)
if schedule is None:
continue
new_schedules[data.name] = schedule
# Only replace cached version if all schedules succeeded
self.client.schedules = new_schedules
@tasks.loop(minutes=10)
@timed_task(enums.TaskType.UFORA_ANNOUNCEMENTS)
async def pull_ufora_announcements(self, **kwargs):
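The new pull_schedules task is bound to DAILY_RESET_TIME, which is not part of this diff. A minimal standalone sketch of the same discord.py pattern (a loop that fires once a day at a fixed time); the time value and cog name below are made up for illustration:

import datetime

from discord.ext import commands, tasks

# Placeholder value; the real DAILY_RESET_TIME is defined elsewhere in the repo
DAILY_RESET_TIME = datetime.time(hour=0, minute=0, tzinfo=datetime.timezone.utc)


class Example(commands.Cog):
    def __init__(self, client: commands.Bot):
        self.client = client
        self.daily_job.start()

    def cog_unload(self) -> None:
        self.daily_job.cancel()

    @tasks.loop(time=DAILY_RESET_TIME)
    async def daily_job(self):
        # Runs once per day at DAILY_RESET_TIME
        ...

    @daily_job.before_loop
    async def _before_daily_job(self):
        # Don't fire before the bot's cache is ready
        await self.client.wait_until_ready()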


@ -1,6 +1,6 @@
import settings
# The year in which we were in 1Ba
FIRST_YEAR = 2019
# Year to use when adding the current year of our education
# to find the academic year


@ -0,0 +1,220 @@
from __future__ import annotations
import pathlib
import re
from dataclasses import dataclass, field
from datetime import date, datetime
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from didier import Didier
import discord
from ics import Calendar
from overrides import overrides
from sqlalchemy.ext.asyncio import AsyncSession
from database.crud.ufora_courses import get_course_by_code
from database.schemas import UforaCourse
from didier.data.embeds.base import EmbedBaseModel
from didier.utils.discord import colours
from didier.utils.types.datetime import LOCAL_TIMEZONE, int_to_weekday, time_string
from didier.utils.types.string import leading
from settings import ScheduleType
__all__ = ["Schedule", "get_schedule_for_user", "parse_schedule_from_content", "parse_schedule"]
@dataclass
class Schedule(EmbedBaseModel):
"""An entire schedule"""
slots: set[ScheduleSlot] = field(default_factory=set)
def __add__(self, other) -> Schedule:
"""Combine schedules using the + operator"""
if not isinstance(other, Schedule):
raise TypeError("Argument to __add__ must be a Schedule")
return Schedule(slots=self.slots.union(other.slots))
def __bool__(self) -> bool:
"""Make empty schedules falsy"""
return bool(self.slots)
def on_day(self, day: date) -> Schedule:
"""Only show courses on a given day"""
return Schedule(set(filter(lambda slot: slot.start_time.date() == day, self.slots)))
def personalize(self, roles: set[int]) -> Schedule:
"""Personalize a schedule for a user, only adding courses they follow"""
personal_slots = set()
for slot in self.slots:
role_found = slot.role_id is not None and slot.role_id in roles
overarching_role_found = slot.overarching_role_id is not None and slot.overarching_role_id in roles
if role_found or overarching_role_found:
personal_slots.add(slot)
return Schedule(personal_slots)
@overrides
def to_embed(self, **kwargs) -> discord.Embed:
day: date = kwargs.get("day", date.today())
day_str = f"{leading('0', str(day.day))}/{leading('0', str(day.month))}/{leading('0', str(day.year))}"
embed = discord.Embed(title=f"Schedule - {int_to_weekday(day.weekday())} {day_str}")
if self:
embed.colour = colours.ghent_university_blue()
else:
embed.colour = colours.error_red()
embed.description = (
"No planned classes found.\n\n"
"In case this doesn't seem right, "
"make sure that you've got the roles of all of courses that you're taking on.\n\n"
"In case it does, enjoy your day off!"
)
return embed
slots_sorted = sorted(list(self.slots), key=lambda k: k.start_time)
description_data = []
for slot in slots_sorted:
description_data.append(
f"{time_string(slot.start_time)} - {time_string(slot.end_time)}: {slot.course.name} "
f"in **{slot.location}**"
)
embed.description = "\n".join(description_data)
return embed
@dataclass
class ScheduleSlot:
"""A slot in the schedule"""
course: UforaCourse
start_time: datetime
end_time: datetime
location: str
_hash: int = field(init=False)
def __post_init__(self):
"""Fix some properties to display more nicely"""
# Re-format the location data
room, building, campus = re.search(r"(.*)\. Gebouw (.*)\. Campus (.*)\. ", self.location).groups()
room = room.replace("PC / laptoplokaal ", "PC-lokaal")
self.location = f"{campus} {building} {room}"
self._hash = hash(f"{self.course.course_id} {str(self.start_time)}")
@property
def overarching_role_id(self) -> Optional[int]:
"""Shortcut to getting the overarching role id for this slot"""
return self.course.overarching_role_id
@property
def role_id(self) -> Optional[int]:
"""Shortcut to getting the role id for this slot"""
return self.course.role_id
@overrides
def __hash__(self) -> int:
return self._hash
@overrides
def __eq__(self, other):
if not isinstance(other, ScheduleSlot):
return False
return self._hash == other._hash
def get_schedule_for_user(client: Didier, member: discord.Member, day_dt: date) -> Optional[Schedule]:
"""Get a user's schedule"""
roles: set[int] = {role.id for role in member.roles}
main_schedule: Optional[Schedule] = None
for schedule in client.schedules.values():
personalized_schedule = schedule.on_day(day_dt).personalize(roles)
if not personalized_schedule:
continue
# Add the personalized one to the current main schedule
if main_schedule is None:
main_schedule = personalized_schedule
else:
main_schedule = main_schedule + personalized_schedule
return main_schedule
def parse_course_code(summary: str) -> str:
"""Parse a course's code out of the summary"""
code = re.search(r"^([^ ]+)\. ", summary)
if code is None:
return summary
code_group = code.groups()[0]
# Strip off last character as it's not relevant
if code_group[-1].isalpha():
return code_group[:-1]
return code_group
def parse_time_string(string: str) -> datetime:
"""Parse an ISO string to a timezone-aware datetime instance"""
return datetime.fromisoformat(string).astimezone(LOCAL_TIMEZONE)
async def parse_schedule_from_content(content: str, *, database_session: AsyncSession) -> Schedule:
"""Parse a schedule file, taking the file content as an argument
This can be used to avoid unnecessarily opening the file again if you already have its contents
"""
calendar = Calendar(content)
events = list(calendar.events)
course_codes: dict[str, UforaCourse] = {}
slots: set[ScheduleSlot] = set()
for event in events:
code = parse_course_code(event.name)
if code not in course_codes:
course = await get_course_by_code(database_session, code)
if course is None:
# raise ValueError(f"Unable to find course with code {code} (event {event.name})") # noqa: E800
continue # TODO uncomment the line above after all courses have been added
course_codes[code] = course
# Overwrite the name to be the sanitized value
event.name = code
slot = ScheduleSlot(
course=course_codes[code],
start_time=parse_time_string(str(event.begin)),
end_time=parse_time_string(str(event.end)),
location=event.location,
)
slots.add(slot)
return Schedule(slots=slots)
async def parse_schedule(name: ScheduleType, *, database_session: AsyncSession) -> Optional[Schedule]:
"""Read and then parse a schedule file"""
schedule_path = pathlib.Path(f"files/schedules/{name}.ics")
if not schedule_path.exists():
return None
with open(schedule_path, "r", encoding="utf-8") as fp:
return await parse_schedule_from_content(fp.read(), database_session=database_session)
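As a quick illustration of how parse_course_code strips a trailing group letter from an event summary (the summary strings below are made up):

from didier.data.embeds.schedules import parse_course_code

# "C003786A. Modelleren en Simuleren" -> the trailing group letter "A" is dropped
assert parse_course_code("C003786A. Modelleren en Simuleren") == "C003786"
# A summary without the expected "<code>. " prefix is returned unchanged
assert parse_course_code("Some other calendar event") == "Some other calendar event"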


@ -1,5 +1,7 @@
import logging
import os
import pathlib
from functools import cached_property
import discord
from aiohttp import ClientSession
@ -12,6 +14,7 @@ from database.crud import custom_commands
from database.engine import DBSession
from database.utils.caches import CacheManager
from didier.data.embeds.error_embed import create_error_embed
from didier.data.embeds.schedules import Schedule, parse_schedule
from didier.exceptions import HTTPException, NoMatch
from didier.utils.discord.prefix import get_prefix
@ -28,6 +31,7 @@ class Didier(commands.Bot):
error_channel: discord.abc.Messageable
initial_extensions: tuple[str, ...] = ()
http_session: ClientSession
schedules: dict[settings.ScheduleType, Schedule] = {}
wordle_words: set[str] = set()
def __init__(self):
@ -49,6 +53,11 @@ class Didier(commands.Bot):
self.tree.on_error = self.on_app_command_error
@cached_property
def main_guild(self) -> discord.Guild:
"""Obtain a reference to the main guild"""
return self.get_guild(settings.DISCORD_MAIN_GUILD)
@property
def postgres_session(self) -> AsyncSession:
"""Obtain a session for the PostgreSQL database"""
@ -59,6 +68,12 @@ class Didier(commands.Bot):
This hook is called once the bot is initialised
"""
# Create directories that are ignored on GitHub
self._create_ignored_directories()
# Load schedules
await self.load_schedules()
# Load the Wordle dictionary
self._load_wordle_words()
@ -67,19 +82,26 @@ class Didier(commands.Bot):
async with self.postgres_session as session:
await self.database_caches.initialize_caches(session)
# Create aiohttp session
self.http_session = ClientSession()
# Load extensions
await self._load_initial_extensions()
await self._load_directory_extensions("didier/cogs")
# Configure channel to send errors to
if settings.ERRORS_CHANNEL is not None:
self.error_channel = self.get_channel(settings.ERRORS_CHANNEL)
else:
self.error_channel = self.get_user(self.owner_id)
def _create_ignored_directories(self):
"""Create directories that store ignored data"""
ignored = ["files/schedules"]
for directory in ignored:
pathlib.Path(directory).mkdir(exist_ok=True, parents=True)
async def _load_initial_extensions(self):
"""Load all extensions that should be loaded before the others"""
for extension in self.initial_extensions:
@ -109,6 +131,18 @@ class Didier(commands.Bot):
for line in fp:
self.wordle_words.add(line.strip())
async def load_schedules(self):
"""Parse & load all schedules into memory"""
self.schedules = {}
async with self.postgres_session as session:
for schedule_data in settings.SCHEDULE_DATA:
schedule = await parse_schedule(schedule_data.name, database_session=session)
if schedule is None:
continue
self.schedules[schedule_data.name] = schedule
async def get_reply_target(self, ctx: commands.Context) -> discord.Message:
"""Get the target message that should be replied to
@ -138,13 +172,27 @@ class Didier(commands.Bot):
"""Add an X to a message""" """Add an X to a message"""
await message.add_reaction("") await message.add_reaction("")
async def _log(self, level: int, message: str, log_to_discord: bool = True):
"""Log a message to the logging file, and optionally to the configured channel"""
methods = {
logging.ERROR: logger.error,
logging.WARNING: logger.warning,
}
methods.get(level, logger.error)(message)
if log_to_discord:
# TODO pretty embed
# different colours per level?
await self.error_channel.send(message)
async def log_error(self, message: str, log_to_discord: bool = True):
"""Log an error message"""
await self._log(logging.ERROR, message, log_to_discord)
async def log_warning(self, message: str, log_to_discord: bool = True):
"""Log a warning message"""
await self._log(logging.WARNING, message, log_to_discord)
async def on_ready(self):
"""Event triggered when the bot is ready"""
print(settings.DISCORD_READY_MESSAGE)
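The level-based dispatch in the new _log helper can be illustrated in isolation (logger name and messages here are just examples):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("didier")

# Same idea as Didier._log: pick the logger method by level, fall back to logger.error
methods = {
    logging.ERROR: logger.error,
    logging.WARNING: logger.warning,
}

methods.get(logging.WARNING, logger.error)("Unable to fetch schedule ba3 (status 404).")
methods.get(logging.CRITICAL, logger.error)("Unknown levels fall back to logger.error.")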


@ -1,5 +1,6 @@
from .http_exception import HTTPException
from .missing_env import MissingEnvironmentVariable
from .no_match import NoMatch, expect
from .not_in_main_guild_exception import NotInMainGuildException
__all__ = ["HTTPException", "MissingEnvironmentVariable", "NoMatch", "expect"] __all__ = ["HTTPException", "MissingEnvironmentVariable", "NoMatch", "expect", "NotInMainGuildException"]


@ -0,0 +1,17 @@
from typing import Union
import discord
import settings
__all__ = ["NotInMainGuildException"]
class NotInMainGuildException(ValueError):
"""Exception raised when a user is not a member of the main guild"""
def __init__(self, user: Union[discord.User, discord.Member]):
super().__init__(
f"User {user.display_name} (id {user.id}) "
f"is not a member of the configured main guild (id {settings.DISCORD_MAIN_GUILD})."
)


@ -1,6 +1,10 @@
import discord
__all__ = ["error_red", "ghent_university_blue", "ghent_university_yellow", "google_blue", "urban_dictionary_green"]
def error_red() -> discord.Colour:
return discord.Colour.red()
def ghent_university_blue() -> discord.Colour:


@ -53,7 +53,7 @@ def date_converter(argument: Optional[str]) -> date:
raise commands.ArgumentParsingError(f"Unable to interpret `{original_argument}` as a date.")
class DateTransformer(commands.Converter, app_commands.Transformer):
"""Application commands transformer for dates"""
@overrides
@ -62,6 +62,10 @@ class DateTransformer(app_commands.Transformer):
) -> list[app_commands.Choice[Union[int, float, str]]]:
return autocomplete_day(str(value))
@overrides
async def convert(self, ctx: commands.Context, argument: str) -> datetime.date:
return date_converter(argument)
@overrides
async def transform(self, interaction: discord.Interaction, value: str) -> datetime.date:
return date_converter(value)


@ -0,0 +1,26 @@
from typing import Union
import discord
from didier import Didier
from didier.exceptions import NotInMainGuildException
__all__ = ["to_main_guild_member"]
def to_main_guild_member(client: Didier, user: Union[discord.User, discord.Member]) -> discord.Member:
"""Turn a discord.User into a discord.Member instance
This assumes the user is in CoC. If not, it raises a NotInMainGuildException
"""
main_guild = client.main_guild
# Already a discord.Member instance
if isinstance(user, discord.Member) and user.guild == main_guild:
return user
member = main_guild.get_member(user.id)
if member is None:
raise NotInMainGuildException(user)
return member


@ -8,9 +8,11 @@ __all__ = [
"forward_to_next_weekday", "forward_to_next_weekday",
"int_to_weekday", "int_to_weekday",
"parse_dm_string", "parse_dm_string",
"skip_weekends",
"str_to_date", "str_to_date",
"str_to_month", "str_to_month",
"str_to_weekday", "str_to_weekday",
"time_string",
"tz_aware_now", "tz_aware_now",
] ]
@ -86,6 +88,12 @@ def parse_dm_string(argument: str) -> datetime.date:
raise ValueError
def skip_weekends(dt_instance: datetime.date) -> datetime.date:
"""Fast-forward a date instance until its weekday is no longer a weekend"""
to_skip = (7 - dt_instance.weekday()) if dt_instance.weekday() > 4 else 0
return dt_instance + datetime.timedelta(days=to_skip)
def str_to_date(date_str: str, formats: Union[list[str], str] = "%d/%m/%Y") -> datetime.date:
"""Turn a string into a DD/MM/YYYY date"""
# Allow passing multiple formats in a list
@ -171,6 +179,11 @@ def str_to_weekday(argument: str) -> int:
raise ValueError
def time_string(dt_instance: datetime.datetime) -> str:
"""Get an HH:MM representation of a datetime instance"""
return dt_instance.strftime("%H:%M")
def tz_aware_now() -> datetime.datetime:
"""Get the current date & time, but timezone-aware"""
return datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone(LOCAL_TIMEZONE)
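A short illustration of the two new helpers; the dates are chosen around this commit's weekend:

import datetime

from didier.utils.types.datetime import skip_weekends, time_string

# Saturday 2022-09-17 and Sunday 2022-09-18 both roll forward to Monday 2022-09-19
assert skip_weekends(datetime.date(2022, 9, 17)) == datetime.date(2022, 9, 19)
assert skip_weekends(datetime.date(2022, 9, 18)) == datetime.date(2022, 9, 19)
# Weekdays are returned unchanged
assert skip_weekends(datetime.date(2022, 9, 19)) == datetime.date(2022, 9, 19)

# time_string renders a datetime as HH:MM, as used in the schedule embeds
assert time_string(datetime.datetime(2022, 9, 19, 8, 30)) == "08:30"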


@ -11,7 +11,10 @@ from didier import Didier
async def run_bot():
"""Run Didier"""
didier = Didier()
try:
await didier.start(settings.DISCORD_TOKEN)
finally:
await didier.http_session.close()
def setup_logging():


@ -38,7 +38,7 @@ plugins = [
"sqlalchemy.ext.mypy.plugin" "sqlalchemy.ext.mypy.plugin"
] ]
[[tool.mypy.overrides]] [[tool.mypy.overrides]]
module = ["discord.*", "feedparser.*", "markdownify.*", "motor.*"] module = ["discord.*", "feedparser.*", "ics.*", "markdownify.*"]
ignore_missing_imports = true ignore_missing_imports = true
[tool.pytest.ini_options] [tool.pytest.ini_options]
@ -50,7 +50,8 @@ env = [
"POSTGRES_PASS = pytest", "POSTGRES_PASS = pytest",
"POSTGRES_HOST = localhost", "POSTGRES_HOST = localhost",
"POSTGRES_PORT = 5433", "POSTGRES_PORT = 5433",
"DISCORD_TOKEN = token" "DISCORD_TOKEN = token",
"DISCORD_MAIN_GUILD = 123456789101112131415"
]
markers = [
"postgres: tests that use PostgreSQL"


@ -6,6 +6,7 @@ discord.py==2.0.1
git+https://github.com/Rapptz/discord-ext-menus@8686b5d
environs==9.5.0
feedparser==6.0.10
ics==0.7.2
markdownify==0.11.2
overrides==6.1.0
pydantic==1.9.1

run_db_scripts.py

@ -0,0 +1,28 @@
"""Script to run database-related scripts
This is slightly ugly, but running the scripts directly isn't possible because of imports
This could be cleaned up a bit using importlib but this is safer
"""
import asyncio
import sys
from typing import Callable
from database.scripts.db00_example import main as debug_add_courses
script_mapping: dict[str, Callable] = {"debug_add_courses.py": debug_add_courses}
if __name__ == "__main__":
scripts = sys.argv[1:]
if not scripts:
print("No scripts provided.", file=sys.stderr)
exit(1)
for script in scripts:
script_main = script_mapping.get(script.removeprefix("database/scripts/"), None)
if script_main is None:
print(f'Script "{script}" not found.', file=sys.stderr)
exit(1)
asyncio.run(script_main())
print(f"Successfully ran {script}")


@ -1,3 +1,5 @@
from dataclasses import dataclass
from enum import Enum
from typing import Optional
from environs import Env
@ -22,10 +24,15 @@ __all__ = [
"DISCORD_BOOS_REACT", "DISCORD_BOOS_REACT",
"DISCORD_CUSTOM_COMMAND_PREFIX", "DISCORD_CUSTOM_COMMAND_PREFIX",
"UFORA_ANNOUNCEMENTS_CHANNEL", "UFORA_ANNOUNCEMENTS_CHANNEL",
"BA3_ROLE",
"UFORA_RSS_TOKEN", "UFORA_RSS_TOKEN",
"URBAN_DICTIONARY_TOKEN", "URBAN_DICTIONARY_TOKEN",
"IMGFLIP_NAME", "IMGFLIP_NAME",
"IMGFLIP_PASSWORD", "IMGFLIP_PASSWORD",
"BA3_SCHEDULE_URL",
"ScheduleType",
"ScheduleInfo",
"SCHEDULE_DATA",
]
@ -35,6 +42,7 @@ TESTING: bool = env.bool("TESTING", False)
LOGFILE: str = env.str("LOGFILE", "didier.log")
SEMESTER: int = env.int("SEMESTER", 2)
YEAR: int = env.int("YEAR", 3)
MENU_TIMEOUT: int = env.int("MENU_TIMEOUT", 30)
"""Database""" """Database"""
# PostgreSQL # PostgreSQL
@ -48,6 +56,7 @@ POSTGRES_PORT: int = env.int("POSTGRES_PORT", "5432")
DISCORD_TOKEN: str = env.str("DISCORD_TOKEN")
DISCORD_READY_MESSAGE: str = env.str("DISCORD_READY_MESSAGE", "I'M READY I'M READY I'M READY")
DISCORD_STATUS_MESSAGE: str = env.str("DISCORD_STATUS_MESSAGE", "with your Didier Dinks.")
DISCORD_MAIN_GUILD: int = env.int("DISCORD_MAIN_GUILD")
DISCORD_TEST_GUILDS: list[int] = env.list("DISCORD_TEST_GUILDS", [], subcast=int)
DISCORD_OWNER_GUILDS: Optional[list[int]] = env.list("DISCORD_OWNER_GUILDS", [], subcast=int) or None
DISCORD_BOOS_REACT: str = env.str("DISCORD_BOOS_REACT", "<:boos:629603785840263179>")
@ -56,11 +65,45 @@ BIRTHDAY_ANNOUNCEMENT_CHANNEL: Optional[int] = env.int("BIRTHDAY_ANNOUNCEMENT_CH
ERRORS_CHANNEL: Optional[int] = env.int("ERRORS_CHANNEL", None)
UFORA_ANNOUNCEMENTS_CHANNEL: Optional[int] = env.int("UFORA_ANNOUNCEMENTS_CHANNEL", None)
"""Discord Role ID's"""
BA3_ROLE: Optional[int] = env.int("BA3_ROLE", 891743208248324196)
MA_CS_ROLE: Optional[int] = env.int("MA_CS_ROLE", None)
MA_CS_ENG_ROLE: Optional[int] = env.int("MA_CS_ENG_ROLE", None)
"""API Keys""" """API Keys"""
UFORA_RSS_TOKEN: Optional[str] = env.str("UFORA_RSS_TOKEN", None) UFORA_RSS_TOKEN: Optional[str] = env.str("UFORA_RSS_TOKEN", None)
URBAN_DICTIONARY_TOKEN: Optional[str] = env.str("URBAN_DICTIONARY_TOKEN", None) URBAN_DICTIONARY_TOKEN: Optional[str] = env.str("URBAN_DICTIONARY_TOKEN", None)
IMGFLIP_NAME: Optional[str] = env.str("IMGFLIP_NAME", None) IMGFLIP_NAME: Optional[str] = env.str("IMGFLIP_NAME", None)
IMGFLIP_PASSWORD: Optional[str] = env.str("IMGFLIP_PASSWORD", None) IMGFLIP_PASSWORD: Optional[str] = env.str("IMGFLIP_PASSWORD", None)
"""Schedule URLs"""
BA3_SCHEDULE_URL: Optional[str] = env.str("BA3_SCHEDULE_URL", None)
MA_CS_SCHEDULE_URL: Optional[str] = env.str("MA_CS_SCHEDULE_URL", None)
MA_CS_ENG_SCHEDULE_URL: Optional[str] = env.str("MA_CS_ENG_SCHEDULE_URL", None)
"""Computed properties"""
class ScheduleType(str, Enum):
"""Enum to differentiate schedules"""
BA3 = "ba3"
MA_CS = "ma_cs"
MA_CS_ENG = "ma_cs_eng"
@dataclass
class ScheduleInfo:
"""Dataclass to hold and combine some information about schedule-related settings"""
role_id: Optional[int]
schedule_url: Optional[str]
name: Optional[str] = None
SCHEDULE_DATA = [
ScheduleInfo(name=ScheduleType.BA3, role_id=BA3_ROLE, schedule_url=BA3_SCHEDULE_URL),
ScheduleInfo(name=ScheduleType.MA_CS, role_id=MA_CS_ROLE, schedule_url=MA_CS_SCHEDULE_URL),
ScheduleInfo(name=ScheduleType.MA_CS_ENG, role_id=MA_CS_ENG_ROLE, schedule_url=MA_CS_ENG_SCHEDULE_URL),
]
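For completeness, the environment variables the new schedule settings read, with placeholder values (of these, only DISCORD_MAIN_GUILD has no default; the role and URL variables fall back to the defaults shown above):

import os

# Placeholder values for illustration only; real deployments provide these via the environment
os.environ.setdefault("DISCORD_MAIN_GUILD", "123456789101112131415")
os.environ.setdefault("BA3_SCHEDULE_URL", "https://example.com/ba3.ics")
os.environ.setdefault("MA_CS_SCHEDULE_URL", "https://example.com/ma_cs.ics")
os.environ.setdefault("MA_CS_ENG_SCHEDULE_URL", "https://example.com/ma_cs_eng.ics")

Note that, as written above, pull_schedules returns early when it reaches a ScheduleInfo entry whose schedule_url is unset.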


@ -46,7 +46,7 @@ async def test_set_execution_time_exists(postgres: AsyncSession, task: Task, tas
await crud.set_last_task_execution_time(postgres, task_type)
await postgres.refresh(task)
assert task.previous_run == datetime.datetime(year=2022, month=7, day=24, tzinfo=datetime.timezone.utc)
@freeze_time("2022/07/24")
@ -60,4 +60,4 @@ async def test_set_execution_time_doesnt_exist(postgres: AsyncSession, task_type
results = list((await postgres.execute(statement)).scalars().all())
assert len(results) == 1
task = results[0]
assert task.previous_run == datetime.datetime(year=2022, month=7, day=24, tzinfo=datetime.timezone.utc)