Compare commits
No commits in common. "ec54e31b5c5f480df62ac25fd84af76136ca83aa" and "72bf022345d15512737859cdc9d0bb7962073728" have entirely different histories.
ec54e31b5c...72bf022345
4 changed files with 85 additions and 95 deletions
@@ -31,7 +31,7 @@ WITHOUT ROWID;
 CREATE TABLE users (
     user_id INTEGER NOT NULL PRIMARY KEY,
     disabled INTEGER NOT NULL DEFAULT 0 CHECK(disabled IN (0, 1)),
-    use_embed INTEGER NOT NULL DEFAULT 1 CHECK(use_embed IN (0, 1)),
+    use_embeds INTEGER NOT NULL DEFAULT 1 CHECK(use_embeds IN (0, 1)),
     notify_self INTEGER NOT NULL DEFAULT 0 CHECK(notify_self IN (0, 1)),
     bots_notify INTEGER NOT NULL DEFAULT 0 CHECK(bots_notify IN (0, 1))
 )
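One consequence of renaming use_embed to use_embeds: the schema migrator later in this compare copies only the columns common to the old and new table definitions, so the renamed column starts out at its DEFAULT instead of inheriting the old values. A minimal sketch of that copy step, with a trimmed-down users table and hypothetical data:

```python
import sqlite3

con = sqlite3.connect(":memory:")

# Old definition and one row with the flag cleared (hypothetical data)
con.execute("CREATE TABLE users (user_id INTEGER NOT NULL PRIMARY KEY,"
            " use_embed INTEGER NOT NULL DEFAULT 1 CHECK(use_embed IN (0, 1)))")
con.execute("INSERT INTO users (user_id, use_embed) VALUES (1, 0)")

# New definition; the migrator builds it as users_migration_new and copies
# only the intersection of the two column sets (here just user_id)
con.execute("CREATE TABLE users_migration_new (user_id INTEGER NOT NULL PRIMARY KEY,"
            " use_embeds INTEGER NOT NULL DEFAULT 1 CHECK(use_embeds IN (0, 1)))")
con.execute("INSERT INTO users_migration_new (user_id) SELECT user_id FROM users")

print(con.execute("SELECT user_id, use_embeds FROM users_migration_new").fetchall())
# [(1, 1)] -- the renamed flag falls back to its DEFAULT, not the old value 0
```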
@@ -1,6 +1,7 @@
 import io
 import logging
 import os
+import pprint
 import sys
 import textwrap
 import traceback
@@ -26,7 +27,8 @@ log.setLevel(logging.DEBUG)
 formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s: %(message)s")
 handler = logging.StreamHandler(sys.stdout)
 handler.setFormatter(formatter)
-logging.getLogger(None).addHandler(handler)
+logger_disnake.addHandler(handler)
+log.addHandler(handler)

 if load_dotenv(find_dotenv(usecwd=True)):
     log.debug("Loaded .env")
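The change above moves the stream handler from the root logger onto the disnake logger and the bot's own logger. Because records still propagate up to the root logger, keeping handlers in both places would emit every record twice; a small illustration of that propagation behaviour (the logger name "nomen" is only illustrative):

```python
import logging
import sys

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter("%(name)s: %(message)s"))

logging.getLogger(None).addHandler(handler)  # root handler (the old wiring)
log = logging.getLogger("nomen")
log.addHandler(handler)                      # child handler (the new wiring)

log.warning("hello")
# Printed twice: once by the handler on "nomen", once again after the record
# propagates to the root logger's handler.
```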
@@ -60,7 +62,7 @@ class Nomen(Bot):
             intents=options.get("intents"),
         )

-        self.db = None  # Setup in start
+        self.db = self.loop.run_until_complete(setup_db(DB_FILE))
         self.prefixes = {}

     async def get_guild_prefix(self, guild: Guild):
@@ -84,10 +86,6 @@ class Nomen(Bot):
         )
         self.prefixes[guild.id] = prefix

-    async def start(self, *args, **kwargs):
-        self.db = await setup_db(DB_FILE)
-        await super().start(*args, **kwargs)
-
     async def close(self):
         await super().close()
         await self.db.close()
@@ -221,10 +219,6 @@ async def prefix(ctx, prefix=None):


 def run():
-    try:
-        if run_db_migrations(DB_FILE):
-            log.info(f"Migrated DB {DB_FILE}")
-    except RuntimeError:
-        pass
-    else:
-        bot.run(TOKEN)
+    if run_db_migrations(DB_FILE):
+        log.info(f"Migrated DB {DB_FILE}")
+    bot.run(TOKEN)

@@ -7,8 +7,6 @@ See <https://david.rothlis.net/declarative-schema-migration-for-sqlite>.
 Author: William Manley <will@stb-tester.com>.
 Copyright © 2019-2022 Stb-tester.com Ltd.
 License: MIT.
-
-Modified to ignore internal tables
 """

 import logging
@@ -16,12 +14,6 @@ import re
 import sqlite3
 from textwrap import dedent

-INTERNAL_TABLES = {
-    "sqlite_stat1",
-}
-
-log = logging.getLogger("migrator")
-

 def dumb_migrate_db(db, schema, allow_deletions=False):
     """
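For context, dumb_migrate_db (signature shown above) diffs a live SQLite database against a pristine copy built from the schema text and applies the changes in place. A usage sketch, assuming db is a sqlite3.Connection and schema is the declarative CREATE TABLE SQL as in the upstream module; the module and file names here are placeholders:

```python
import sqlite3

from migrations import dumb_migrate_db  # hypothetical module name

with open("schema.sql") as f:           # hypothetical path to the declarative schema
    schema = f.read()

db = sqlite3.connect("nomen.db")        # hypothetical database file
# With allow_deletions=False (the default) the migrator raises rather than
# dropping tables or columns that exist only in the live database.
dumb_migrate_db(db, schema)
db.close()
```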
@@ -85,19 +77,21 @@ class DBMigrator:
             msg_argv += (args,)
         else:
             args = []
-        log.info(msg_tmpl, *msg_argv)
+        logging.info(msg_tmpl, *msg_argv)
         self.db.execute(sql, args)
         self.n_changes += 1

     def __enter__(self):
-        self.orig_foreign_keys = self.db.execute("PRAGMA foreign_keys").fetchone()[0]
+        self.orig_foreign_keys = (
+            self.db.execute("PRAGMA foreign_keys").fetchone()[0])
         if self.orig_foreign_keys:
-            self.log_execute("Disable foreign keys temporarily for migration", "PRAGMA foreign_keys = OFF")
+            self.log_execute("Disable foreign keys temporarily for migration",
+                             "PRAGMA foreign_keys = OFF")
             # This doesn't count as a change because we'll undo it at the end
             self.n_changes = 0

         self.db.__enter__()
-        self.db.execute("BEGIN")
+        self.db.execute('BEGIN')
         return self

     def __exit__(self, exc_type, exc_value, exc_tb):
@@ -109,7 +103,7 @@ class DBMigrator:
             # > constraint enforcement may only be enabled or disabled when
             # > there is no pending BEGIN or SAVEPOINT.
             old_changes = self.n_changes
-            new_val = self._migrate_pragma("foreign_keys")
+            new_val = self._migrate_pragma('foreign_keys')
             if new_val == self.orig_foreign_keys:
                 self.n_changes = old_changes

@@ -121,133 +115,142 @@ class DBMigrator:
                 self.db.execute("VACUUM")
         else:
             if self.orig_foreign_keys:
-                self.log_execute("Re-enable foreign keys after migration", "PRAGMA foreign_keys = ON")
+                self.log_execute(
+                    "Re-enable foreign keys after migration",
+                    "PRAGMA foreign_keys = ON")

     def migrate(self):
         # In CI the database schema may be changing all the time. This checks
         # the current db and if it doesn't match database.sql we will
         # modify it so it does match where possible.
-        pristine_tables = dict(
-            self.pristine.execute("""\
+        pristine_tables = dict(self.pristine.execute("""\
             SELECT name, sql FROM sqlite_master
-            WHERE type = \"table\" AND name != \"sqlite_sequence\"""").fetchall()
-        )
-        pristine_indices = dict(
-            self.pristine.execute("""\
+            WHERE type = \"table\" AND name != \"sqlite_sequence\"""").fetchall())
+        pristine_indices = dict(self.pristine.execute("""\
             SELECT name, sql FROM sqlite_master
-            WHERE type = \"index\"""").fetchall()
-        )
+            WHERE type = \"index\"""").fetchall())

-        tables = dict(
-            self.db.execute("""\
+        tables = dict(self.db.execute("""\
             SELECT name, sql FROM sqlite_master
-            WHERE type = \"table\" AND name != \"sqlite_sequence\"""").fetchall()
-        )
+            WHERE type = \"table\" AND name != \"sqlite_sequence\"""").fetchall())

-        new_tables = set(pristine_tables.keys()) - set(tables.keys()) - INTERNAL_TABLES
-        removed_tables = set(tables.keys()) - set(pristine_tables.keys()) - INTERNAL_TABLES
+        new_tables = set(pristine_tables.keys()) - set(tables.keys())
+        removed_tables = set(tables.keys()) - set(pristine_tables.keys())
         if removed_tables and not self.allow_deletions:
-            raise RuntimeError("Database migration: Refusing to delete tables %r" % removed_tables)
+            raise RuntimeError(
+                "Database migration: Refusing to delete tables %r" %
+                removed_tables)

         modified_tables = set(
-            name for name, sql in pristine_tables.items() if normalise_sql(tables.get(name, "")) != normalise_sql(sql)
-        )
+            name for name, sql in pristine_tables.items()
+            if normalise_sql(tables.get(name, "")) != normalise_sql(sql))

         # This PRAGMA is automatically disabled when the db is committed
         self.db.execute("PRAGMA defer_foreign_keys = TRUE")

         # New and removed tables are easy:
         for tbl_name in new_tables:
-            self.log_execute("Create table %s" % tbl_name, pristine_tables[tbl_name])
+            self.log_execute("Create table %s" % tbl_name,
+                             pristine_tables[tbl_name])
         for tbl_name in removed_tables:
-            self.log_execute("Drop table %s" % tbl_name, "DROP TABLE %s" % tbl_name)
+            self.log_execute("Drop table %s" % tbl_name,
+                             "DROP TABLE %s" % tbl_name)

         for tbl_name in modified_tables:
             # The SQLite documentation insists that we create the new table and
             # rename it over the old rather than moving the old out of the way
             # and then creating the new
             create_table_sql = pristine_tables[tbl_name]
-            create_table_sql = re.sub(r"\b%s\b" % re.escape(tbl_name), tbl_name + "_migration_new", create_table_sql)
-            self.log_execute("Columns change: Create table %s with updated schema" % tbl_name, create_table_sql)
+            create_table_sql = re.sub(r"\b%s\b" % re.escape(tbl_name),
+                                      tbl_name + "_migration_new",
+                                      create_table_sql)
+            self.log_execute(
+                "Columns change: Create table %s with updated schema" %
+                tbl_name, create_table_sql)

-            cols = set([x[1] for x in self.db.execute("PRAGMA table_info(%s)" % tbl_name)])
-            pristine_cols = set([x[1] for x in self.pristine.execute("PRAGMA table_info(%s)" % tbl_name)])
+            cols = set([
+                x[1] for x in self.db.execute(
+                    "PRAGMA table_info(%s)" % tbl_name)])
+            pristine_cols = set([
+                x[1] for x in
+                self.pristine.execute("PRAGMA table_info(%s)" % tbl_name)])

             removed_columns = cols - pristine_cols
             if not self.allow_deletions and removed_columns:
-                log.warning(
+                logging.warning(
                     "Database migration: Refusing to remove columns %r from "
                     "table %s. Current cols are %r attempting migration to %r",
-                    removed_columns,
-                    tbl_name,
-                    cols,
-                    pristine_cols,
-                )
+                    removed_columns, tbl_name, cols, pristine_cols)
                 raise RuntimeError(
-                    "Database migration: Refusing to remove columns %r from " "table %s" % (removed_columns, tbl_name)
-                )
+                    "Database migration: Refusing to remove columns %r from "
+                    "table %s" % (removed_columns, tbl_name))

-            log.info("cols: %s, pristine_cols: %s", cols, pristine_cols)
+            logging.info("cols: %s, pristine_cols: %s", cols, pristine_cols)
             self.log_execute(
-                "Migrate data for table %s" % tbl_name,
-                """\
+                "Migrate data for table %s" % tbl_name, """\
                 INSERT INTO {tbl_name}_migration_new ({common})
                 SELECT {common} FROM {tbl_name}""".format(
-                    tbl_name=tbl_name, common=", ".join(cols.intersection(pristine_cols))
-                ),
-            )
+                    tbl_name=tbl_name,
+                    common=", ".join(cols.intersection(pristine_cols))))

             # Don't need the old table any more
-            self.log_execute("Drop old table %s now data has been migrated" % tbl_name, "DROP TABLE %s" % tbl_name)
+            self.log_execute(
+                "Drop old table %s now data has been migrated" % tbl_name,
+                "DROP TABLE %s" % tbl_name)

             self.log_execute(
                 "Columns change: Move new table %s over old" % tbl_name,
-                "ALTER TABLE %s_migration_new RENAME TO %s" % (tbl_name, tbl_name),
-            )
+                "ALTER TABLE %s_migration_new RENAME TO %s" % (
+                    tbl_name, tbl_name))

         # Migrate the indices
-        indices = dict(
-            self.db.execute("""\
+        indices = dict(self.db.execute("""\
             SELECT name, sql FROM sqlite_master
-            WHERE type = \"index\"""").fetchall()
-        )
+            WHERE type = \"index\"""").fetchall())
         for name in set(indices.keys()) - set(pristine_indices.keys()):
-            self.log_execute("Dropping obsolete index %s" % name, "DROP INDEX %s" % name)
+            self.log_execute("Dropping obsolete index %s" % name,
+                             "DROP INDEX %s" % name)
         for name, sql in pristine_indices.items():
             if name not in indices:
                 self.log_execute("Creating new index %s" % name, sql)
             elif sql != indices[name]:
-                self.log_execute("Index %s changed: Dropping old version" % name, "DROP INDEX %s" % name)
-                self.log_execute("Index %s changed: Creating updated version in its place" % name, sql)
+                self.log_execute(
+                    "Index %s changed: Dropping old version" % name,
+                    "DROP INDEX %s" % name)
+                self.log_execute(
+                    "Index %s changed: Creating updated version in its place" %
+                    name, sql)

-        self._migrate_pragma("user_version")
+        self._migrate_pragma('user_version')

         if self.pristine.execute("PRAGMA foreign_keys").fetchone()[0]:
             if self.db.execute("PRAGMA foreign_key_check").fetchall():
-                raise RuntimeError("Database migration: Would fail foreign_key_check")
+                raise RuntimeError(
+                    "Database migration: Would fail foreign_key_check")

     def _migrate_pragma(self, pragma):
-        pristine_val = self.pristine.execute("PRAGMA %s" % pragma).fetchone()[0]
+        pristine_val = self.pristine.execute(
+            "PRAGMA %s" % pragma).fetchone()[0]
         val = self.db.execute("PRAGMA %s" % pragma).fetchone()[0]

         if val != pristine_val:
             self.log_execute(
-                "Set %s to %i from %i" % (pragma, pristine_val, val), "PRAGMA %s = %i" % (pragma, pristine_val)
-            )
+                "Set %s to %i from %i" % (pragma, pristine_val, val),
+                "PRAGMA %s = %i" % (pragma, pristine_val))

         return pristine_val


 def _left_pad(text, indent="    "):
     """Maybe I can find a package in pypi for this?"""
-    return "\n".join(indent + line for line in text.split("\n"))
+    return "\n".join(indent + line for line in text.split('\n'))


 def normalise_sql(sql):
     # Remove comments:
-    sql = re.sub(r"--[^\n]*\n", "", sql)
+    sql = re.sub(r'--[^\n]*\n', "", sql)
     # Normalise whitespace:
-    sql = re.sub(r"\s+", " ", sql)
+    sql = re.sub(r'\s+', " ", sql)
     sql = re.sub(r" *([(),]) *", r"\1", sql)
     # Remove unnecessary quotes
     sql = re.sub(r'"(\w+)"', r"\1", sql)
@@ -255,10 +258,8 @@ def normalise_sql(sql):


 def test_normalise_sql():
-    assert (
-        normalise_sql("""\
+    assert normalise_sql("""\
         CREATE TABLE "Node"( -- This is my table
             -- There are many like it but this one is mine
-            A b, C D, "E F G", h)""")
-        == 'CREATE TABLE Node(A b,C D,"E F G",h)'
-    )
+            A b, C D, "E F G", h)""") == \
+        'CREATE TABLE Node(A b,C D,"E F G",h)'

@@ -54,9 +54,9 @@ async def handle_triggers(ctx, message):

     disabled = await ctx.bot.db.execute_fetchall(
         "SELECT EXISTS(SELECT * FROM users WHERE user_id=:author AND disabled IS 1)", params
-    )
+    )[0]

-    if disabled[0][0]:
+    if disabled:
         log.debug(f"User {ctx.author} ({ctx.author.id}) opted out")
         return

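For reference on the shape of that result: SELECT EXISTS(...) yields a single row holding 0 or 1, so execute_fetchall returns a one-row list and the added [0] takes that row. A synchronous sqlite3 stand-in for the (presumably aiosqlite) call, with hypothetical data:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE users (user_id INTEGER NOT NULL PRIMARY KEY,"
            " disabled INTEGER NOT NULL DEFAULT 0)")
con.execute("INSERT INTO users (user_id, disabled) VALUES (1, 0)")

rows = con.execute(
    "SELECT EXISTS(SELECT * FROM users WHERE user_id=:author AND disabled IS 1)",
    {"author": 1},
).fetchall()

print(rows)        # [(0,)]
print(rows[0])     # (0,)  -- a one-element row; a non-empty tuple is always truthy
print(rows[0][0])  # 0     -- the actual EXISTS flag
```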
@@ -152,14 +152,12 @@ class Notifications(Cog):
         """

         if test_keyword(keyword, regex):
-            log.debug("Keyword too common")
             await ctx.send(f"{'Regex' if regex else 'Keyword'} matches a word that is too common")
             return

         conflicts = await fetch_unpacked(ctx.bot.db, existing, params)

         if conflicts:
-            log.debug("Keyword conflicts with existing keyword")
             await ctx.send(f"Any instance of `{keyword}` would be matched by existing keywords (check DMs)")
             await ctx.author.send(
                 f"Conflicts with keyword `{keyword}`:\n" + "\n".join(f"- `{conflict}`" for conflict in conflicts)
@@ -169,15 +167,12 @@ class Notifications(Cog):
         conflicts = await fetch_unpacked(ctx.bot.db, redundant, params)

         if conflicts:
-            log.debug("Keyword renders existing redundant")
             await ctx.send(f"Adding `{keyword}` will cause existing keywords to never match (check DMs)")
             await ctx.author.send(
                 f"Keywords redundant from `{keyword}`:\n" + "\n".join(f" - `{conflict}`" for conflict in conflicts)
             )
             return

-        log.debug("Keyword valid, adding")
-
         await ctx.bot.db.execute(
             "INSERT INTO keywords (guild_id, keyword, user_id, regex) VALUES (:guild_id, :keyword, :user_id, :regex)",
             params,