From 5a2e2932e2291097f5ef73207e01f3a0ac6760f3 Mon Sep 17 00:00:00 2001 From: Julian Rother <julianr@fsmpi.rwth-aachen.de> Date: Sun, 4 Apr 2021 22:48:47 +0200 Subject: [PATCH] Added flask-migrate --- .gitlab-ci.yml | 6 + README.md | 11 +- create_db.py | 6 - migrations/README | 1 + migrations/alembic.ini | 45 +++++ migrations/env.py | 87 ++++++++++ migrations/script.py.mako | 24 +++ .../a29870f95175_initial_migration.py | 162 ++++++++++++++++++ profiling.py | 6 - requirements.txt | 5 + run.py | 15 -- uffd/__init__.py | 26 ++- 12 files changed, 362 insertions(+), 32 deletions(-) delete mode 100755 create_db.py create mode 100755 migrations/README create mode 100644 migrations/alembic.ini create mode 100755 migrations/env.py create mode 100755 migrations/script.py.mako create mode 100644 migrations/versions/a29870f95175_initial_migration.py delete mode 100755 profiling.py delete mode 100755 run.py diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 1e833fda..c301af2d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -12,6 +12,12 @@ before_script: - python3 -m pylint --version - python3 -m coverage --version +db_migrations_updated: + stage: test + script: + - FLASK_APP=uffd flask db upgrade + - FLASK_APP=uffd flask db migrate 2>&1 | grep -q 'No changes in schema detected' + linter: stage: test script: diff --git a/README.md b/README.md index 859d4e16..63dcebd5 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,7 @@ A web service to manage LDAP users, groups and permissions. - python3-ldap3 - python3-flask - python3-flask-sqlalchemy +- python3-flask-migrate - python3-qrcode - python3-fido2 (version 0.5.0, optional) - python3-flask-oauthlib @@ -18,6 +19,14 @@ You can also use virtualenv with the supplied `requirements.txt`. ## development +Before running uffd, you need to create the database with `flask db upgrade`. +Then use `flask run` to start the application: + +``` +FLASK_APP=uffd flask db upgrade +FLASK_APP=uffd FLASK_ENV=development flask run +``` + During development, you may want to enable LDAP mocking, as you otherwise need to have access to an actual LDAP server with the required schema. You can do so by setting `LDAP_SERVICE_MOCK=True` in the config. Afterwards you can login as a normal user with "testuser" and "userpassword", or as an admin with "testadmin" and "adminpassword". @@ -25,7 +34,7 @@ Please note that the mocked LDAP functionality is very limited and many uffd fea ## deployment -Use uwsgi. +Use uwsgi. Make sure to run `flask db upgrade` after every update! ## python style conventions diff --git a/create_db.py b/create_db.py deleted file mode 100755 index 5f799ab7..00000000 --- a/create_db.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env python3 -from uffd import * - -if __name__ == '__main__': - app = create_app() - init_db(app) diff --git a/migrations/README b/migrations/README new file mode 100755 index 00000000..98e4f9c4 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 00000000..f8ed4801 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,45 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100755 index 00000000..23663ff2 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,87 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig +import logging + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from flask import current_app +config.set_main_option('sqlalchemy.url', + current_app.config.get('SQLALCHEMY_DATABASE_URI')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure(url=url) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + engine = engine_from_config(config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + connection = engine.connect() + context.configure(connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args) + + try: + with context.begin_transaction(): + context.run_migrations() + finally: + connection.close() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100755 index 00000000..2c015630 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/a29870f95175_initial_migration.py b/migrations/versions/a29870f95175_initial_migration.py new file mode 100644 index 00000000..828fee28 --- /dev/null +++ b/migrations/versions/a29870f95175_initial_migration.py @@ -0,0 +1,162 @@ +"""Initial migration. + +Revision ID: a29870f95175 +Revises: +Create Date: 2021-04-04 22:46:24.930356 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'a29870f95175' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table('invite', + sa.Column('token', sa.String(length=128), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.Column('valid_until', sa.DateTime(), nullable=False), + sa.Column('single_use', sa.Boolean(), nullable=False), + sa.Column('allow_signup', sa.Boolean(), nullable=False), + sa.Column('used', sa.Boolean(), nullable=False), + sa.Column('disabled', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('token') + ) + op.create_table('mailToken', + sa.Column('token', sa.String(length=128), nullable=False), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('loginname', sa.String(length=32), nullable=True), + sa.Column('newmail', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('token') + ) + op.create_table('mfa_method', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype'), nullable=True), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('name', sa.String(length=128), nullable=True), + sa.Column('dn', sa.String(length=128), nullable=True), + sa.Column('recovery_salt', sa.String(length=64), nullable=True), + sa.Column('recovery_hash', sa.String(length=256), nullable=True), + sa.Column('totp_key', sa.String(length=64), nullable=True), + sa.Column('webauthn_cred', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('oauth2grant', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_dn', sa.String(length=128), nullable=True), + sa.Column('client_id', sa.String(length=40), nullable=True), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('redirect_uri', sa.String(length=255), nullable=True), + sa.Column('expires', sa.DateTime(), nullable=True), + sa.Column('_scopes', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + with op.batch_alter_table('oauth2grant', schema=None) as batch_op: + batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False) + + op.create_table('oauth2token', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_dn', sa.String(length=128), nullable=True), + sa.Column('client_id', sa.String(length=40), nullable=True), + sa.Column('token_type', sa.String(length=40), nullable=True), + sa.Column('access_token', sa.String(length=255), nullable=True), + sa.Column('refresh_token', sa.String(length=255), nullable=True), + sa.Column('expires', sa.DateTime(), nullable=True), + sa.Column('_scopes', sa.Text(), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('access_token'), + sa.UniqueConstraint('refresh_token') + ) + op.create_table('passwordToken', + sa.Column('token', sa.String(length=128), nullable=False), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('loginname', sa.String(length=32), nullable=True), + sa.PrimaryKeyConstraint('token') + ) + op.create_table('ratelimit_event', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('timestamp', sa.DateTime(), nullable=True), + sa.Column('name', sa.String(length=128), nullable=True), + sa.Column('key', sa.String(length=128), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('role', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('name', sa.String(length=32), nullable=True), + sa.Column('description', sa.Text(), 
nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name') + ) + op.create_table('signup', + sa.Column('token', sa.String(length=128), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.Column('loginname', sa.Text(), nullable=True), + sa.Column('displayname', sa.Text(), nullable=True), + sa.Column('mail', sa.Text(), nullable=True), + sa.Column('pwhash', sa.Text(), nullable=True), + sa.Column('user_dn', sa.String(length=128), nullable=True), + sa.Column('type', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('token') + ) + op.create_table('invite_grant', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('invite_token', sa.String(length=128), nullable=False), + sa.Column('user_dn', sa.String(length=128), nullable=False), + sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('invite_roles', + sa.Column('invite_token', sa.String(length=128), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.PrimaryKeyConstraint('invite_token', 'role_id') + ) + op.create_table('invite_signup', + sa.Column('token', sa.String(length=128), nullable=False), + sa.Column('invite_token', sa.String(length=128), nullable=False), + sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ), + sa.ForeignKeyConstraint(['token'], ['signup.token'], ), + sa.PrimaryKeyConstraint('token') + ) + op.create_table('role-group', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('dn', sa.String(length=128), nullable=True), + sa.Column('role_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('dn', 'role_id') + ) + op.create_table('role-user', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('dn', sa.String(length=128), nullable=True), + sa.Column('role_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('dn', 'role_id') + ) + + +def downgrade(): + op.drop_table('role-user') + op.drop_table('role-group') + op.drop_table('invite_signup') + op.drop_table('invite_roles') + op.drop_table('invite_grant') + op.drop_table('signup') + op.drop_table('role') + op.drop_table('ratelimit_event') + op.drop_table('passwordToken') + op.drop_table('oauth2token') + with op.batch_alter_table('oauth2grant', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('ix_oauth2grant_code')) + + op.drop_table('oauth2grant') + op.drop_table('mfa_method') + op.drop_table('mailToken') + op.drop_table('invite') diff --git a/profiling.py b/profiling.py deleted file mode 100755 index f64bd5c9..00000000 --- a/profiling.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/python3 -from werkzeug.contrib.profiler import ProfilerMiddleware -from uffd import create_app -app = create_app() -app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30]) -app.run(debug=True) diff --git a/requirements.txt b/requirements.txt index d2b69178..ff3e37c1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,8 @@ Flask-SQLAlchemy==2.1 qrcode==6.1 fido2==0.5.0 Flask-OAuthlib==0.9.5 +Flask-Migrate==2.1.1 +alembic==1.0.0 # The main dependencies on their own lead to version collisions and pip is # not very good at resolving them, so 
we pin the versions from Debian Buster @@ -28,6 +30,9 @@ six==1.12.0 SQLAlchemy==1.2.18 urllib3==1.24.1 Werkzeug==0.14.1 +python-dateutil==2.7.3 +#editor==1.0.3 +Mako==1.0.7 # Testing pytest==3.10.1 diff --git a/run.py b/run.py deleted file mode 100755 index b8ccfa5b..00000000 --- a/run.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python3 -from werkzeug.serving import make_ssl_devcert - -from uffd import * - -if __name__ == '__main__': - app = create_app() - init_db(app) - print(app.url_map) - if not os.path.exists('devcert.crt') or not os.path.exists('devcert.key'): - make_ssl_devcert('devcert') - # WebAuthn requires https and a hostname (not just an IP address). If you - # don't want to test U2F/FIDO2 device registration/authorization, you can - # safely remove `host` and `ssl_context`. - app.run(threaded=True, debug=True, host='localhost', ssl_context=('devcert.crt', 'devcert.key')) diff --git a/uffd/__init__.py b/uffd/__init__.py index f0828bb2..4a859d03 100644 --- a/uffd/__init__.py +++ b/uffd/__init__.py @@ -4,6 +4,9 @@ import sys from flask import Flask, redirect, url_for from werkzeug.routing import IntegerConverter +from werkzeug.serving import make_ssl_devcert +from werkzeug.contrib.profiler import ProfilerMiddleware +from flask_migrate import Migrate sys.path.append('deps/ldapalchemy') @@ -42,6 +45,7 @@ def create_app(test_config=None): # pylint: disable=too-many-locals pass db.init_app(app) + Migrate(app, db, render_as_batch=True) # pylint: disable=C0415 from uffd import user, selfservice, role, mail, session, csrf, mfa, oauth2, services, signup, invite # pylint: enable=C0415 @@ -53,8 +57,22 @@ def create_app(test_config=None): # pylint: disable=too-many-locals def index(): #pylint: disable=unused-variable return redirect(url_for('selfservice.index')) - return app + @app.cli.command("gendevcert", help='Generates a self-signed TLS certificate for development') + def gendevcert(): #pylint: disable=unused-variable + if os.path.exists('devcert.crt') or os.path.exists('devcert.key'): + print('Refusing to overwrite existing "devcert.crt"/"devcert.key" file!') + return + make_ssl_devcert('devcert') + print('Certificate written to "devcert.crt", private key to "devcert.key".') + print('Run `flask run --cert devcert.crt --key devcert.key` to use it.') + + @app.cli.command("profile", help='Runs app with profiler') + def profile(): #pylint: disable=unused-variable + # app.run() is silently ignored if executed from commands. We really want + # to do this, so we overwrite the check by overwriting the environment + # variable. + os.environ['FLASK_RUN_FROM_CLI'] = 'false' + app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30]) + app.run(debug=True) -def init_db(app): - with app.app_context(): - db.create_all() + return app -- GitLab
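
The workflow this patch switches to, in brief — a sketch based on the Flask-Migrate CLI already used in the README and the new `db_migrations_updated` CI job above; the `-m` message below is illustrative:

```
# Create or upgrade the database schema (replaces the removed create_db.py / init_db()).
FLASK_APP=uffd flask db upgrade

# After changing models, autogenerate a migration and review it before committing.
# The new db_migrations_updated CI job fails when this step still detects schema
# changes, i.e. when a model change was committed without a matching migration.
FLASK_APP=uffd flask db migrate -m "describe the change"
```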