diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 1e833fdabc7c40d402a9de69ab1a1af58f047741..c301af2dd0cbd39beda03144d01dc0bf31cd5144 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -12,6 +12,12 @@ before_script:
   - python3 -m pylint --version
   - python3 -m coverage --version
 
+db_migrations_updated:
+  stage: test
+  script:
+  - FLASK_APP=uffd flask db upgrade
+  - FLASK_APP=uffd flask db migrate 2>&1 | grep -q 'No changes in schema detected'
+
 linter:
   stage: test
   script:
diff --git a/README.md b/README.md
index 859d4e16ccbc75f9bc331c0b77a7cd4bbfe534ba..63dcebd5754fe553c27e43b8875d2942b3516a59 100644
--- a/README.md
+++ b/README.md
@@ -8,6 +8,7 @@ A web service to manage LDAP users, groups and permissions.
 - python3-ldap3
 - python3-flask
 - python3-flask-sqlalchemy
+- python3-flask-migrate
 - python3-qrcode
 - python3-fido2 (version 0.5.0, optional)
 - python3-flask-oauthlib
@@ -18,6 +19,14 @@ You can also use virtualenv with the supplied `requirements.txt`.
 
 ## development
 
+Before running uffd, you need to create the database with `flask db upgrade`.
+Then use `flask run` to start the application:
+
+```
+FLASK_APP=uffd flask db upgrade
+FLASK_APP=uffd FLASK_ENV=development flask run
+```
+
 During development, you may want to enable LDAP mocking, as you otherwise need to have access to an actual LDAP server with the required schema.
 You can do so by setting `LDAP_SERVICE_MOCK=True` in the config.
 Afterwards you can login as a normal user with "testuser" and "userpassword", or as an admin with "testadmin" and "adminpassword".
@@ -25,7 +34,7 @@ Please note that the mocked LDAP functionality is very limited and many uffd fea
 
 ## deployment
 
-Use uwsgi.
+Use uwsgi. Make sure to run `flask db upgrade` after every update!
 
 ## python style conventions
 
diff --git a/create_db.py b/create_db.py
deleted file mode 100755
index 5f799ab7766baff18a36782281af12f644b18bec..0000000000000000000000000000000000000000
--- a/create_db.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python3
-from uffd import *
-
-if __name__ == '__main__':
-	app = create_app()
-	init_db(app)
diff --git a/migrations/README b/migrations/README
new file mode 100755
index 0000000000000000000000000000000000000000..98e4f9c44effe479ed38c66ba922e7bcc672916f
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 0000000000000000000000000000000000000000..f8ed4801f78bcb83cc6acb589508c1b24eda297a
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,45 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100755
index 0000000000000000000000000000000000000000..23663ff2f54e6c4425953537976b175246c8a9e6
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,87 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+import logging
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+from flask import current_app
+config.set_main_option('sqlalchemy.url',
+                       current_app.config.get('SQLALCHEMY_DATABASE_URI'))
+target_metadata = current_app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(url=url)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    engine = engine_from_config(config.get_section(config.config_ini_section),
+                                prefix='sqlalchemy.',
+                                poolclass=pool.NullPool)
+
+    connection = engine.connect()
+    context.configure(connection=connection,
+                      target_metadata=target_metadata,
+                      process_revision_directives=process_revision_directives,
+                      **current_app.extensions['migrate'].configure_args)
+
+    try:
+        with context.begin_transaction():
+            context.run_migrations()
+    finally:
+        connection.close()
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100755
index 0000000000000000000000000000000000000000..2c0156303a8df3ffdc9de87765bf801bf6bea4a5
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/a29870f95175_initial_migration.py b/migrations/versions/a29870f95175_initial_migration.py
new file mode 100644
index 0000000000000000000000000000000000000000..828fee28a5a4229cf83cde69ee9e5939eaba9bc8
--- /dev/null
+++ b/migrations/versions/a29870f95175_initial_migration.py
@@ -0,0 +1,162 @@
+"""Initial migration.
+
+Revision ID: a29870f95175
+Revises: 
+Create Date: 2021-04-04 22:46:24.930356
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'a29870f95175'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table('invite',
+    sa.Column('token', sa.String(length=128), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=False),
+    sa.Column('valid_until', sa.DateTime(), nullable=False),
+    sa.Column('single_use', sa.Boolean(), nullable=False),
+    sa.Column('allow_signup', sa.Boolean(), nullable=False),
+    sa.Column('used', sa.Boolean(), nullable=False),
+    sa.Column('disabled', sa.Boolean(), nullable=False),
+    sa.PrimaryKeyConstraint('token')
+    )
+    op.create_table('mailToken',
+    sa.Column('token', sa.String(length=128), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=True),
+    sa.Column('loginname', sa.String(length=32), nullable=True),
+    sa.Column('newmail', sa.String(length=255), nullable=True),
+    sa.PrimaryKeyConstraint('token')
+    )
+    op.create_table('mfa_method',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype'), nullable=True),
+    sa.Column('created', sa.DateTime(), nullable=True),
+    sa.Column('name', sa.String(length=128), nullable=True),
+    sa.Column('dn', sa.String(length=128), nullable=True),
+    sa.Column('recovery_salt', sa.String(length=64), nullable=True),
+    sa.Column('recovery_hash', sa.String(length=256), nullable=True),
+    sa.Column('totp_key', sa.String(length=64), nullable=True),
+    sa.Column('webauthn_cred', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('oauth2grant',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_dn', sa.String(length=128), nullable=True),
+    sa.Column('client_id', sa.String(length=40), nullable=True),
+    sa.Column('code', sa.String(length=255), nullable=False),
+    sa.Column('redirect_uri', sa.String(length=255), nullable=True),
+    sa.Column('expires', sa.DateTime(), nullable=True),
+    sa.Column('_scopes', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
+
+    op.create_table('oauth2token',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_dn', sa.String(length=128), nullable=True),
+    sa.Column('client_id', sa.String(length=40), nullable=True),
+    sa.Column('token_type', sa.String(length=40), nullable=True),
+    sa.Column('access_token', sa.String(length=255), nullable=True),
+    sa.Column('refresh_token', sa.String(length=255), nullable=True),
+    sa.Column('expires', sa.DateTime(), nullable=True),
+    sa.Column('_scopes', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('access_token'),
+    sa.UniqueConstraint('refresh_token')
+    )
+    op.create_table('passwordToken',
+    sa.Column('token', sa.String(length=128), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=True),
+    sa.Column('loginname', sa.String(length=32), nullable=True),
+    sa.PrimaryKeyConstraint('token')
+    )
+    op.create_table('ratelimit_event',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('timestamp', sa.DateTime(), nullable=True),
+    sa.Column('name', sa.String(length=128), nullable=True),
+    sa.Column('key', sa.String(length=128), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('role',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('name', sa.String(length=32), nullable=True),
+    sa.Column('description', sa.Text(), nullable=True),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('name')
+    )
+    op.create_table('signup',
+    sa.Column('token', sa.String(length=128), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=False),
+    sa.Column('loginname', sa.Text(), nullable=True),
+    sa.Column('displayname', sa.Text(), nullable=True),
+    sa.Column('mail', sa.Text(), nullable=True),
+    sa.Column('pwhash', sa.Text(), nullable=True),
+    sa.Column('user_dn', sa.String(length=128), nullable=True),
+    sa.Column('type', sa.String(length=50), nullable=True),
+    sa.PrimaryKeyConstraint('token')
+    )
+    op.create_table('invite_grant',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('invite_token', sa.String(length=128), nullable=False),
+    sa.Column('user_dn', sa.String(length=128), nullable=False),
+    sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_table('invite_roles',
+    sa.Column('invite_token', sa.String(length=128), nullable=False),
+    sa.Column('role_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.PrimaryKeyConstraint('invite_token', 'role_id')
+    )
+    op.create_table('invite_signup',
+    sa.Column('token', sa.String(length=128), nullable=False),
+    sa.Column('invite_token', sa.String(length=128), nullable=False),
+    sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], ),
+    sa.ForeignKeyConstraint(['token'], ['signup.token'], ),
+    sa.PrimaryKeyConstraint('token')
+    )
+    op.create_table('role-group',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('dn', sa.String(length=128), nullable=True),
+    sa.Column('role_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('dn', 'role_id')
+    )
+    op.create_table('role-user',
+    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
+    sa.Column('dn', sa.String(length=128), nullable=True),
+    sa.Column('role_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('dn', 'role_id')
+    )
+
+
+def downgrade():
+    op.drop_table('role-user')
+    op.drop_table('role-group')
+    op.drop_table('invite_signup')
+    op.drop_table('invite_roles')
+    op.drop_table('invite_grant')
+    op.drop_table('signup')
+    op.drop_table('role')
+    op.drop_table('ratelimit_event')
+    op.drop_table('passwordToken')
+    op.drop_table('oauth2token')
+    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
+
+    op.drop_table('oauth2grant')
+    op.drop_table('mfa_method')
+    op.drop_table('mailToken')
+    op.drop_table('invite')
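This initial revision was autogenerated from the current models. Future schema changes would follow the usual Flask-Migrate workflow; a minimal sketch (the revision message is only an example):

```
FLASK_APP=uffd flask db migrate -m "describe the schema change"   # autogenerate a new revision
# review the generated file under migrations/versions/ before committing it
FLASK_APP=uffd flask db upgrade                                   # apply it to the local database
```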
diff --git a/profiling.py b/profiling.py
deleted file mode 100755
index f64bd5c91e93cb21112439b8b0f0a3bfbd281823..0000000000000000000000000000000000000000
--- a/profiling.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/python3
-from werkzeug.contrib.profiler import ProfilerMiddleware
-from uffd import create_app
-app = create_app()
-app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
-app.run(debug=True)
diff --git a/requirements.txt b/requirements.txt
index d2b691787f81594ac76b627c98c04ff72ad02e73..ff3e37c1e517174ead799d53a2beed6dea710640 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,6 +5,8 @@ Flask-SQLAlchemy==2.1
 qrcode==6.1
 fido2==0.5.0
 Flask-OAuthlib==0.9.5
+Flask-Migrate==2.1.1
+alembic==1.0.0
 
 # The main dependencies on their own lead to version collisions and pip is
 # not very good at resolving them, so we pin the versions from Debian Buster
@@ -28,6 +30,9 @@ six==1.12.0
 SQLAlchemy==1.2.18
 urllib3==1.24.1
 Werkzeug==0.14.1
+python-dateutil==2.7.3
+#editor==1.0.3
+Mako==1.0.7
 
 # Testing
 pytest==3.10.1
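For reference, the pinned dependencies (now including Flask-Migrate and alembic) can be installed into a virtualenv as the README suggests; a minimal sketch, with the directory name purely illustrative:

```
python3 -m venv venv
. venv/bin/activate
pip install -r requirements.txt
```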
diff --git a/run.py b/run.py
deleted file mode 100755
index b8ccfa5b4ba817566cc5759c8850a03452e73b5f..0000000000000000000000000000000000000000
--- a/run.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-from werkzeug.serving import make_ssl_devcert
-
-from uffd import *
-
-if __name__ == '__main__':
-	app = create_app()
-	init_db(app)
-	print(app.url_map)
-	if not os.path.exists('devcert.crt') or not os.path.exists('devcert.key'):
-		make_ssl_devcert('devcert')
-	# WebAuthn requires https and a hostname (not just an IP address). If you
-	# don't want to test U2F/FIDO2 device registration/authorization, you can
-	# safely remove `host` and `ssl_context`.
-	app.run(threaded=True, debug=True, host='localhost', ssl_context=('devcert.crt', 'devcert.key'))
diff --git a/uffd/__init__.py b/uffd/__init__.py
index f0828bb2d0e61a079335879baa6f716aeba32036..4a859d0327d7d745fee1eb6fd2387dbfe5d6d00a 100644
--- a/uffd/__init__.py
+++ b/uffd/__init__.py
@@ -4,6 +4,9 @@ import sys
 
 from flask import Flask, redirect, url_for
 from werkzeug.routing import IntegerConverter
+from werkzeug.serving import make_ssl_devcert
+from werkzeug.contrib.profiler import ProfilerMiddleware
+from flask_migrate import Migrate
 
 sys.path.append('deps/ldapalchemy')
 
@@ -42,6 +45,7 @@ def create_app(test_config=None): # pylint: disable=too-many-locals
 		pass
 
 	db.init_app(app)
+	Migrate(app, db, render_as_batch=True)
 	# pylint: disable=C0415
 	from uffd import user, selfservice, role, mail, session, csrf, mfa, oauth2, services, signup, invite
 	# pylint: enable=C0415
@@ -53,8 +57,22 @@ def create_app(test_config=None): # pylint: disable=too-many-locals
 	def index(): #pylint: disable=unused-variable
 		return redirect(url_for('selfservice.index'))
 
-	return app
+	@app.cli.command("gendevcert", help='Generates a self-signed TLS certificate for development')
+	def gendevcert(): #pylint: disable=unused-variable
+		if os.path.exists('devcert.crt') or os.path.exists('devcert.key'):
+			print('Refusing to overwrite existing "devcert.crt"/"devcert.key" file!')
+			return
+		make_ssl_devcert('devcert')
+		print('Certificate written to "devcert.crt", private key to "devcert.key".')
+		print('Run `flask run --cert devcert.crt --key devcert.key` to use it.')
+
+	@app.cli.command("profile", help='Runs app with profiler')
+	def profile(): #pylint: disable=unused-variable
+		# app.run() is silently ignored when invoked from a CLI command. We really
+		# do want to run it here, so we bypass that check by overriding the
+		# environment variable.
+		os.environ['FLASK_RUN_FROM_CLI'] = 'false'
+		app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[30])
+		app.run(debug=True)
 
-def init_db(app):
-	with app.app_context():
-		db.create_all()
+	return app
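The scripts removed above (`run.py`, `profiling.py`, `create_db.py`) are replaced by Flask CLI commands. A usage sketch of the new workflow, based on the commands defined here and the hint printed by `gendevcert`:

```
FLASK_APP=uffd flask db upgrade    # replaces create_db.py / init_db()
FLASK_APP=uffd flask gendevcert    # writes devcert.crt and devcert.key
FLASK_APP=uffd FLASK_ENV=development flask run --cert devcert.crt --key devcert.key
FLASK_APP=uffd flask profile       # replaces profiling.py
```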