Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • uffd/uffd
  • rixx/uffd
  • thies/uffd
  • leona/uffd
  • enbewe/uffd
  • strifel/uffd
  • thies/uffd-2
7 results
Show changes
Showing
with 2073 additions and 264 deletions
{# mfa/disable.html: confirmation page shown before all of a user's second
   factors (recovery codes, TOTP apps, WebAuthn devices) are deleted. #}
{% extends 'base.html' %}
{% block body %}
<p>When you proceed, all recovery codes, registered authenticator applications and devices will be invalidated.
You can later generate new recovery codes and setup your applications and devices again.</p>
{# POSTs to mfa.disable_confirm, which is CSRF-protected #}
<form class="form" action="{{ url_for('mfa.disable_confirm') }}" method="POST">
<button type="submit" class="btn btn-danger btn-block">Disable two-factor authentication</button>
</form>
{% endblock %}
from warnings import warn
import urllib.parse
from flask import Blueprint, render_template, session, request, redirect, url_for, flash, current_app, abort
from uffd.database import db
from uffd.mfa.models import MFAMethod, TOTPMethod, WebauthnMethod, RecoveryCodeMethod
from uffd.session.views import get_current_user, login_required, pre_mfa_login_required
from uffd.user.models import User
from uffd.csrf import csrf_protect
from uffd.ratelimit import Ratelimit, format_delay
# Blueprint for all two-factor-authentication routes, mounted at /mfa/
bp = Blueprint('mfa', __name__, template_folder='templates', url_prefix='/mfa/')
# Rate limit for failed second-factor attempts, keyed by user dn below.
# NOTE(review): assumes Ratelimit(name, interval_seconds, limit) semantics,
# i.e. at most 3 failed attempts per minute -- confirm against uffd.ratelimit
mfa_ratelimit = Ratelimit('mfa', 1*60, 3)
@bp.route('/', methods=['GET'])
@login_required()
def setup():
	'''Render the 2FA overview page listing every method the current user has registered.'''
	user = get_current_user()
	return render_template(
		'setup.html',
		totp_methods=TOTPMethod.query.filter_by(dn=user.dn).all(),
		webauthn_methods=WebauthnMethod.query.filter_by(dn=user.dn).all(),
		recovery_methods=RecoveryCodeMethod.query.filter_by(dn=user.dn).all(),
	)
@bp.route('/setup/disable', methods=['GET'])
@login_required()
def disable():
	'''Show the confirmation page before 2FA is disabled.'''
	return render_template('disable.html')
@bp.route('/setup/disable', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def disable_confirm():
	'''Delete every 2FA method of the current user, then return to the setup page.'''
	current_dn = get_current_user().dn
	MFAMethod.query.filter_by(dn=current_dn).delete()
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/admin/<int:uid>/disable')
@login_required()
@csrf_protect(blueprint=bp)
def admin_disable(uid):
	'''Admin-only: reset (delete) all 2FA methods of the user with the given uid.

	Responds with 404 when no such user exists.
	'''
	# Group cannot be checked with login_required kwarg, because the config
	# variable is not available when the decorator is processed
	if not get_current_user().is_in_group(current_app.config['ACL_ADMIN_GROUP']):
		flash('Access denied')
		return redirect(url_for('index'))
	# first_or_404() instead of indexing the query result: indexing raised an
	# unhandled IndexError (HTTP 500) for unknown uids
	user = User.query.filter_by(uid=uid).first_or_404()
	MFAMethod.query.filter_by(dn=user.dn).delete()
	db.session.commit()
	flash('Two-factor authentication was reset')
	return redirect(url_for('user.show', uid=uid))
@bp.route('/setup/recovery', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_recovery():
	'''Replace the user's recovery codes with ten freshly generated ones and display them.'''
	user = get_current_user()
	# Any previously issued codes become invalid now
	for old_method in RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
		db.session.delete(old_method)
	new_methods = [RecoveryCodeMethod(user) for _ in range(10)]
	for new_method in new_methods:
		db.session.add(new_method)
	db.session.commit()
	return render_template('setup_recovery.html', methods=new_methods)
@bp.route('/setup/totp', methods=['GET'])
@login_required()
def setup_totp():
	'''Show the enrollment page for a new TOTP authenticator app.

	The freshly generated secret is stored in the session so the following
	POST to setup_totp_finish verifies against the very same key.
	'''
	method = TOTPMethod(get_current_user())
	session['mfa_totp_key'] = method.key
	return render_template('setup_totp.html', method=method, name=request.values['name'])
@bp.route('/setup/totp', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_totp_finish():
	'''Verify the code entered for a newly enrolled TOTP app and persist the method.

	Recovery codes must exist first, so users locked out of the new app can
	still regain access.
	'''
	user = get_current_user()
	if not RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
		flash('Generate recovery codes first!')
		return redirect(url_for('mfa.setup'))
	# pop with default: a plain pop raised an unhandled KeyError (HTTP 500)
	# when the session expired or setup_totp was never requested before
	key = session.pop('mfa_totp_key', None)
	if key is None:
		flash('Code is invalid')
		return redirect(url_for('mfa.setup_totp', name=request.values['name']))
	method = TOTPMethod(user, name=request.values['name'], key=key)
	if method.verify(request.form['code']):
		db.session.add(method)
		db.session.commit()
		return redirect(url_for('mfa.setup'))
	flash('Code is invalid')
	return redirect(url_for('mfa.setup_totp', name=request.values['name']))
@bp.route('/setup/totp/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_totp(id): #pylint: disable=redefined-builtin
	'''Remove one of the current user's TOTP methods; 404 for unknown or foreign ids.'''
	target = TOTPMethod.query.filter_by(dn=get_current_user().dn, id=id).first_or_404()
	db.session.delete(target)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
# WebAuthn support is optional because fido2 has a pretty unstable
# interface (v0.5.0 on buster and current version are completely
# incompatible) and might be difficult to install with the correct version
try:
	from fido2.client import ClientData
	from fido2.server import Fido2Server, RelyingParty
	from fido2.ctap2 import AttestationObject, AuthenticatorData
	from fido2 import cbor
	WEBAUTHN_SUPPORTED = True
except ImportError as err:
	warn('2FA WebAuthn support disabled because import of the fido2 module failed (%s)'%err)
	WEBAUTHN_SUPPORTED = False
# Expose the flag to all templates so WebAuthn UI can be hidden when unsupported
bp.add_app_template_global(WEBAUTHN_SUPPORTED, name='webauthn_supported')
# NOTE(review): indentation was lost in this view of the file; the handlers
# below are assumed to be nested inside the WEBAUTHN_SUPPORTED guard so the
# routes only exist when the fido2 import succeeded -- confirm upstream.
if WEBAUTHN_SUPPORTED:
	def get_webauthn_server():
		'''Build a Fido2Server for this deployment.

		MFA_RP_ID defaults to the hostname of the current request URL.
		'''
		return Fido2Server(RelyingParty(current_app.config.get('MFA_RP_ID', urllib.parse.urlsplit(request.url).hostname), current_app.config['MFA_RP_NAME']))

	@bp.route('/setup/webauthn/begin', methods=['POST'])
	@login_required()
	@csrf_protect(blueprint=bp)
	def setup_webauthn_begin():
		'''Start registration of a new WebAuthn credential (CBOR endpoint for client-side JS).'''
		user = get_current_user()
		# Recovery codes must exist before any other method may be added
		if not RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
			abort(403)
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		creds = [method.cred for method in methods]
		server = get_webauthn_server()
		registration_data, state = server.register_begin(
			{
				"id": user.dn.encode(),
				"name": user.loginname,
				"displayName": user.displayname,
			},
			creds,
			user_verification='discouraged',
		)
		# The challenge state must survive until the matching /complete request
		session["webauthn-state"] = state
		return cbor.dumps(registration_data)

	@bp.route('/setup/webauthn/complete', methods=['POST'])
	@login_required()
	@csrf_protect(blueprint=bp)
	def setup_webauthn_complete():
		'''Finish registration: verify the attestation and store the new credential.'''
		user = get_current_user()
		server = get_webauthn_server()
		data = cbor.loads(request.get_data())[0]
		client_data = ClientData(data["clientDataJSON"])
		att_obj = AttestationObject(data["attestationObject"])
		auth_data = server.register_complete(session["webauthn-state"], client_data, att_obj)
		method = WebauthnMethod(user, auth_data.credential_data, name=data['name'])
		db.session.add(method)
		db.session.commit()
		return cbor.dumps({"status": "OK"})

	@bp.route("/auth/webauthn/begin", methods=["POST"])
	@pre_mfa_login_required(no_redirect=True)
	def auth_webauthn_begin():
		'''Start WebAuthn authentication during login; 404 when the user has no credentials.'''
		user = get_current_user()
		server = get_webauthn_server()
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		creds = [method.cred for method in methods]
		if not creds:
			abort(404)
		auth_data, state = server.authenticate_begin(creds, user_verification='discouraged')
		session["webauthn-state"] = state
		return cbor.dumps(auth_data)

	@bp.route("/auth/webauthn/complete", methods=["POST"])
	@pre_mfa_login_required(no_redirect=True)
	def auth_webauthn_complete():
		'''Finish WebAuthn authentication and mark the session as fully logged in.'''
		user = get_current_user()
		server = get_webauthn_server()
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		creds = [method.cred for method in methods]
		if not creds:
			abort(404)
		data = cbor.loads(request.get_data())[0]
		credential_id = data["credentialId"]
		client_data = ClientData(data["clientDataJSON"])
		auth_data = AuthenticatorData(data["authenticatorData"])
		signature = data["signature"]
		# authenticate_complete() (as of python-fido2 v0.5.0, the version in Debian Buster)
		# does not check signCount, although the spec recommends it
		server.authenticate_complete(
			session.pop("webauthn-state"),
			creds,
			credential_id,
			client_data,
			auth_data,
			signature,
		)
		session['user_mfa'] = True
		return cbor.dumps({"status": "OK"})
@bp.route('/setup/webauthn/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_webauthn(id): #pylint: disable=redefined-builtin
	'''Remove one of the current user's WebAuthn credentials; 404 for unknown or foreign ids.'''
	target = WebauthnMethod.query.filter_by(dn=get_current_user().dn, id=id).first_or_404()
	db.session.delete(target)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/auth', methods=['GET'])
@pre_mfa_login_required()
def auth():
	'''Show the second-factor prompt during login.

	Users without any TOTP/WebAuthn method pass automatically; once
	session['user_mfa'] is set, redirect to the requested page (ref).
	'''
	user = get_current_user()
	totp_methods = TOTPMethod.query.filter_by(dn=user.dn).all()
	webauthn_methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	if not (totp_methods or webauthn_methods):
		session['user_mfa'] = True
	if session.get('user_mfa'):
		return redirect(request.values.get('ref', url_for('index')))
	return render_template(
		'auth.html',
		ref=request.values.get('ref'),
		totp_methods=totp_methods,
		webauthn_methods=webauthn_methods,
		recovery_methods=recovery_methods,
	)
@bp.route('/auth', methods=['POST'])
@pre_mfa_login_required()
def auth_finish():
	'''Validate a submitted TOTP or recovery code.

	Failed attempts are rate-limited per user dn. A used recovery code is
	deleted; when few or no codes remain, the user is sent to the setup page
	with a warning instead of the originally requested page.
	'''
	user = get_current_user()
	delay = mfa_ratelimit.get_delay(user.dn)
	if delay:
		flash('We received too many invalid attempts! Please wait at least %s.'%format_delay(delay))
		return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
	for totp_method in TOTPMethod.query.filter_by(dn=user.dn).all():
		if totp_method.verify(request.form['code']):
			session['user_mfa'] = True
			return redirect(request.values.get('ref', url_for('index')))
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	for recovery_method in recovery_methods:
		if not recovery_method.verify(request.form['code']):
			continue
		# Recovery codes are single-use
		db.session.delete(recovery_method)
		db.session.commit()
		session['user_mfa'] = True
		if len(recovery_methods) <= 1:
			flash('You have exhausted your recovery codes. Please generate new ones now!')
			return redirect(url_for('mfa.setup'))
		if len(recovery_methods) <= 5:
			flash('You only have a few recovery codes remaining. Make sure to generate new ones before they run out.')
			return redirect(url_for('mfa.setup'))
		return redirect(request.values.get('ref', url_for('index')))
	mfa_ratelimit.log(user.dn)
	flash('Two-factor authentication failed')
	return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
Database Migrations
===================
While we use Alembic in a single-database configuration, the migration scripts
are compatible with both SQLite and MySQL/MariaDB.
Compatibility with SQLite almost always requires `batch_alter_table` operations
to modify existing tables. These recreate the tables, copy the data and finally
replace the old with the newly created ones. Alembic is configured to
auto-generate those operations, but in most cases the generated code fails to
fully reflect all details of the original schema. This way some constraints
(e.g. `CHECK` constraints on Enums) are lost. Define the full table and pass it
with `copy_from` to `batch_alter_table` to prevent this.
Compatibility with MySQL requires special care when changing primary keys and
when dealing with foreign keys. It often helps to temporarily remove foreign
key constraints concerning the table that is subject to change. When adding an
autoincrement id column as the new primary key of a table, recreate the table
with `batch_alter_table`.
The `check_migrations.py` script verifies that upgrading and downgrading works
with both databases. While it is far from perfect, it catches many common
errors. It runs automatically as part of the CI pipeline. Make sure to update
the script when adding new tables and when making significant changes to
existing tables.
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
import click
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
# Database URL and metadata come from the running Flask app; Flask-Migrate
# invokes this env.py inside an application context.
config.set_main_option('sqlalchemy.url',
	current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
	"""Run migrations in 'offline' mode.

	This configures the context with just a URL and not an Engine, though an
	Engine is acceptable here as well. By skipping the Engine creation we
	don't even need a DBAPI to be available.

	Calls to context.execute() here emit the given string to the script
	output.
	"""
	context.configure(url=config.get_main_option("sqlalchemy.url"))
	with context.begin_transaction():
		context.run_migrations()
def run_migrations_online():
	"""Run migrations in 'online' mode.

	In this scenario we need to create an Engine
	and associate a connection with the context.
	"""
	# this callback is used to prevent an auto-migration from being generated
	# when there are no changes to the schema
	# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
	def process_revision_directives(context, revision, directives):
		if getattr(config.cmd_opts, 'autogenerate', False):
			script = directives[0]
			if script.upgrade_ops.is_empty():
				directives[:] = []
				logger.info('No changes in schema detected.')

	engine = engine_from_config(config.get_section(config.config_ini_section),
		prefix='sqlalchemy.',
		poolclass=pool.NullPool)

	connection = engine.connect()
	context.configure(connection=connection,
		target_metadata=target_metadata,
		process_revision_directives=process_revision_directives,
		**current_app.extensions['migrate'].configure_args)

	# On MySQL/MariaDB enforce the charset/collation the schema expects
	# (see the error messages below) before running any migration.
	if engine.name in ('mysql', 'mariadb'):
		character_set_connection = connection.execute('SHOW VARIABLES LIKE "character_set_connection"').fetchone()[1]
		if character_set_connection != 'utf8mb4':
			raise click.ClickException(f'Unsupported connection charset "{character_set_connection}". Make sure to add "?charset=utf8mb4" to SQLALCHEMY_DATABASE_URI!')
		collation_database = connection.execute('SHOW VARIABLES LIKE "collation_database"').fetchone()[1]
		if collation_database != 'utf8mb4_nopad_bin':
			raise click.ClickException(f'Unsupported database collation "{collation_database}". Create the database with "CHARACTER SET utf8mb4 COLLATE utf8mb4_nopad_bin"!')
		connection.execute('SET NAMES utf8mb4 COLLATE utf8mb4_nopad_bin')

	# Always release the connection, even when a migration fails
	try:
		with context.begin_transaction():
			context.run_migrations()
	finally:
		connection.close()
# Entry point: Alembic executes this module directly and selects the mode
if context.is_offline_mode():
	run_migrations_offline()
else:
	run_migrations_online()
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
"""OpenID Connect Support
Revision ID: 01fdd7820f29
Revises: a9b449776953
Create Date: 2023-11-09 16:52:20.860871
"""
from alembic import op
import sqlalchemy as sa
import datetime
import secrets
import math
import logging
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend # Only required for Buster
import jwt
# pyjwt v1.7.x compat (Buster/Bullseye): get_algorithm_by_name() only exists
# in newer pyjwt releases, so emulate it via the default algorithm registry
if not hasattr(jwt, 'get_algorithm_by_name'):
	jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]

# revision identifiers, used by Alembic.
revision = '01fdd7820f29'
down_revision = 'a9b449776953'
branch_labels = None
depends_on = None

logger = logging.getLogger('alembic.runtime.migration.01fdd7820f29')
def token_with_alphabet(alphabet, nbytes=None):
	'''Return random text token that consists of characters from `alphabet`'''
	if nbytes is None:
		nbytes = max(secrets.DEFAULT_ENTROPY, 32)
	# Entropy carried by a single character, measured in bytes
	entropy_per_char = math.log(len(alphabet), 256)
	length = math.ceil(nbytes / entropy_per_char)
	return ''.join(secrets.choice(alphabet) for _ in range(length))

def token_urlfriendly(nbytes=None):
	'''Return random text token that is urlsafe and works around common parsing bugs'''
	return token_with_alphabet('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', nbytes=nbytes)
def upgrade():
	'''Create the oauth2_key table with an initial RS256 signing key and add
	nonce/claims columns to oauth2grant and a claims column to oauth2token.'''
	logger.info('Generating 3072 bit RSA key pair (RS256) for OpenID Connect support ...')
	private_key = rsa.generate_private_key(public_exponent=65537, key_size=3072, backend=default_backend())

	meta = sa.MetaData(bind=op.get_bind())

	oauth2_key = op.create_table('oauth2_key',
		sa.Column('id', sa.String(length=64), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('active', sa.Boolean(create_constraint=False), nullable=False),
		sa.Column('algorithm', sa.String(length=32), nullable=False),
		sa.Column('private_key_jwk', sa.Text(), nullable=False),
		sa.Column('public_key_jwk', sa.Text(), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2_key'))
	)
	# Store both halves of the generated key pair as JWK strings
	algorithm = jwt.get_algorithm_by_name('RS256')
	op.bulk_insert(oauth2_key, [{
		'id': token_urlfriendly(),
		'created': datetime.datetime.utcnow(),
		'active': True,
		'algorithm': 'RS256',
		'private_key_jwk': algorithm.to_jwk(private_key),
		'public_key_jwk': algorithm.to_jwk(private_key.public_key()),
	}])

	# The index must be dropped before the table is recreated below
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
	# Full table definition passed via copy_from so the SQLite batch
	# recreation preserves all constraints (see the migrations README)
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.add_column(sa.Column('nonce', sa.Text(), nullable=True))
		batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
		batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=True)

	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
def downgrade():
	'''Drop the claims/nonce columns again, restore the code index and remove
	the oauth2_key table.'''
	meta = sa.MetaData(bind=op.get_bind())

	# Full table definitions passed via copy_from so the SQLite batch
	# recreation keeps all remaining constraints intact
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.drop_column('claims')

	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		# NOTE(review): redirect_uri becomes NOT NULL again; rows written
		# after the upgrade may hold NULL here and would make this fail
		batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=False)
		batch_op.drop_column('claims')
		batch_op.drop_column('nonce')
		batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)

	op.drop_table('oauth2_key')
"""lower-case mail receive addresses
Revision ID: 042879d5e3ac
Revises: 878b25c4fae7
Create Date: 2022-02-01 20:37:32.103288
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '042879d5e3ac'
down_revision = '878b25c4fae7'
branch_labels = None
depends_on = None
def upgrade():
	'''Normalize all mail receive addresses to lower case.'''
	meta = sa.MetaData(bind=op.get_bind())
	mail_receive_address_table = sa.Table('mail_receive_address', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('mail_id', sa.Integer(), nullable=False),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_receive_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_receive_address'))
	)
	# Pure data migration: a single UPDATE, no schema change
	op.execute(mail_receive_address_table.update().values(address=sa.func.lower(mail_receive_address_table.c.address)))
def downgrade():
	# Lower-casing is not reversible; the original case is lost
	pass
"""add expires attribute to ratelimit_event
Revision ID: 09d2edcaf0cc
Revises: af07cea65391
Create Date: 2022-02-15 14:16:19.318253
"""
from alembic import op
import sqlalchemy as sa
revision = '09d2edcaf0cc'
down_revision = 'af07cea65391'
branch_labels = None
depends_on = None
def upgrade():
	'''Add a NOT NULL expires column to ratelimit_event and make
	timestamp/name NOT NULL as well.'''
	meta = sa.MetaData(bind=op.get_bind())
	ratelimit_event = sa.Table('ratelimit_event', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('timestamp', sa.DateTime(), nullable=True),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('key', sa.String(length=128), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
	)
	# Existing events carry no expiry value, so they are discarded rather
	# than back-filling the new NOT NULL column
	op.execute(ratelimit_event.delete())
	with op.batch_alter_table('ratelimit_event', copy_from=ratelimit_event) as batch_op:
		batch_op.add_column(sa.Column('expires', sa.DateTime(), nullable=False))
		batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=False)
def downgrade():
	'''Drop the expires column again and relax timestamp/name to nullable.'''
	meta = sa.MetaData(bind=op.get_bind())
	ratelimit_event = sa.Table('ratelimit_event', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('timestamp', sa.DateTime(), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=False),
		sa.Column('key', sa.String(length=128), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
	)
	# Ratelimit events are transient; wiping them is always safe
	op.execute(ratelimit_event.delete())
	with op.batch_alter_table('ratelimit_event', schema=None) as batch_op:
		batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=True)
		batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=True)
		batch_op.drop_column('expires')
"""MySQL compat fixes
Revision ID: 11ecc8f1ac3b
Revises: bf71799b7b9e
Create Date: 2021-09-13 04:15:07.479295
"""
from alembic import op
import sqlalchemy as sa
revision = '11ecc8f1ac3b'
down_revision = 'bf71799b7b9e'
branch_labels = None
depends_on = None
def upgrade():
	'''Recreate two tables to rename foreign key constraints for MySQL
	compatibility (recreate='always' forces a rebuild even without changes).'''
	meta = sa.MetaData(bind=op.get_bind())
	# First pass: rebuild without the foreign keys so the old constraint
	# names are dropped
	table = sa.Table('device_login_confirmation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('initiation_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass

	# Second pass: rebuild again with the corrected constraint names.
	# A fresh MetaData is needed because the table names are reused.
	meta = sa.MetaData(bind=op.get_bind())
	# Previously "fk_device_login_confirmation_initiation_id_" was named
	# "fk_device_login_confirmation_initiation_id_device_login_initiation"
	# but this was too long for MySQL.
	table = sa.Table('device_login_confirmation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('initiation_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name='fk_device_login_confirmation_initiation_id_'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
		sa.UniqueConstraint('initiation_id', 'code0', name='uq_device_login_confirmation_initiation_id_code0'),
		sa.UniqueConstraint('initiation_id', 'code1', name='uq_device_login_confirmation_initiation_id_code1'),
		sa.UniqueConstraint('user_dn', name=op.f('uq_device_login_confirmation_user_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	# Previously "fk_invite_signup_id_signup" was named
	# "fk_invite_signup_signup_id_signup" by mistake.
	table = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
def downgrade():
	# The old (broken/overlong) constraint names are intentionally not restored
	pass
"""Deactivate users
Revision ID: 23293f32b503
Revises: e249233e2a31
Create Date: 2022-11-10 02:06:27.766520
"""
from alembic import op
import sqlalchemy as sa
revision = '23293f32b503'
down_revision = 'e249233e2a31'
branch_labels = None
depends_on = None
def upgrade():
	'''Add service.hide_deactivated_users and user.is_deactivated flags.

	Each column is first added with a server_default so existing rows get a
	value, then the table is recreated (copy_from) and the default removed.
	'''
	meta = sa.MetaData(bind=op.get_bind())
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('hide_deactivated_users', server_default=None)

	with op.batch_alter_table('user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	user = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=False),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	with op.batch_alter_table('user', copy_from=user) as batch_op:
		batch_op.alter_column('is_deactivated', server_default=None)
def downgrade():
	"""Drop user.is_deactivated and service.hide_deactivated_users again."""
	meta = sa.MetaData(bind=op.get_bind())
	# Full table definition: CHECK constraints (from Boolean columns) get lost
	# when reflecting from the actual table, so the schema is re-stated here.
	# NOTE(review): this table object is not passed as copy_from to the first
	# batch_alter_table below (it uses schema=None/reflection) — confirm intended.
	user = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=False),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	with op.batch_alter_table('user', schema=None) as batch_op:
		batch_op.drop_column('is_deactivated')
	# Full service table definition used to recreate the table without the
	# hide_deactivated_users column.
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.drop_column('hide_deactivated_users')
"""added missing oauth2grant.code index
Revision ID: 2a6b1fb82ce6
Revises: cbca20cf64d9
Create Date: 2021-04-13 23:03:46.280189
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2a6b1fb82ce6'
down_revision = 'cbca20cf64d9'
branch_labels = None
depends_on = None
def upgrade():
	"""Create the previously missing index on oauth2grant.code."""
	with op.batch_alter_table('oauth2grant', schema=None) as batch:
		batch.create_index(batch.f('ix_oauth2grant_code'), ['code'], unique=False)
def downgrade():
	"""Remove the oauth2grant.code index again."""
	with op.batch_alter_table('oauth2grant', schema=None) as batch:
		batch.drop_index(batch.f('ix_oauth2grant_code'))
"""Remailer v2
Revision ID: 2b68f688bec1
Revises: e13b733ec856
Create Date: 2022-10-20 03:40:11.522343
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2b68f688bec1'
down_revision = 'e13b733ec856'
branch_labels = None
depends_on = None
def upgrade():
	"""Replace the boolean service.use_remailer with the three-valued remailer_mode enum."""
	# Add the new column with a temporary server_default so existing rows get DISABLED
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'))
	# Lightweight table construct for the data migration below
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	# Services that previously had the remailer enabled map to the v1 behaviour
	op.execute(service.update().values(remailer_mode='ENABLED_V1').where(service.c.use_remailer))
	# Full table definition (CHECK constraints get lost when reflecting), used to
	# recreate the table without use_remailer and without the temporary default
	meta = sa.MetaData(bind=op.get_bind())
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('remailer_mode', server_default=None)
		batch_op.drop_column('use_remailer')
def downgrade():
	"""Restore the boolean service.use_remailer from remailer_mode."""
	# Add the column back with a temporary server_default of false
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('use_remailer', sa.BOOLEAN(), nullable=False, server_default=sa.false()))
	# Lightweight table construct for the data migration below
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	# Any non-DISABLED mode maps back to use_remailer=True
	op.execute(service.update().values(use_remailer=sa.true()).where(service.c.remailer_mode != 'DISABLED'))
	# Full table definition used to recreate the table without remailer_mode
	# and without the temporary server_default on use_remailer
	meta = sa.MetaData(bind=op.get_bind())
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('use_remailer', server_default=None)
		batch_op.drop_column('remailer_mode')
"""Unique email addresses
Revision ID: 468995a9c9ee
Revises: 2b68f688bec1
Create Date: 2022-10-21 01:25:01.469670
"""
import unicodedata
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '468995a9c9ee'
down_revision = '2b68f688bec1'
branch_labels = None
depends_on = None
def normalize_address(value):
	"""Return the canonical form of an email address: NFKC-normalized, lower-cased and stripped of surrounding whitespace."""
	folded = unicodedata.normalize('NFKC', value)
	return folded.lower().strip()
def iter_rows_paged(table, pk='id', limit=1000):
	"""Yield all rows of *table* in primary-key order.

	Fetches at most *limit* rows per query (keyset pagination on the *pk*
	column), so arbitrarily large tables can be processed with bounded memory.
	"""
	conn = op.get_bind()
	pk_column = getattr(table.c, pk)
	last_pk = None
	while True:
		expr = table.select().order_by(pk_column).limit(limit)
		if last_pk is not None:
			# Continue after the last row of the previous page
			expr = expr.where(pk_column > last_pk)
		result = conn.execute(expr)
		pk_index = list(result.keys()).index(pk)
		rows = result.fetchall()
		if not rows:
			break
		yield from rows
		# Remember the pk of the last yielded row as the cursor for the next page
		last_pk = rows[-1][pk_index]
def upgrade():
	"""Add normalized-address columns/constraints to user_email and create feature_flag.

	Backfills address_normalized (NFKC, lower-cased, stripped) for all existing
	rows and converts verified from False/True to None/True, so that the new
	unique constraints (which ignore NULLs) only apply to verified addresses.
	"""
	with op.batch_alter_table('user_email', schema=None) as batch_op:
		batch_op.add_column(sa.Column('address_normalized', sa.String(length=128), nullable=True))
		batch_op.add_column(sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True))
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=True)
	# Full table definition (CHECK constraints get lost when reflecting)
	meta = sa.MetaData(bind=op.get_bind())
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=True),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address')
	)
	for row in iter_rows_paged(user_email_table):
		# Renamed from "id" to avoid shadowing the id() builtin
		row_id = row[0]
		address = row[2]
		verified = row[5]
		op.execute(user_email_table.update()\
			.where(user_email_table.c.id == row_id)\
			.values(
				address_normalized=normalize_address(address),
				verified=(True if verified else None)
			)
		)
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.alter_column('address_normalized', existing_type=sa.String(length=128), nullable=False)
		batch_op.create_unique_constraint('uq_user_email_address_normalized_verified', ['address_normalized', 'verified', 'enable_strict_constraints'])
		batch_op.create_unique_constraint('uq_user_email_user_id_address_normalized', ['user_id', 'address_normalized', 'enable_strict_constraints'])
	op.create_table('feature_flag',
		sa.Column('name', sa.String(32), nullable=False),
		sa.PrimaryKeyConstraint('name', name=op.f('pk_feature_flag')),
	)
def downgrade():
	"""Drop the normalized-address columns/constraints and the feature_flag table."""
	# Full table definition (CHECK constraints get lost when reflecting)
	meta = sa.MetaData(bind=op.get_bind())
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=False),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address'),
		sa.UniqueConstraint('address_normalized', 'verified', 'enable_strict_constraints', name='uq_user_email_address_normalized_verified'),
		sa.UniqueConstraint('user_id', 'address_normalized', 'enable_strict_constraints', name='uq_user_email_user_id_address_normalized')
	)
	# The upgrade migrated verified from False/True to None/True; convert back.
	# .is_(None) renders as SQL "IS NULL" (idiomatic form of "== None").
	op.execute(user_email_table.update().where(user_email_table.c.verified.is_(None)).values(verified=False))
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.drop_constraint('uq_user_email_user_id_address_normalized', type_='unique')
		batch_op.drop_constraint('uq_user_email_address_normalized_verified', type_='unique')
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=False)
		batch_op.drop_column('enable_strict_constraints')
		batch_op.drop_column('address_normalized')
	op.drop_table('feature_flag')
"""Unified password hashing for recovery codes
Revision ID: 4bd316207e59
Revises: e71e29cc605a
Create Date: 2024-05-22 03:13:55.917641
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4bd316207e59'
down_revision = 'e71e29cc605a'
branch_labels = None
depends_on = None
def upgrade():
	"""Prefix recovery-code hashes with the '{crypt}' method marker and drop the
	unused recovery_salt column."""
	# Full table definition (CHECK constraints get lost when reflecting)
	meta = sa.MetaData(bind=op.get_bind())
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	# This field was already unused before the change to unified password hashing. So this is unrelated cleanup.
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.drop_column('recovery_salt')
	# Mark existing recovery-code hashes with the crypt method prefix
	op.execute(mfa_method.update().values(recovery_hash=('{crypt}' + mfa_method.c.recovery_hash)).where(mfa_method.c.type == 'RECOVERY_CODE'))
def downgrade():
	"""Re-add recovery_salt and strip the '{crypt}' prefix from recovery-code hashes.

	Recovery codes whose hash does not carry the '{crypt}' prefix cannot be
	represented in the old format and are deleted.
	"""
	# Full table definition (CHECK constraints get lost when reflecting)
	meta = sa.MetaData(bind=op.get_bind())
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.add_column(sa.Column('recovery_salt', sa.VARCHAR(length=64), nullable=True))
	# Delete recovery codes that are not in '{crypt}...' form
	op.execute(
		mfa_method.delete().where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			sa.not_(mfa_method.c.recovery_hash.ilike('{crypt}%'))
		))
	)
	# Strip the '{crypt}' prefix from the remaining recovery-code hashes
	op.execute(
		mfa_method.update().values(
			recovery_hash=sa.func.substr(mfa_method.c.recovery_hash, len('{crypt}') + 1)
		).where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			mfa_method.c.recovery_hash.ilike('{crypt}%')
		))
	)
"""invite pk change
Revision ID: 54b2413586fd
Revises: 2a6b1fb82ce6
Create Date: 2021-04-13 23:33:40.118507
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '54b2413586fd'
down_revision = '2a6b1fb82ce6'
branch_labels = None
depends_on = None
# Lightweight table constructs used by the data-migration statements in
# upgrade() and downgrade() below (only the columns that are read or written).
invite = sa.sql.table('invite',
	sa.sql.column('id', sa.Integer()),
	sa.sql.column('token', sa.String(length=128))
)
invite_grant = sa.sql.table('invite_grant',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
invite_roles = sa.sql.table('invite_roles',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
invite_signup = sa.sql.table('invite_signup',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
def upgrade():
	"""Replace invite's token primary key with an autoincrement integer id and
	migrate the invite_grant/invite_roles/invite_signup references accordingly."""
	# CHECK constraints get lost when reflecting from the actual table
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
	)
	# Drop the token-based foreign keys before changing invite's primary key
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_grant_invite_token_invite', type_='foreignkey')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_roles_invite_token_invite', type_='foreignkey')
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_signup_invite_token_invite', type_='foreignkey')
	# Swap the primary key: token -> new autoincrement id; keep token unique
	with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
		batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.create_primary_key(batch_op.f('pk_invite'), ['id'])
		batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
		batch_op.create_unique_constraint(batch_op.f('uq_invite_token'), ['token'])
	# Add the new invite_id columns (nullable for now, backfilled below)
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	# Backfill invite_id via subqueries resolving each row's invite_token
	op.execute(invite_grant.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_grant.c.invite_token).as_scalar()))
	op.execute(invite_roles.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_roles.c.invite_token).as_scalar()))
	op.execute(invite_signup.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_signup.c.invite_token).as_scalar()))
	# Make invite_id NOT NULL, recreate the foreign keys and drop invite_token
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
		batch_op.create_foreign_key(batch_op.f('fk_invite_grant_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
		batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_id', 'role_id'])
		batch_op.create_foreign_key(batch_op.f('fk_invite_roles_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
def downgrade():
	"""Restore invite's token primary key and re-point the referencing tables
	from invite_id back to invite_token."""
	# Drop the id-based foreign keys and re-add the invite_token columns
	# (nullable for now, backfilled below)
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_signup_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_roles_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_grant_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	# Backfill invite_token via subqueries resolving each row's invite_id
	op.execute(invite_grant.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_grant.c.invite_id).as_scalar()))
	op.execute(invite_roles.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_roles.c.invite_id).as_scalar()))
	op.execute(invite_signup.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_signup.c.invite_id).as_scalar()))
	# Make invite_token NOT NULL again and drop the invite_id columns
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_column('invite_id')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
		batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_token', 'role_id'])
		batch_op.drop_column('invite_id')
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_column('invite_id')
	# CHECK constraints get lost when reflecting from the actual table
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')),
		sa.UniqueConstraint('token', name=op.f('uq_invite_token'))
	)
	# Swap the primary key back: drop id, make token the primary key again
	with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
		batch_op.drop_constraint(batch_op.f('uq_invite_token'), type_='unique')
		batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
		batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
		batch_op.drop_column('id')
		batch_op.create_primary_key(batch_op.f('pk_invite'), ['token'])
	# Recreate the token-based foreign keys
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_signup_invite_token_invite', 'invite', ['invite_token'], ['token'])
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_roles_invite_token_invite', 'invite', ['invite_token'], ['token'])
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_grant_invite_token_invite', 'invite', ['invite_token'], ['token'])
"""Role inclusion
Revision ID: 5a07d4a63b64
Revises: a29870f95175
Create Date: 2021-04-05 15:00:26.205433
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5a07d4a63b64'
down_revision = 'a29870f95175'
branch_labels = None
depends_on = None
def upgrade():
	"""Create the role-inclusion association table (roles may include other roles)."""
	op.create_table('role-inclusion',
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], ),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id')
	)
def downgrade():
	"""Drop the role-inclusion association table again."""
	op.drop_table('role-inclusion')
"""invite creator and role moderator group
Revision ID: 5cab70e95bf8
Revises: 54b2413586fd
Create Date: 2021-04-14 15:46:29.910342
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5cab70e95bf8'
down_revision = '54b2413586fd'
branch_labels = None
depends_on = None
def upgrade():
	"""Add invite.creator_dn and role.moderator_group_dn (LDAP DN references)."""
	# ### commands auto generated by Alembic - please adjust! ###
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.add_column(sa.Column('creator_dn', sa.String(length=128), nullable=True))
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.add_column(sa.Column('moderator_group_dn', sa.String(length=128), nullable=True))
	# ### end Alembic commands ###
def downgrade():
	"""Drop invite.creator_dn and role.moderator_group_dn again."""
	# ### commands auto generated by Alembic - please adjust! ###
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.drop_column('moderator_group_dn')
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.drop_column('creator_dn')
	# ### end Alembic commands ###
"""remailer setting and api permission
Revision ID: 704d1245331c
Revises: b9d3f7dac9db
Create Date: 2022-04-19 17:32:52.304313
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '704d1245331c'
down_revision = 'b9d3f7dac9db'
branch_labels = None
depends_on = None
def upgrade():
	"""Add api_client.perm_remailer and service.use_remailer (both default false)."""
	# Retrospective fix of this migration: Originally server_default was not set,
	# which caused "Cannot add a NOT NULL column with default value NULL" errors.
	# This only happens with recent Alembic versions that render
	# batch_op.add_column as an "ALTER TABLE" statement instead of recreating the
	# table. To keep the resulting database consistent, we remove the
	# server_default afterwards.
	with op.batch_alter_table('api_client') as batch_op:
		batch_op.add_column(sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	with op.batch_alter_table('service') as batch_op:
		batch_op.add_column(sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	# Full table definitions (CHECK constraints get lost when reflecting), used
	# to recreate both tables without the temporary server_defaults
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.alter_column('perm_remailer', server_default=None)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('use_remailer', server_default=None)
def downgrade():
	"""Drop api_client.perm_remailer and service.use_remailer again."""
	# Full table definitions (CHECK constraints get lost when reflecting)
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.drop_column('use_remailer')
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.drop_column('perm_remailer')
"""LDAP to DB
Revision ID: 878b25c4fae7
Revises: 11ecc8f1ac3b
Create Date: 2021-08-01 16:31:09.242380
"""
from warnings import warn
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '878b25c4fae7'
down_revision = '11ecc8f1ac3b'
branch_labels = None
depends_on = None
from flask import current_app
def encode_filter(filter_params):
	"""Render a sequence of (attribute, value) pairs as an ANDed LDAP search filter string."""
	from ldap3.utils.conv import escape_filter_chars
	clauses = ['(%s=%s)' % (attr, escape_filter_chars(value)) for attr, value in filter_params]
	return '(&%s)' % ''.join(clauses)
def get_ldap_conn():
	"""Open a bound ldap3 connection based on the Flask app config.

	Returns None (with a warning) when LDAP is not configured; raises when LDAP
	is configured but unusable, or when UPGRADE_V1_TO_V2 is not set.
	"""
	if 'LDAP_SERVICE_URL' in current_app.config and not current_app.config.get('UPGRADE_V1_TO_V2'):
		raise Exception('Refusing to run v1 to v2 migrations: UPGRADE_V1_TO_V2 not set. Make sure to read upgrade instructions first!')
	# Failures are only fatal when LDAP is explicitly configured
	critical = True
	if 'LDAP_SERVICE_URL' not in current_app.config:
		critical = False
	try:
		if current_app.config.get('LDAP_SERVICE_USER_BIND'):
			raise Exception('Import with LDAP_SERVICE_USER_BIND=True is not supported')
		if current_app.config.get('LDAP_SERVICE_MOCK'):
			# never reached if current_app.testing is True
			raise Exception('Import with LDAP_SERVICE_MOCK=True is not supported')
		import ldap3
		server = ldap3.Server(current_app.config.get('LDAP_SERVICE_URL', 'ldapi:///'), get_info=ldap3.ALL)
		# Using auto_bind cannot close the connection, so define the connection with extra steps
		conn = ldap3.Connection(server, current_app.config.get('LDAP_SERVICE_BIND_DN', ''),
			current_app.config.get('LDAP_SERVICE_BIND_PASSWORD', ''))
		if conn.closed:
			conn.open(read_server_info=False)
		if current_app.config.get('LDAP_SERVICE_USE_STARTTLS', True):
			conn.start_tls(read_server_info=False)
		if not conn.bind(read_server_info=True):
			conn.unbind()
			raise ldap3.core.exceptions.LDAPBindError
		return conn
	except Exception as e:
		if critical:
			raise e
		else:
			warn(f'LDAP not properly configured, disabling import: {e}')
			return None
def get_ldap_users():
	"""Fetch all users from LDAP as a list of plain dicts.

	Returns fixed mock data when LDAP_SERVICE_MOCK is enabled under testing and
	an empty list when LDAP is not configured.
	"""
	if current_app.config.get('LDAP_SERVICE_MOCK') and current_app.testing:
		return [
			{'dn': 'uid=testuser,ou=users,dc=example,dc=com', 'unix_uid': 10000, 'loginname': 'testuser',
			'displayname': 'Test User', 'mail': 'testuser@example.com',
			'pwhash': '{ssha512}llnQc2ruKczLUHJUPA3/MGA1rkChXcmYdIeMRfKC8NfsqnHTtd2UmSZ7RL4uTExzAcMyYKxLwyjmjZfycjLHBjR6NJeK1sz3',
			'is_service_user': False},
			{'dn': 'uid=testadmin,ou=users,dc=example,dc=com', 'unix_uid': 10001, 'loginname': 'testadmin',
			'displayname': 'Test Admin', 'mail': 'testadmin@example.com',
			'pwhash': '{ssha512}8pI4sHQWEgDf9u4qj35QT3J1lskLrnWdvhlzSmYg1g2R1r/038f6we+8Hy5ld/KArApB9Gd9+4uitKbZVbR1CkuPT2iAWoMc',
			'is_service_user': False},
		]
	conn = get_ldap_conn()
	if not conn:
		return []
	conn.search(current_app.config.get('LDAP_USER_SEARCH_BASE', 'ou=users,dc=example,dc=com'),
		encode_filter(current_app.config.get('LDAP_USER_SEARCH_FILTER', [('objectClass', 'person')])),
		attributes='*')
	users = []
	for response in conn.response:
		uid = response['attributes'][current_app.config.get('LDAP_USER_UID_ATTRIBUTE', 'uidNumber')]
		# The password hash is required for the import; fail loudly if unreadable
		pwhash = response['attributes'].get('userPassword', [None])[0]
		if pwhash is None:
			raise Exception('Cannot read userPassword attribute')
		elif isinstance(pwhash, bytes):
			pwhash = pwhash.decode()
		users.append({
			'dn': response['dn'],
			'unix_uid': uid,
			'loginname': response['attributes'][current_app.config.get('LDAP_USER_LOGINNAME_ATTRIBUTE', 'uid')][0],
			'displayname': response['attributes'].get(current_app.config.get('LDAP_USER_DISPLAYNAME_ATTRIBUTE', 'cn'), [''])[0],
			'mail': response['attributes'][current_app.config.get('LDAP_USER_MAIL_ATTRIBUTE', 'mail')][0],
			# Users with a uid inside the configured service range are service users
			'is_service_user': uid >= current_app.config.get('LDAP_USER_SERVICE_MIN_UID', 19000) and \
				uid <= current_app.config.get('LDAP_USER_SERVICE_MAX_UID', 19999),
		})
	return users
def get_ldap_groups():
	"""Fetch all group objects from LDAP.

	Returns a list of dicts with the keys dn, unix_gid, name, description
	and member_dns (the raw member DN list, which may include non-user
	objects). When the app is in testing mode with LDAP_SERVICE_MOCK
	enabled, three static example groups are returned instead. Returns an
	empty list when no LDAP connection is available.
	"""
	cfg = current_app.config.get
	if cfg('LDAP_SERVICE_MOCK') and current_app.testing:
		return [
			{'dn': 'cn=users,ou=groups,dc=example,dc=com', 'unix_gid': 20001, 'name': 'users',
			 'description': 'Base group for all users', 'member_dns': ['cn=dummy,ou=system,dc=example,dc=com',
			                                                           'uid=testuser,ou=users,dc=example,dc=com',
			                                                           'uid=testadmin,ou=users,dc=example,dc=com']},
			{'dn': 'cn=uffd_access,ou=groups,dc=example,dc=com', 'unix_gid': 20002, 'name': 'uffd_access',
			 'description': 'User access to uffd selfservice', 'member_dns': ['cn=dummy,ou=system,dc=example,dc=com',
			                                                                  'uid=testuser,ou=users,dc=example,dc=com',
			                                                                  'uid=testadmin,ou=users,dc=example,dc=com']},
			{'dn': 'cn=uffd_admin,ou=groups,dc=example,dc=com', 'unix_gid': 20003, 'name': 'uffd_admin',
			 'description': 'User access to uffd selfservice', 'member_dns': ['cn=dummy,ou=system,dc=example,dc=com',
			                                                                  'uid=testadmin,ou=users,dc=example,dc=com']},
		]
	conn = get_ldap_conn()
	if not conn:
		return []
	conn.search(cfg('LDAP_GROUP_SEARCH_BASE', 'ou=groups,dc=example,dc=com'),
	            encode_filter(cfg('LDAP_GROUP_SEARCH_FILTER', [('objectClass','groupOfUniqueNames')])),
	            attributes='*')
	results = []
	for entry in conn.response:
		attrs = entry['attributes']
		results.append({
			'dn': entry['dn'],
			'unix_gid': attrs[cfg('LDAP_GROUP_GID_ATTRIBUTE', 'gidNumber')],
			'name': attrs[cfg('LDAP_GROUP_NAME_ATTRIBUTE', 'cn')][0],
			'description': attrs.get(cfg('LDAP_GROUP_DESCRIPTION_ATTRIBUTE', 'description'), [''])[0],
			'member_dns': attrs.get(cfg('LDAP_GROUP_MEMBER_ATTRIBUTE', 'uniqueMember'), []),
		})
	return results
def get_ldap_mails():
	"""Fetch all virtual mail alias objects from LDAP.

	Returns a list of dicts with the keys dn, uid, receivers (accepted
	addresses) and destinations (forwarding targets). When the app is in
	testing mode with LDAP_SERVICE_MOCK enabled, one static example alias
	is returned instead. Returns an empty list when no LDAP connection is
	available.
	"""
	cfg = current_app.config.get
	if cfg('LDAP_SERVICE_MOCK') and current_app.testing:
		return [
			{'dn': 'uid=test,ou=postfix,dc=example,dc=com', 'uid': 'test',
			 'receivers': ['test1@example.com', 'test2@example.com'],
			 'destinations': ['testuser@mail.example.com']},
		]
	conn = get_ldap_conn()
	if not conn:
		return []
	conn.search(cfg('LDAP_MAIL_SEARCH_BASE', 'ou=postfix,dc=example,dc=com'),
	            encode_filter(cfg('LDAP_MAIL_SEARCH_FILTER', [('objectClass','postfixVirtual')])),
	            attributes='*')
	results = []
	for entry in conn.response:
		attrs = entry['attributes']
		results.append({
			'dn': entry['dn'],
			'uid': attrs[cfg('LDAP_MAIL_UID_ATTRIBUTE', 'uid')][0],
			'receivers': attrs.get(cfg('LDAP_MAIL_RECEIVERS_ATTRIBUTE', 'mailacceptinggeneralid'), []),
			'destinations': attrs.get(cfg('LDAP_MAIL_DESTINATIONS_ATTRIBUTE', 'maildrop'), []),
		})
	return results
def upgrade():
	"""Migrate all LDAP-backed objects (users, groups, mail aliases) into
	local database tables and rewrite every DN-based reference to an
	integer foreign key.

	General pattern used throughout: create (or reflect) a table with a
	temporary ``*_dn`` column, bulk-insert rows carrying the old DN,
	populate the new ``*_id`` column via a correlated subquery joining on
	the DN, delete rows whose DN matched no imported object, then use a
	batch_alter_table pass to drop the temporary DN column and tighten
	constraints. Batch mode is required because the target is SQLite.
	"""
	# Load LDAP data first, so we fail as early as possible
	ldap_mails = get_ldap_mails()
	ldap_users = get_ldap_users()
	ldap_groups = get_ldap_groups()
	meta = sa.MetaData(bind=op.get_bind())
	# --- mail aliases and their receive/destination address lists ---
	mail_table = op.create_table('mail',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('uid', sa.String(length=32), nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False), # tmp
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mail')),
		sa.UniqueConstraint('uid', name=op.f('uq_mail_uid'))
	)
	mail_receive_address_table = op.create_table('mail_receive_address',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('mail_id', sa.Integer(), nullable=True),
		sa.Column('mail_dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_receive_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_receive_address'))
	)
	mail_destination_address_table = op.create_table('mail_destination_address',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('mail_id', sa.Integer(), nullable=True),
		sa.Column('mail_dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_destination_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_destination_address'))
	)
	op.bulk_insert(mail_table, [{'uid': mail['uid'], 'dn': mail['dn']} for mail in ldap_mails])
	rows = []
	for mail in ldap_mails:
		rows += [{'mail_dn': mail['dn'], 'address': address} for address in mail['receivers']]
	op.bulk_insert(mail_receive_address_table, rows)
	# Resolve mail_dn -> mail.id via correlated subquery (dn is unique, limit(1) is defensive)
	op.execute(mail_receive_address_table.update().values(mail_id=sa.select([mail_table.c.id]).where(mail_receive_address_table.c.mail_dn==mail_table.c.dn).limit(1).as_scalar()))
	rows = []
	for mail in ldap_mails:
		rows += [{'mail_dn': mail['dn'], 'address': address} for address in mail['destinations']]
	op.bulk_insert(mail_destination_address_table, rows)
	op.execute(mail_destination_address_table.update().values(mail_id=sa.select([mail_table.c.id]).where(mail_destination_address_table.c.mail_dn==mail_table.c.dn).limit(1).as_scalar()))
	# Drop the temporary DN columns and make mail_id mandatory
	with op.batch_alter_table('mail', schema=None) as batch_op:
		batch_op.drop_column('dn')
	with op.batch_alter_table('mail_destination_address', copy_from=mail_destination_address_table) as batch_op:
		batch_op.alter_column('mail_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('mail_dn')
	with op.batch_alter_table('mail_receive_address', copy_from=mail_receive_address_table) as batch_op:
		batch_op.alter_column('mail_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('mail_dn')
	# --- users imported from LDAP ---
	user_table = op.create_table('user',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('mail', sa.String(length=128), nullable=False),
		sa.Column('pwhash', sa.String(length=256), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True, name=op.f('ck_user_is_service_user')), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	op.bulk_insert(user_table, ldap_users)
	# --- groups imported from LDAP ---
	group_table = op.create_table('group',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('unix_gid', sa.Integer(), nullable=False),
		sa.Column('name', sa.String(length=32), nullable=False),
		sa.Column('description', sa.String(length=128), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_group')),
		sa.UniqueConstraint('name', name=op.f('uq_group_name')),
		sa.UniqueConstraint('unix_gid', name=op.f('uq_group_unix_gid'))
	)
	op.bulk_insert(group_table, [{'dn': group['dn'], 'unix_gid': group['unix_gid'], 'name': group['name'], 'description': group['description']} for group in ldap_groups])
	# --- user<->group membership derived from the groups' member DN lists ---
	# Uses a surrogate "id" primary key temporarily, because the DN-based rows
	# are only deduplicated/validated after the id columns are resolved.
	user_groups_table = op.create_table('user_groups',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), # tmp
		sa.Column('user_dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('user_id', sa.Integer(), nullable=True), # tmp nullable
		sa.Column('group_dn', sa.String(length=128), nullable=False), # tmp
		sa.Column('group_id', sa.Integer(), nullable=True), # tmp nullable
		sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_user_groups_group_id_group'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_groups_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_groups')),
	)
	rows = []
	for group in ldap_groups:
		rows += [{'group_dn': group['dn'], 'user_dn': member_dn} for member_dn in group['member_dns']]
	op.bulk_insert(user_groups_table, rows)
	op.execute(user_groups_table.update().values(user_id=sa.select([user_table.c.id]).where(user_groups_table.c.user_dn==user_table.c.dn).as_scalar()))
	op.execute(user_groups_table.update().values(group_id=sa.select([group_table.c.id]).where(user_groups_table.c.group_dn==group_table.c.dn).as_scalar()))
	# Delete member objects that are not users (like the "dummy" object)
	op.execute(user_groups_table.delete().where(sa.or_(user_groups_table.c.user_id==None, user_groups_table.c.group_id==None)))
	with op.batch_alter_table('user_groups', copy_from=user_groups_table) as batch_op:
		batch_op.alter_column('user_id', existing_type=sa.Integer(), nullable=False)
		batch_op.alter_column('group_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('group_dn')
		# Swap the surrogate pk for the natural composite (user_id, group_id) key
		batch_op.alter_column('id', existing_type=sa.Integer(), nullable=True, autoincrement=False)
		batch_op.drop_constraint('pk_user_groups', 'primary')
		batch_op.create_primary_key('pk_user_groups', ['user_id', 'group_id'])
		batch_op.drop_column('id')
		batch_op.drop_column('user_dn')
	# --- role-group: rewrite group_dn to group_id and rename to role_groups ---
	with op.batch_alter_table('role-group', schema=None) as batch_op:
		batch_op.add_column(sa.Column('group_id', sa.Integer(), nullable=True))
	role_groups_table = sa.Table('role-group', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('group_dn', sa.String(length=128), nullable=False),
		sa.Column('group_id', sa.Integer(), nullable=True),
		sa.Column('requires_mfa', sa.Boolean(create_constraint=False), nullable=False),
		sa.CheckConstraint('requires_mfa in (0,1)', name=op.f('ck_role-group_requires_mfa')),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
		sa.PrimaryKeyConstraint('role_id', 'group_dn', name=op.f('pk_role-group'))
	)
	op.execute(role_groups_table.update().values(group_id=sa.select([group_table.c.id]).where(role_groups_table.c.group_dn==group_table.c.dn).as_scalar()))
	op.execute(role_groups_table.delete().where(role_groups_table.c.group_id==None))
	with op.batch_alter_table('role-group', copy_from=role_groups_table) as batch_op:
		batch_op.drop_constraint('ck_role-group_requires_mfa', 'check')
		batch_op.create_check_constraint('ck_role_groups_requires_mfa', role_groups_table.c.requires_mfa.in_([0,1]))
		batch_op.drop_constraint('fk_role-group_role_id_role', 'foreignkey')
		batch_op.drop_constraint('pk_role-group', 'primary')
		batch_op.create_primary_key('pk_role_groups', ['role_id', 'group_id'])
		batch_op.create_foreign_key(batch_op.f('fk_role_groups_role_id_role'), 'role', ['role_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_role_groups_group_id_group'), 'group', ['group_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('group_dn')
	op.rename_table('role-group', 'role_groups')
	# --- role-user: rewrite member dn to user_id and rename to role_members ---
	with op.batch_alter_table('role-user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	role_members_table = sa.Table('role-user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-user_role_id_role')),
		sa.UniqueConstraint('dn', 'role_id', name='uq_role-user_dn'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-user'))
	)
	op.execute(role_members_table.update().values(user_id=sa.select([user_table.c.id]).where(role_members_table.c.dn==user_table.c.dn).as_scalar()))
	op.execute(role_members_table.delete().where(role_members_table.c.user_id==None))
	with op.batch_alter_table('role-user', copy_from=role_members_table) as batch_op:
		batch_op.alter_column('user_id', existing_type=sa.Integer(), nullable=False)
		batch_op.alter_column('role_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_constraint('fk_role-user_role_id_role', 'foreignkey')
		batch_op.drop_constraint('uq_role-user_dn', 'unique')
		batch_op.create_foreign_key(batch_op.f('fk_role_members_role_id_role'), 'role', ['role_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_role_members_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('dn')
		# Replace the surrogate pk with the natural (role_id, user_id) key
		batch_op.alter_column('id', existing_type=sa.Integer(), nullable=True, autoincrement=False)
		batch_op.drop_constraint('pk_role-user', 'primary')
		batch_op.create_primary_key('pk_role_members', ['role_id', 'user_id'])
		batch_op.drop_column('id')
	op.rename_table('role-user', 'role_members')
	# --- device_login_confirmation: user_dn -> user_id ---
	with op.batch_alter_table('device_login_confirmation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
		batch_op.create_unique_constraint(batch_op.f('uq_device_login_confirmation_user_id'), ['user_id'])
	device_login_confirmation = sa.Table('device_login_confirmation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('initiation_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name=op.f('fk_device_login_confirmation_initiation_id_')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
		sa.UniqueConstraint('initiation_id', 'code0', name='uq_device_login_confirmation_initiation_id_code0'),
		sa.UniqueConstraint('initiation_id', 'code1', name='uq_device_login_confirmation_initiation_id_code1'),
		sa.UniqueConstraint('user_id', name=op.f('uq_device_login_confirmation_user_id'))
	)
	op.execute(device_login_confirmation.update().values(user_id=sa.select([user_table.c.id]).where(device_login_confirmation.c.user_dn==user_table.c.dn).as_scalar()))
	op.execute(device_login_confirmation.delete().where(device_login_confirmation.c.user_id==None))
	with op.batch_alter_table('device_login_confirmation', copy_from=device_login_confirmation) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_device_login_confirmation_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		# Re-create the initiation fk to add the cascade clauses
		batch_op.drop_constraint('fk_device_login_confirmation_initiation_id_', type_='foreignkey')
		batch_op.create_foreign_key('fk_device_login_confirmation_initiation_id_', 'device_login_initiation', ['initiation_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.drop_column('user_dn')
	# --- invite: creator_dn -> creator_id (nullable, no delete of orphans) ---
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.add_column(sa.Column('creator_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_invite_creator_id_user'), 'user', ['creator_id'], ['id'], onupdate='CASCADE')
	invite = sa.Table('invite', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('creator_id', sa.Integer(), nullable=True),
		sa.Column('creator_dn', sa.String(length=128), nullable=True),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.ForeignKeyConstraint(['creator_id'], ['user.id'], name=op.f('fk_invite_creator_id_user'), onupdate='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')),
		sa.UniqueConstraint('token', name=op.f('uq_invite_token'))
	)
	op.execute(invite.update().values(creator_id=sa.select([user_table.c.id]).where(invite.c.creator_dn==user_table.c.dn).as_scalar()))
	with op.batch_alter_table('invite', copy_from=invite) as batch_op:
		batch_op.drop_column('creator_dn')
	# --- invite_grant: user_dn -> user_id ---
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	invite_grant = sa.Table('invite_grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_grant_invite_id_invite')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
	)
	op.execute(invite_grant.update().values(user_id=sa.select([user_table.c.id]).where(invite_grant.c.user_dn==user_table.c.dn).as_scalar()))
	op.execute(invite_grant.delete().where(invite_grant.c.user_id==None))
	with op.batch_alter_table('invite_grant', copy_from=invite_grant) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_invite_grant_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_constraint('fk_invite_grant_invite_id_invite', type_='foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_invite_grant_invite_id_invite'), 'invite', ['invite_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.drop_column('user_dn')
	# --- mfa_method: dn -> user_id ---
	with op.batch_alter_table('mfa_method', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_mfa_method_user_id_user'), 'user', ['user_id'], ['id'])
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=True),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	op.execute(mfa_method.update().values(user_id=sa.select([user_table.c.id]).where(mfa_method.c.dn==user_table.c.dn).as_scalar()))
	op.execute(mfa_method.delete().where(mfa_method.c.user_id==None))
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.alter_column('created', existing_type=sa.DateTime(), nullable=False)
		batch_op.alter_column('type', existing_type=sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=False)
		batch_op.drop_constraint('fk_mfa_method_user_id_user', type_='foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_mfa_method_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('dn')
	# --- oauth2grant: user_dn -> user_id; also tighten nullable columns ---
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant.update().values(user_id=sa.select([user_table.c.id]).where(oauth2grant.c.user_dn==user_table.c.dn).as_scalar()))
	op.execute(oauth2grant.delete().where(oauth2grant.c.user_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_oauth2grant_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.alter_column('_scopes', nullable=False, existing_type=sa.Text())
		batch_op.alter_column('client_id', nullable=False, existing_type=sa.String(length=40))
		batch_op.alter_column('expires', nullable=False, existing_type=sa.DateTime())
		batch_op.alter_column('redirect_uri', nullable=False, existing_type=sa.String(length=255))
		batch_op.drop_column('user_dn')
	# --- oauth2token: user_dn -> user_id; also tighten nullable columns ---
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('token_type', sa.String(length=40), nullable=True),
		sa.Column('access_token', sa.String(length=255), nullable=True),
		sa.Column('refresh_token', sa.String(length=255), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token.update().values(user_id=sa.select([user_table.c.id]).where(oauth2token.c.user_dn==user_table.c.dn).as_scalar()))
	op.execute(oauth2token.delete().where(oauth2token.c.user_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_oauth2token_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.alter_column('_scopes', nullable=False, existing_type=sa.Text())
		batch_op.alter_column('access_token', nullable=False, existing_type=sa.String(length=255))
		batch_op.alter_column('client_id', nullable=False, existing_type=sa.String(length=40))
		batch_op.alter_column('expires', nullable=False, existing_type=sa.DateTime())
		batch_op.alter_column('refresh_token', nullable=False, existing_type=sa.String(length=255))
		batch_op.alter_column('token_type', nullable=False, existing_type=sa.String(length=40))
		batch_op.drop_column('user_dn')
	# --- role: moderator_group_dn -> moderator_group_id (nullable) ---
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.add_column(sa.Column('moderator_group_id', sa.Integer(), nullable=True))
	role = sa.Table('role', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=32), nullable=True),
		sa.Column('description', sa.Text(), nullable=True),
		sa.Column('moderator_group_id', sa.Integer(), nullable=True),
		sa.Column('moderator_group_dn', sa.String(length=128), nullable=True),
		sa.Column('locked', sa.Boolean(create_constraint=True, name=op.f('ck_role_locked')), nullable=False),
		sa.Column('is_default', sa.Boolean(create_constraint=True, name=op.f('ck_role_is_default')), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
		sa.UniqueConstraint('name', name=op.f('uq_role_name'))
	)
	op.execute(role.update().values(moderator_group_id=sa.select([group_table.c.id]).where(role.c.moderator_group_dn==group_table.c.dn).as_scalar()))
	with op.batch_alter_table('role', copy_from=role) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_role_moderator_group_id_group'), 'group', ['moderator_group_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
		batch_op.alter_column('description', existing_type=sa.Text(), nullable=False)
		batch_op.alter_column('name', existing_type=sa.String(length=32), nullable=False)
		batch_op.drop_column('moderator_group_dn')
	# --- signup: user_dn -> user_id (nullable, signups may be unfinished) ---
	with op.batch_alter_table('signup', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
		batch_op.create_unique_constraint(batch_op.f('uq_signup_user_id'), ['user_id'])
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup')),
		sa.UniqueConstraint('user_id', name=op.f('uq_signup_user_id'))
	)
	op.execute(signup.update().values(user_id=sa.select([user_table.c.id]).where(signup.c.user_dn==user_table.c.dn).as_scalar()))
	with op.batch_alter_table('signup', copy_from=signup) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_signup_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('user_dn')
	# --- mailToken: resolve via loginname instead of DN ---
	with op.batch_alter_table('mailToken', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	mail_token = sa.Table('mailToken', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.Column('newmail', sa.String(length=255), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mailToken'))
	)
	op.execute(mail_token.update().values(user_id=sa.select([user_table.c.id]).where(mail_token.c.loginname==user_table.c.loginname).as_scalar()))
	op.execute(mail_token.delete().where(mail_token.c.user_id==None))
	with op.batch_alter_table('mailToken', copy_from=mail_token) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key(batch_op.f('fk_mailToken_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('loginname')
	# --- passwordToken: resolve via loginname instead of DN ---
	with op.batch_alter_table('passwordToken', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.Integer(), nullable=True))
	password_token = sa.Table('passwordToken', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_passwordToken'))
	)
	op.execute(password_token.update().values(user_id=sa.select([user_table.c.id]).where(password_token.c.loginname==user_table.c.loginname).as_scalar()))
	op.execute(password_token.delete().where(password_token.c.user_id==None))
	with op.batch_alter_table('passwordToken', copy_from=password_token) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.alter_column('created', existing_type=sa.DateTime(), nullable=False)
		batch_op.create_foreign_key(batch_op.f('fk_passwordToken_user_id_user'), 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_column('loginname')
	# Finally drop the temporary dn columns of user and group (all references resolved)
	with op.batch_alter_table('group', copy_from=group_table) as batch_op:
		batch_op.drop_column('dn')
	with op.batch_alter_table('user', copy_from=user_table) as batch_op:
		batch_op.drop_column('dn')
	# These changes have nothing todo with the LDAP-to-DB migration, but since we add onupdate/ondelete clauses everywhere else ...
	invite_roles = sa.Table('invite_roles', meta,
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_roles_invite_id_invite')),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_invite_roles_role_id_role')),
		sa.PrimaryKeyConstraint('invite_id', 'role_id', name=op.f('pk_invite_roles'))
	)
	with op.batch_alter_table('invite_roles', copy_from=invite_roles) as batch_op:
		batch_op.drop_constraint('fk_invite_roles_role_id_role', type_='foreignkey')
		batch_op.drop_constraint('fk_invite_roles_invite_id_invite', type_='foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_invite_roles_role_id_role'), 'role', ['role_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_invite_roles_invite_id_invite'), 'invite', ['invite_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table('invite_signup', copy_from=invite_signup) as batch_op:
		batch_op.drop_constraint('fk_invite_signup_id_signup', type_='foreignkey')
		batch_op.drop_constraint('fk_invite_signup_invite_id_invite', type_='foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_id_signup'), 'signup', ['id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_invite_id_invite'), 'invite', ['invite_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
	role_inclusion = sa.Table('role-inclusion', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], name=op.f('fk_role-inclusion_included_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-inclusion_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
	)
	with op.batch_alter_table('role-inclusion', copy_from=role_inclusion) as batch_op:
		batch_op.drop_constraint('fk_role-inclusion_role_id_role', type_='foreignkey')
		batch_op.drop_constraint('fk_role-inclusion_included_role_id_role', type_='foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_role-inclusion_role_id_role'), 'role', ['role_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_role-inclusion_included_role_id_role'), 'role', ['included_role_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
def downgrade():
	"""Revert the LDAP-to-database migration (schema only).

	Re-adds the DN-based columns that referenced LDAP objects (``dn``,
	``user_dn``, ``creator_dn``, ``moderator_group_dn``, ``group_dn``),
	fills them by matching rows against the current LDAP contents, drops
	the integer foreign keys that replaced them, and finally drops the
	tables that only exist in the database-backed schema (``user``,
	``group`` and the mail tables).

	The downgrade is incomplete as it does not sync changes back to LDAP.
	The code is only here to keep check_migrations.py working, so it
	refuses to run outside of tests.
	"""
	if not current_app.testing:
		raise Exception('Downgrade is not supported')
	# Load LDAP data first, so we fail as early as possible
	ldap_users = get_ldap_users()
	ldap_groups = get_ldap_groups()
	meta = sa.MetaData(bind=op.get_bind())
	# These changes have nothing to do with the LDAP to DB migration: they
	# only revert the ON UPDATE/ON DELETE CASCADE options the upgrade added
	# to the role-inclusion, invite_signup and invite_roles foreign keys.
	role_inclusion = sa.Table('role-inclusion', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], name=op.f('fk_role-inclusion_included_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-inclusion_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
	)
	with op.batch_alter_table('role-inclusion', copy_from=role_inclusion) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_role-inclusion_included_role_id_role'), type_='foreignkey')
		batch_op.drop_constraint(batch_op.f('fk_role-inclusion_role_id_role'), type_='foreignkey')
		batch_op.create_foreign_key('fk_role-inclusion_included_role_id_role', 'role', ['included_role_id'], ['id'])
		batch_op.create_foreign_key('fk_role-inclusion_role_id_role', 'role', ['role_id'], ['id'])
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table('invite_signup', copy_from=invite_signup) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_signup_invite_id_invite'), type_='foreignkey')
		batch_op.drop_constraint(batch_op.f('fk_invite_signup_id_signup'), type_='foreignkey')
		batch_op.create_foreign_key('fk_invite_signup_invite_id_invite', 'invite', ['invite_id'], ['id'])
		batch_op.create_foreign_key('fk_invite_signup_id_signup', 'signup', ['id'], ['id'])
	invite_roles = sa.Table('invite_roles', meta,
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_roles_invite_id_invite'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_invite_roles_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('invite_id', 'role_id', name=op.f('pk_invite_roles'))
	)
	with op.batch_alter_table('invite_roles', copy_from=invite_roles) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_roles_invite_id_invite'), type_='foreignkey')
		batch_op.drop_constraint(batch_op.f('fk_invite_roles_role_id_role'), type_='foreignkey')
		batch_op.create_foreign_key('fk_invite_roles_invite_id_invite', 'invite', ['invite_id'], ['id'])
		batch_op.create_foreign_key('fk_invite_roles_role_id_role', 'role', ['role_id'], ['id'])
	# user: re-add the LDAP dn column and fill it from the live LDAP tree via
	# a temporary loginname->dn mapping table, then recreate the table so dn
	# becomes NOT NULL.
	with op.batch_alter_table('user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('dn', sa.String(length=128), nullable=True)) # temporarily nullable
	user_table = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('mail', sa.String(length=128), nullable=False),
		sa.Column('pwhash', sa.String(length=256), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True, name=op.f('ck_user_is_service_user')), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	user_dn_map_table = op.create_table('user_dn_map', # deleted later
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_dn_map')),
		sa.UniqueConstraint('dn', name=op.f('uq_user_dn_map_dn')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_dn_map_loginname'))
	)
	rows = [{'dn': user['dn'], 'loginname': user['loginname']} for user in ldap_users]
	op.bulk_insert(user_dn_map_table, rows)
	# Correlated subquery: match each user row to its LDAP dn by loginname
	op.execute(user_table.update().values(dn=sa.select([user_dn_map_table.c.dn]).where(user_table.c.loginname==user_dn_map_table.c.loginname).as_scalar()))
	with op.batch_alter_table('user', copy_from=user_table) as batch_op:
		pass # Recreate table with dn not nullable
	op.drop_table('user_dn_map')
	# group: same dn back-fill procedure as for user, keyed on the group name
	with op.batch_alter_table('group', schema=None) as batch_op:
		batch_op.add_column(sa.Column('dn', sa.String(length=128), nullable=True)) # temporarily nullable
	group_table = sa.Table('group', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('unix_gid', sa.Integer(), nullable=False),
		sa.Column('name', sa.String(length=32), nullable=False),
		sa.Column('description', sa.String(length=128), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_group')),
		sa.UniqueConstraint('name', name=op.f('uq_group_name')),
		sa.UniqueConstraint('unix_gid', name=op.f('uq_group_unix_gid'))
	)
	group_dn_map_table = op.create_table('group_dn_map', # deleted later
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=False),
		sa.Column('name', sa.String(length=32), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_group_dn_map')),
		sa.UniqueConstraint('dn', name=op.f('uq_group_dn_map_dn')),
		sa.UniqueConstraint('name', name=op.f('uq_group_dn_map_name'))
	)
	rows = [{'dn': group['dn'], 'name': group['name']} for group in ldap_groups]
	op.bulk_insert(group_dn_map_table, rows)
	op.execute(group_table.update().values(dn=sa.select([group_dn_map_table.c.dn]).where(group_table.c.name==group_dn_map_table.c.name).as_scalar()))
	with op.batch_alter_table('group', copy_from=group_table) as batch_op:
		pass # Recreate table with dn not nullable
	op.drop_table('group_dn_map')
	# passwordToken: replace the user_id foreign key with a plain loginname column
	with op.batch_alter_table('passwordToken', schema=None) as batch_op:
		batch_op.add_column(sa.Column('loginname', sa.String(length=32), nullable=True))
	password_token = sa.Table('passwordToken', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_passwordToken_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_passwordToken'))
	)
	op.execute(password_token.update().values(loginname=sa.select([user_table.c.loginname]).where(password_token.c.user_id==user_table.c.id).as_scalar()))
	with op.batch_alter_table('passwordToken', copy_from=password_token) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_passwordToken_user_id_user'), type_='foreignkey')
		batch_op.alter_column('created', existing_type=sa.DateTime(), nullable=True)
		batch_op.drop_column('user_id')
	# mailToken: replace the user_id foreign key with a plain loginname column
	with op.batch_alter_table('mailToken', schema=None) as batch_op:
		batch_op.add_column(sa.Column('loginname', sa.String(length=32), nullable=True))
	mail_token = sa.Table('mailToken', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('newmail', sa.String(length=255), nullable=True),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mailToken_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mailToken'))
	)
	op.execute(mail_token.update().values(loginname=sa.select([user_table.c.loginname]).where(mail_token.c.user_id==user_table.c.id).as_scalar()))
	with op.batch_alter_table('mailToken', copy_from=mail_token) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_mailToken_user_id_user'), type_='foreignkey')
		batch_op.drop_column('user_id')
	# signup: replace the user_id foreign key with a user_dn column
	with op.batch_alter_table('signup', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_dn', sa.String(length=128), nullable=True))
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_signup_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup')),
		sa.UniqueConstraint('user_id', name=op.f('uq_signup_user_id'))
	)
	op.execute(signup.update().values(user_dn=sa.select([user_table.c.dn]).where(signup.c.user_id==user_table.c.id).as_scalar()))
	with op.batch_alter_table('signup', copy_from=signup) as batch_op:
		batch_op.drop_constraint('fk_signup_user_id_user', 'foreignkey')
		batch_op.drop_constraint('uq_signup_user_id', 'unique')
		batch_op.drop_column('user_id')
	# role: replace the moderator_group_id foreign key with moderator_group_dn
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.add_column(sa.Column('moderator_group_dn', sa.String(length=128), nullable=True))
	role = sa.Table('role', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=32), nullable=True),
		sa.Column('description', sa.Text(), nullable=True),
		sa.Column('moderator_group_id', sa.Integer(), nullable=True),
		sa.Column('moderator_group_dn', sa.String(length=128), nullable=True),
		sa.Column('locked', sa.Boolean(create_constraint=True, name=op.f('ck_role_locked')), nullable=False),
		sa.Column('is_default', sa.Boolean(create_constraint=True, name=op.f('ck_role_is_default')), nullable=False),
		sa.ForeignKeyConstraint(['moderator_group_id'], ['group.id'], name=op.f('fk_role_moderator_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
		sa.UniqueConstraint('name', name=op.f('uq_role_name'))
	)
	op.execute(role.update().values(moderator_group_dn=sa.select([group_table.c.dn]).where(role.c.moderator_group_id==group_table.c.id).as_scalar()))
	with op.batch_alter_table('role', copy_from=role) as batch_op:
		batch_op.alter_column('description', existing_type=sa.Text(), nullable=True)
		batch_op.alter_column('name', existing_type=sa.String(length=32), nullable=True)
		batch_op.drop_constraint('fk_role_moderator_group_id_group', 'foreignkey')
		batch_op.drop_column('moderator_group_id')
	# oauth2token: replace user_id with user_dn; rows without a matching LDAP
	# user are deleted, and most columns are relaxed back to nullable.
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_dn', sa.String(length=128), nullable=True))
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token.update().values(user_dn=sa.select([user_table.c.dn]).where(oauth2token.c.user_id==user_table.c.id).as_scalar()))
	op.execute(oauth2token.delete().where(oauth2token.c.user_dn==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.alter_column('_scopes', nullable=True, existing_type=sa.Text())
		batch_op.alter_column('access_token', nullable=True, existing_type=sa.String(length=255))
		batch_op.alter_column('client_id', nullable=True, existing_type=sa.String(length=40))
		batch_op.alter_column('expires', nullable=True, existing_type=sa.DateTime())
		batch_op.alter_column('refresh_token', nullable=True, existing_type=sa.String(length=255))
		batch_op.alter_column('token_type', nullable=True, existing_type=sa.String(length=40))
		batch_op.drop_constraint('fk_oauth2token_user_id_user', 'foreignkey')
		batch_op.drop_column('user_id')
	# oauth2grant: same treatment as oauth2token
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_dn', sa.String(length=128), nullable=True))
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant.update().values(user_dn=sa.select([user_table.c.dn]).where(oauth2grant.c.user_id==user_table.c.id).as_scalar()))
	op.execute(oauth2grant.delete().where(oauth2grant.c.user_dn==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.alter_column('_scopes', nullable=True, existing_type=sa.Text())
		batch_op.alter_column('client_id', nullable=True, existing_type=sa.String(length=40))
		batch_op.alter_column('expires', nullable=True, existing_type=sa.DateTime())
		batch_op.alter_column('redirect_uri', nullable=True, existing_type=sa.String(length=255))
		batch_op.drop_constraint('fk_oauth2grant_user_id_user', 'foreignkey')
		batch_op.drop_column('user_id')
	# mfa_method: replace user_id with the user dn; orphaned rows are deleted
	with op.batch_alter_table('mfa_method', schema=None) as batch_op:
		batch_op.add_column(sa.Column('dn', sa.String(length=128), nullable=True))
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	op.execute(mfa_method.update().values(dn=sa.select([user_table.c.dn]).where(mfa_method.c.user_id==user_table.c.id).as_scalar()))
	op.execute(mfa_method.delete().where(mfa_method.c.dn==None))
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.drop_constraint('fk_mfa_method_user_id_user', 'foreignkey')
		batch_op.alter_column('type', existing_type=sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=True)
		batch_op.alter_column('created', existing_type=sa.DateTime(), nullable=True)
		batch_op.drop_column('user_id')
	# invite_grant: replace user_id with user_dn; orphaned rows are deleted
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_dn', sa.String(length=128), nullable=True))
	invite_grant = sa.Table('invite_grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_grant_invite_id_invite'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_invite_grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
	)
	op.execute(invite_grant.update().values(user_dn=sa.select([user_table.c.dn]).where(invite_grant.c.user_id==user_table.c.id).as_scalar()))
	op.execute(invite_grant.delete().where(invite_grant.c.user_dn==None))
	with op.batch_alter_table('invite_grant', copy_from=invite_grant) as batch_op:
		batch_op.drop_constraint('fk_invite_grant_user_id_user', 'foreignkey')
		batch_op.drop_constraint(batch_op.f('fk_invite_grant_invite_id_invite'), type_='foreignkey')
		batch_op.create_foreign_key('fk_invite_grant_invite_id_invite', 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('user_id')
	# invite: replace creator_id with creator_dn (kept nullable, creator may be gone)
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.add_column(sa.Column('creator_dn', sa.String(length=128), nullable=True))
	invite = sa.Table('invite', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('creator_id', sa.Integer(), nullable=True),
		sa.Column('creator_dn', sa.String(length=128), nullable=True),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.ForeignKeyConstraint(['creator_id'], ['user.id'], name=op.f('fk_invite_creator_id_user')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')),
		sa.UniqueConstraint('token', name=op.f('uq_invite_token'))
	)
	op.execute(invite.update().values(creator_dn=sa.select([user_table.c.dn]).where(invite.c.creator_id==user_table.c.id).as_scalar()))
	with op.batch_alter_table('invite', copy_from=invite) as batch_op:
		batch_op.drop_constraint('fk_invite_creator_id_user', 'foreignkey')
		batch_op.drop_column('creator_id')
	# device_login_confirmation: replace user_id with user_dn; orphans deleted
	with op.batch_alter_table('device_login_confirmation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_dn', sa.String(length=128), nullable=True))
	device_login_confirmation = sa.Table('device_login_confirmation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('initiation_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name=op.f('fk_device_login_confirmation_initiation_id_')),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_device_login_confirmation_user_id_user')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
		sa.UniqueConstraint('initiation_id', 'code0', name='uq_device_login_confirmation_initiation_id_code0'),
		sa.UniqueConstraint('initiation_id', 'code1', name='uq_device_login_confirmation_initiation_id_code1'),
		sa.UniqueConstraint('user_dn', name=op.f('uq_device_login_confirmation_user_dn'))
	)
	op.execute(device_login_confirmation.update().values(user_dn=sa.select([user_table.c.dn]).where(device_login_confirmation.c.user_id==user_table.c.id).as_scalar()))
	op.execute(device_login_confirmation.delete().where(device_login_confirmation.c.user_dn==None))
	with op.batch_alter_table('device_login_confirmation', copy_from=device_login_confirmation) as batch_op:
		batch_op.drop_constraint('fk_device_login_confirmation_user_id_user', 'foreignkey')
		batch_op.drop_constraint('fk_device_login_confirmation_initiation_id_', type_='foreignkey')
		batch_op.create_foreign_key('fk_device_login_confirmation_initiation_id_', 'device_login_initiation', ['initiation_id'], ['id'])
		batch_op.drop_column('user_id')
	# role_members -> role-user: rename the table, replace user_id with the
	# user dn and switch from a composite (role_id, user_id) primary key to a
	# surrogate id plus a (dn, role_id) unique constraint.
	with op.batch_alter_table('role_members', schema=None) as batch_op:
		batch_op.add_column(sa.Column('dn', sa.String(length=128), nullable=True))
	op.rename_table('role_members', 'role-user')
	role_members_table = sa.Table('role-user', meta,
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_role_members_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role_members_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('role_id', 'user_id', name=op.f('pk_role_members'))
	)
	op.execute(role_members_table.update().values(dn=sa.select([user_table.c.dn]).where(role_members_table.c.user_id==user_table.c.id).as_scalar()))
	op.execute(role_members_table.delete().where(role_members_table.c.dn==None))
	with op.batch_alter_table('role-user', copy_from=role_members_table, recreate='always') as batch_op:
		batch_op.drop_constraint('fk_role_members_role_id_role', 'foreignkey')
		batch_op.create_foreign_key(batch_op.f('fk_role-user_role_id_role'), 'role', ['role_id'], ['id'])
		batch_op.alter_column('dn', nullable=False, existing_type=sa.String(length=128))
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.drop_constraint('fk_role_members_user_id_user', 'foreignkey')
		batch_op.drop_constraint('pk_role_members', 'primary')
		batch_op.create_primary_key('pk_role-user', ['id'])
		batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
		batch_op.create_unique_constraint(batch_op.f('uq_role-user_dn'), ['dn', 'role_id'])
		batch_op.alter_column('role_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('user_id')
	# role_groups -> role-group: rename the table and replace group_id with
	# the group dn as part of the (role_id, group_dn) primary key.
	with op.batch_alter_table('role_groups', schema=None) as batch_op:
		batch_op.add_column(sa.Column('group_dn', sa.String(length=128), nullable=True))
	op.rename_table('role_groups', 'role-group')
	role_groups_table = sa.Table('role-group', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('group_dn', sa.String(length=128), nullable=True),
		sa.Column('group_id', sa.Integer(), nullable=False),
		sa.Column('requires_mfa', sa.Boolean(create_constraint=False), nullable=False),
		sa.CheckConstraint('requires_mfa in (0,1)', name=op.f('ck_role_groups_requires_mfa')),
		sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_role_groups_group_id_group'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role_groups_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('role_id', 'group_id', name=op.f('pk_role_groups'))
	)
	op.execute(role_groups_table.update().values(group_dn=sa.select([group_table.c.dn]).where(role_groups_table.c.group_id==group_table.c.id).as_scalar()))
	op.execute(role_groups_table.delete().where(role_groups_table.c.group_dn==None))
	with op.batch_alter_table('role-group', copy_from=role_groups_table) as batch_op:
		batch_op.drop_constraint('fk_role_groups_group_id_group', 'foreignkey')
		batch_op.drop_constraint('fk_role_groups_role_id_role', 'foreignkey')
		batch_op.drop_constraint('ck_role_groups_requires_mfa', 'check')
		batch_op.create_check_constraint('ck_role-group_requires_mfa', role_groups_table.c.requires_mfa.in_([0,1]))
		batch_op.alter_column('group_dn', nullable=False, existing_type=sa.String(length=128))
		batch_op.drop_constraint('pk_role_groups', 'primary')
		batch_op.create_primary_key('pk_role-group', ['role_id', 'group_dn'])
		batch_op.create_foreign_key(batch_op.f('fk_role-group_role_id_role'), 'role', ['role_id'], ['id'])
		batch_op.drop_column('group_id')
	# Finally drop the tables that only exist in the database-backed schema.
	# Dependent tables go first so no foreign keys are left dangling.
	op.drop_table('mail_receive_address')
	op.drop_table('mail_destination_address')
	op.drop_table('mail')
	op.drop_table('user_groups')
	op.drop_table('user')
	op.drop_table('group')