Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • uffd/uffd
  • rixx/uffd
  • thies/uffd
  • leona/uffd
  • enbewe/uffd
  • strifel/uffd
  • thies/uffd-2
7 results
Show changes
Showing
with 959 additions and 267 deletions
# Re-export this module's blueprint as a list; the application factory
# presumably iterates per-module `bp` lists when registering blueprints — TODO confirm
from .views import bp as _bp
bp = [_bp]
{# Confirmation page shown before disabling two-factor authentication;
   submits to mfa.disable_confirm which deletes all MFA methods of the user. #}
{% extends 'base.html' %}
{% block body %}
<p>When you proceed, all recovery codes, registered authenticator applications and devices will be invalidated.
You can later generate new recovery codes and setup your applications and devices again.</p>
<form class="form" action="{{ url_for('mfa.disable_confirm') }}" method="POST">
<button type="submit" class="btn btn-danger btn-block">Disable two-factor authentication</button>
</form>
{% endblock %}
from warnings import warn
import urllib.parse

from flask import Blueprint, render_template, session, request, redirect, url_for, flash, current_app, abort

from uffd.database import db
from uffd.mfa.models import MFAMethod, TOTPMethod, WebauthnMethod, RecoveryCodeMethod
from uffd.session.views import get_current_user, login_required, pre_mfa_login_required
from uffd.user.models import User
from uffd.csrf import csrf_protect
from uffd.ratelimit import Ratelimit, format_delay

# All MFA views live under /mfa/ with templates from this package
bp = Blueprint('mfa', __name__, template_folder='templates', url_prefix='/mfa/')

# Rate limit for failed second-factor attempts, keyed by user DN in auth_finish
# (arguments presumably: name, interval in seconds, allowed attempts — confirm in uffd.ratelimit)
mfa_ratelimit = Ratelimit('mfa', 1*60, 3)
@bp.route('/', methods=['GET'])
@login_required()
def setup():
	'''Render the MFA settings page listing every configured method of the current user'''
	current = get_current_user()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=current.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=current.dn).all()
	webauthn_methods = WebauthnMethod.query.filter_by(dn=current.dn).all()
	return render_template('setup.html', totp_methods=totp_methods,
		webauthn_methods=webauthn_methods, recovery_methods=recovery_methods)
@bp.route('/setup/disable', methods=['GET'])
@login_required()
def disable():
	'''Ask the user to confirm disabling two-factor authentication'''
	confirmation_page = render_template('disable.html')
	return confirmation_page
@bp.route('/setup/disable', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def disable_confirm():
	'''Delete every MFA method (TOTP, WebAuthn, recovery codes) of the current user'''
	current = get_current_user()
	MFAMethod.query.filter_by(dn=current.dn).delete()
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/admin/<int:uid>/disable')
@login_required()
@csrf_protect(blueprint=bp)
def admin_disable(uid):
	'''Reset two-factor authentication of an arbitrary user (admins only)'''
	# Group cannot be checked with login_required kwarg, because the config
	# variable is not available when the decorator is processed
	admin = get_current_user()
	if not admin.is_in_group(current_app.config['ACL_ADMIN_GROUP']):
		flash('Access denied')
		return redirect(url_for('index'))
	target = User.query.filter_by(uid=uid).one()
	MFAMethod.query.filter_by(dn=target.dn).delete()
	db.session.commit()
	flash('Two-factor authentication was reset')
	return redirect(url_for('user.show', uid=uid))
@bp.route('/setup/recovery', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_recovery():
	'''Replace all recovery codes of the current user with ten fresh ones and display them'''
	current = get_current_user()
	# Old codes become invalid the moment new ones are generated
	for stale in RecoveryCodeMethod.query.filter_by(dn=current.dn).all():
		db.session.delete(stale)
	methods = [RecoveryCodeMethod(current) for _ in range(10)]
	db.session.add_all(methods)
	db.session.commit()
	return render_template('setup_recovery.html', methods=methods)
@bp.route('/setup/totp', methods=['GET'])
@login_required()
def setup_totp():
	'''Show the TOTP enrollment page with a freshly generated secret'''
	current = get_current_user()
	new_method = TOTPMethod(current)
	# Stash the secret server-side; setup_totp_finish pops it for verification
	session['mfa_totp_key'] = new_method.key
	return render_template('setup_totp.html', method=new_method, name=request.values['name'])
@bp.route('/setup/totp', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_totp_finish():
	'''Verify the first TOTP code and persist the new authenticator on success'''
	current = get_current_user()
	# Recovery codes are required before any second factor can be enabled
	if not RecoveryCodeMethod.query.filter_by(dn=current.dn).all():
		flash('Generate recovery codes first!')
		return redirect(url_for('mfa.setup'))
	new_method = TOTPMethod(current, name=request.values['name'], key=session.pop('mfa_totp_key'))
	if not new_method.verify(request.form['code']):
		flash('Code is invalid')
		return redirect(url_for('mfa.setup_totp', name=request.values['name']))
	db.session.add(new_method)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/setup/totp/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_totp(id): #pylint: disable=redefined-builtin
	'''Remove a single TOTP authenticator of the current user'''
	current = get_current_user()
	# Filtering by dn ensures users can only delete their own methods
	target = TOTPMethod.query.filter_by(dn=current.dn, id=id).first_or_404()
	db.session.delete(target)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
# WebAuthn support is optional because fido2 has a pretty unstable
# interface (v0.5.0 on buster and current version are completely
# incompatible) and might be difficult to install with the correct version
try:
	from fido2.client import ClientData
	from fido2.server import Fido2Server, RelyingParty
	from fido2.ctap2 import AttestationObject, AuthenticatorData
	from fido2 import cbor
	WEBAUTHN_SUPPORTED = True
except ImportError as err:
	warn('2FA WebAuthn support disabled because import of the fido2 module failed (%s)'%err)
	WEBAUTHN_SUPPORTED = False

# Expose the flag to all templates so the UI can hide WebAuthn features
bp.add_app_template_global(WEBAUTHN_SUPPORTED, name='webauthn_supported')
# The WebAuthn views are only defined (and their routes only registered)
# when the fido2 module imported successfully above.
if WEBAUTHN_SUPPORTED:
	def get_webauthn_server():
		# Relying-party ID falls back to the request's hostname if MFA_RP_ID is unset
		return Fido2Server(RelyingParty(current_app.config.get('MFA_RP_ID', urllib.parse.urlsplit(request.url).hostname), current_app.config['MFA_RP_NAME']))

	@bp.route('/setup/webauthn/begin', methods=['POST'])
	@login_required()
	@csrf_protect(blueprint=bp)
	def setup_webauthn_begin():
		'''Start WebAuthn registration; returns CBOR-encoded creation options for the browser'''
		user = get_current_user()
		# Same precondition as TOTP setup: recovery codes must exist first
		if not RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
			abort(403)
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		# Existing credentials are passed so the authenticator rejects duplicates
		creds = [method.cred for method in methods]
		server = get_webauthn_server()
		registration_data, state = server.register_begin(
			{
				"id": user.dn.encode(),
				"name": user.loginname,
				"displayName": user.displayname,
			},
			creds,
			user_verification='discouraged',
		)
		# Challenge state is checked again in setup_webauthn_complete
		session["webauthn-state"] = state
		return cbor.dumps(registration_data)

	@bp.route('/setup/webauthn/complete', methods=['POST'])
	@login_required()
	@csrf_protect(blueprint=bp)
	def setup_webauthn_complete():
		'''Finish WebAuthn registration and store the new credential'''
		user = get_current_user()
		server = get_webauthn_server()
		data = cbor.loads(request.get_data())[0]
		client_data = ClientData(data["clientDataJSON"])
		att_obj = AttestationObject(data["attestationObject"])
		# Raises if the attestation does not match the stored challenge state
		auth_data = server.register_complete(session["webauthn-state"], client_data, att_obj)
		method = WebauthnMethod(user, auth_data.credential_data, name=data['name'])
		db.session.add(method)
		db.session.commit()
		return cbor.dumps({"status": "OK"})

	@bp.route("/auth/webauthn/begin", methods=["POST"])
	@pre_mfa_login_required(no_redirect=True)
	def auth_webauthn_begin():
		'''Start WebAuthn login; returns CBOR-encoded request options for the browser'''
		user = get_current_user()
		server = get_webauthn_server()
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		creds = [method.cred for method in methods]
		# No registered credentials: nothing to authenticate against
		if not creds:
			abort(404)
		auth_data, state = server.authenticate_begin(creds, user_verification='discouraged')
		session["webauthn-state"] = state
		return cbor.dumps(auth_data)

	@bp.route("/auth/webauthn/complete", methods=["POST"])
	@pre_mfa_login_required(no_redirect=True)
	def auth_webauthn_complete():
		'''Verify a WebAuthn assertion and mark the session as fully authenticated'''
		user = get_current_user()
		server = get_webauthn_server()
		methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
		creds = [method.cred for method in methods]
		if not creds:
			abort(404)
		data = cbor.loads(request.get_data())[0]
		credential_id = data["credentialId"]
		client_data = ClientData(data["clientDataJSON"])
		auth_data = AuthenticatorData(data["authenticatorData"])
		signature = data["signature"]
		# authenticate_complete() (as of python-fido2 v0.5.0, the version in Debian Buster)
		# does not check signCount, although the spec recommends it
		server.authenticate_complete(
			session.pop("webauthn-state"),
			creds,
			credential_id,
			client_data,
			auth_data,
			signature,
		)
		# Second factor passed: session is now fully authenticated
		session['user_mfa'] = True
		return cbor.dumps({"status": "OK"})
@bp.route('/setup/webauthn/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_webauthn(id): #pylint: disable=redefined-builtin
	'''Remove a single WebAuthn credential of the current user'''
	current = get_current_user()
	# Filtering by dn ensures users can only delete their own methods
	target = WebauthnMethod.query.filter_by(dn=current.dn, id=id).first_or_404()
	db.session.delete(target)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/auth', methods=['GET'])
@pre_mfa_login_required()
def auth():
	'''Show the second-factor prompt, or skip it if MFA is not configured / already passed'''
	current = get_current_user()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=current.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=current.dn).all()
	webauthn_methods = WebauthnMethod.query.filter_by(dn=current.dn).all()
	# Without any second factor configured, MFA is trivially satisfied
	if not (totp_methods or webauthn_methods):
		session['user_mfa'] = True
	if session.get('user_mfa'):
		return redirect(request.values.get('ref', url_for('index')))
	return render_template('auth.html', ref=request.values.get('ref'), totp_methods=totp_methods,
		webauthn_methods=webauthn_methods, recovery_methods=recovery_methods)
@bp.route('/auth', methods=['POST'])
@pre_mfa_login_required()
def auth_finish():
	'''Check a submitted code against TOTP methods first, then recovery codes.

	Successful verification sets session['user_mfa']; failures are rate-limited
	per user DN via mfa_ratelimit.'''
	user = get_current_user()
	delay = mfa_ratelimit.get_delay(user.dn)
	if delay:
		flash('We received too many invalid attempts! Please wait at least %s.'%format_delay(delay))
		return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=user.dn).all()
	for method in totp_methods:
		if method.verify(request.form['code']):
			session['user_mfa'] = True
			return redirect(request.values.get('ref', url_for('index')))
	for method in recovery_methods:
		if method.verify(request.form['code']):
			# Recovery codes are single-use: delete the matched one immediately
			db.session.delete(method)
			db.session.commit()
			session['user_mfa'] = True
			# <=1 remaining before deletion means the matched code was the last one
			if len(recovery_methods) <= 1:
				flash('You have exhausted your recovery codes. Please generate new ones now!')
				return redirect(url_for('mfa.setup'))
			if len(recovery_methods) <= 5:
				flash('You only have a few recovery codes remaining. Make sure to generate new ones before they run out.')
				return redirect(url_for('mfa.setup'))
			return redirect(request.values.get('ref', url_for('index')))
	# No method accepted the code: count this attempt towards the rate limit
	mfa_ratelimit.log(user.dn)
	flash('Two-factor authentication failed')
	return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
Database Migrations
===================
While we use Alembic in a single-database configuration, the migration scripts
are compatible with both SQLite and MySQL/MariaDB.
Compatibility with SQLite almost always requires `batch_alter_table` operations
to modify existing tables. These recreate the tables, copy the data and finally
replace the old with the newly created ones. Alembic is configured to
auto-generate those operations, but in most cases the generated code fails to
fully reflect all details of the original schema. This way some constraints
(e.g. `CHECK` constraints on Enums) are lost. Define the full table and pass it
with `copy_from` to `batch_alter_table` to prevent this.
Compatibility with MySQL requires special care when changing primary keys and
when dealing with foreign keys. It often helps to temporarily remove foreign
key constraints concerning the table that is subject to change. When adding an
autoincrement id column as the new primary key of a table, recreate the table
with `batch_alter_table`.
The `check_migrations.py` script verifies that upgrading and downgrading works
with both databases. While it is far from perfect, it catches many common
errors. It runs automatically as part of the CI pipeline. Make sure to update
the script when adding new tables and when making significant changes to
existing tables.
File moved
......@@ -3,6 +3,7 @@ from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
import click
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
......@@ -74,6 +75,14 @@ def run_migrations_online():
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args)
if engine.name in ('mysql', 'mariadb'):
character_set_connection = connection.execute('SHOW VARIABLES LIKE "character_set_connection"').fetchone()[1]
if character_set_connection != 'utf8mb4':
raise click.ClickException(f'Unsupported connection charset "{character_set_connection}". Make sure to add "?charset=utf8mb4" to SQLALCHEMY_DATABASE_URI!')
collation_database = connection.execute('SHOW VARIABLES LIKE "collation_database"').fetchone()[1]
if collation_database != 'utf8mb4_nopad_bin':
raise click.ClickException(f'Unsupported database collation "{collation_database}". Create the database with "CHARACTER SET utf8mb4 COLLATE utf8mb4_nopad_bin"!')
connection.execute('SET NAMES utf8mb4 COLLATE utf8mb4_nopad_bin')
try:
with context.begin_transaction():
......
"""OpenID Connect Support
Revision ID: 01fdd7820f29
Revises: a9b449776953
Create Date: 2023-11-09 16:52:20.860871
"""
from alembic import op
import sqlalchemy as sa
import datetime
import secrets
import math
import logging
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend # Only required for Buster
import jwt
# pyjwt v1.7.x compat (Buster/Bullseye)
# Newer pyjwt exposes get_algorithm_by_name(); emulate it on old versions so
# upgrade() can fetch the RS256 implementation either way.
if not hasattr(jwt, 'get_algorithm_by_name'):
    jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]

# revision identifiers, used by Alembic.
revision = '01fdd7820f29'
down_revision = 'a9b449776953'
branch_labels = None
depends_on = None

# Dedicated logger so the key-generation notice shows up in Alembic's migration output
logger = logging.getLogger('alembic.runtime.migration.01fdd7820f29')
def token_with_alphabet(alphabet, nbytes=None):
'''Return random text token that consists of characters from `alphabet`'''
if nbytes is None:
nbytes = max(secrets.DEFAULT_ENTROPY, 32)
nbytes_per_char = math.log(len(alphabet), 256)
nchars = math.ceil(nbytes / nbytes_per_char)
return ''.join([secrets.choice(alphabet) for _ in range(nchars)])
def token_urlfriendly(nbytes=None):
	'''Return random text token that is urlsafe and works around common parsing bugs'''
	# Strictly alphanumeric: no '-'/'_' that naive URL or ID parsers mishandle
	safe_chars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
	return token_with_alphabet(safe_chars, nbytes=nbytes)
def upgrade():
    """Create the oauth2_key table with an initial RS256 signing key and add the
    nonce/claims columns required for OpenID Connect to oauth2grant/oauth2token."""
    logger.info('Generating 3072 bit RSA key pair (RS256) for OpenID Connect support ...')
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=3072, backend=default_backend())
    meta = sa.MetaData(bind=op.get_bind())
    # Keys are stored as JWK text blobs, one row per signing key
    oauth2_key = op.create_table('oauth2_key',
    sa.Column('id', sa.String(length=64), nullable=False),
    sa.Column('created', sa.DateTime(), nullable=False),
    sa.Column('active', sa.Boolean(create_constraint=False), nullable=False),
    sa.Column('algorithm', sa.String(length=32), nullable=False),
    sa.Column('private_key_jwk', sa.Text(), nullable=False),
    sa.Column('public_key_jwk', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2_key'))
    )
    # get_algorithm_by_name is either native (new pyjwt) or the shim defined above
    algorithm = jwt.get_algorithm_by_name('RS256')
    op.bulk_insert(oauth2_key, [{
    'id': token_urlfriendly(),
    'created': datetime.datetime.utcnow(),
    'active': True,
    'algorithm': 'RS256',
    'private_key_jwk': algorithm.to_jwk(private_key),
    'public_key_jwk': algorithm.to_jwk(private_key.public_key()),
    }])
    # Drop the index first; the table is recreated below by the batch operation
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
    # Full pre-change table definition: passed via copy_from so the SQLite batch
    # recreation keeps all constraints (see migrations README)
    oauth2grant = sa.Table('oauth2grant', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('client_db_id', sa.Integer(), nullable=False),
    sa.Column('code', sa.String(length=255), nullable=False),
    sa.Column('redirect_uri', sa.String(length=255), nullable=False),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.Column('_scopes', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
    )
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
        batch_op.add_column(sa.Column('nonce', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
        batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=True)
    # Same pattern for oauth2token: full definition, then add the claims column
    oauth2token = sa.Table('oauth2token', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('client_db_id', sa.Integer(), nullable=False),
    sa.Column('token_type', sa.String(length=40), nullable=False),
    sa.Column('access_token', sa.String(length=255), nullable=False),
    sa.Column('refresh_token', sa.String(length=255), nullable=False),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.Column('_scopes', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
    sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
    sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
        batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
def downgrade():
    """Reverse the OpenID Connect changes: drop nonce/claims, restore the
    redirect_uri NOT NULL constraint and the code index, and drop oauth2_key."""
    meta = sa.MetaData(bind=op.get_bind())
    # Post-upgrade table definition (including claims) for the SQLite batch recreation
    oauth2token = sa.Table('oauth2token', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('client_db_id', sa.Integer(), nullable=False),
    sa.Column('token_type', sa.String(length=40), nullable=False),
    sa.Column('access_token', sa.String(length=255), nullable=False),
    sa.Column('refresh_token', sa.String(length=255), nullable=False),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.Column('_scopes', sa.Text(), nullable=False),
    sa.Column('claims', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
    sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
    sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
        batch_op.drop_column('claims')
    # Post-upgrade oauth2grant definition (nonce/claims present, redirect_uri nullable)
    oauth2grant = sa.Table('oauth2grant', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('client_db_id', sa.Integer(), nullable=False),
    sa.Column('code', sa.String(length=255), nullable=False),
    sa.Column('redirect_uri', sa.String(length=255), nullable=True),
    sa.Column('nonce', sa.Text(), nullable=True),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.Column('_scopes', sa.Text(), nullable=False),
    sa.Column('claims', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
    )
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
        # NOTE: making redirect_uri NOT NULL again fails if NULL rows were created meanwhile
        batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=False)
        batch_op.drop_column('claims')
        batch_op.drop_column('nonce')
        batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
    op.drop_table('oauth2_key')
"""lower-case mail receive addresses
Revision ID: 042879d5e3ac
Revises: 878b25c4fae7
Create Date: 2022-02-01 20:37:32.103288
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '042879d5e3ac'
down_revision = '878b25c4fae7'
branch_labels = None
depends_on = None
def upgrade():
    """Normalize all mail receive addresses to lower case (data-only migration)."""
    meta = sa.MetaData(bind=op.get_bind())
    # Table definition is only needed to build the UPDATE statement below
    mail_receive_address_table = sa.Table('mail_receive_address', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('mail_id', sa.Integer(), nullable=False),
    sa.Column('address', sa.String(length=128), nullable=False),
    sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_receive_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_receive_address'))
    )
    # Single UPDATE ... SET address = LOWER(address) over the whole table
    op.execute(mail_receive_address_table.update().values(address=sa.func.lower(mail_receive_address_table.c.address)))
def downgrade():
    # Lower-casing is lossy: the original mixed-case addresses cannot be restored
    pass
"""add expires attribute to ratelimit_event
Revision ID: 09d2edcaf0cc
Revises: af07cea65391
Create Date: 2022-02-15 14:16:19.318253
"""
from alembic import op
import sqlalchemy as sa
revision = '09d2edcaf0cc'
down_revision = 'af07cea65391'
branch_labels = None
depends_on = None
def upgrade():
    """Add the expires column to ratelimit_event and tighten nullability."""
    meta = sa.MetaData(bind=op.get_bind())
    # Pre-change table definition for the SQLite batch recreation (copy_from)
    ratelimit_event = sa.Table('ratelimit_event', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('timestamp', sa.DateTime(), nullable=True),
    sa.Column('name', sa.String(length=128), nullable=True),
    sa.Column('key', sa.String(length=128), nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
    )
    # Wipe existing events first: the new NOT NULL expires column has no default
    # to backfill, and ratelimit events are transient anyway
    op.execute(ratelimit_event.delete())
    with op.batch_alter_table('ratelimit_event', copy_from=ratelimit_event) as batch_op:
        batch_op.add_column(sa.Column('expires', sa.DateTime(), nullable=False))
        batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=False)
        batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=False)
def downgrade():
    """Drop the expires column and relax nullability again."""
    meta = sa.MetaData(bind=op.get_bind())
    # Post-upgrade table definition; only used to build the DELETE below
    ratelimit_event = sa.Table('ratelimit_event', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('timestamp', sa.DateTime(), nullable=False),
    sa.Column('expires', sa.DateTime(), nullable=False),
    sa.Column('name', sa.String(length=128), nullable=False),
    sa.Column('key', sa.String(length=128), nullable=True),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
    )
    # Transient data: simply discard all events, mirroring upgrade()
    op.execute(ratelimit_event.delete())
    with op.batch_alter_table('ratelimit_event', schema=None) as batch_op:
        batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=True)
        batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=True)
        batch_op.drop_column('expires')
"""MySQL compat fixes
Revision ID: 11ecc8f1ac3b
Revises: bf71799b7b9e
Create Date: 2021-09-13 04:15:07.479295
"""
from alembic import op
import sqlalchemy as sa
revision = '11ecc8f1ac3b'
down_revision = 'bf71799b7b9e'
branch_labels = None
depends_on = None
def upgrade():
    """Recreate two tables to rename constraints that were invalid or too long for MySQL."""
    meta = sa.MetaData(bind=op.get_bind())
    # First pass: recreate device_login_confirmation WITHOUT its foreign key /
    # unique constraints so the badly named ones are dropped
    table = sa.Table('device_login_confirmation', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('initiation_id', sa.Integer(), nullable=False),
    sa.Column('user_dn', sa.String(length=128), nullable=False),
    sa.Column('code0', sa.String(length=32), nullable=False),
    sa.Column('code1', sa.String(length=32), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    # Same for invite_signup: recreate without the misnamed foreign keys
    table = sa.Table('invite_signup', meta,
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('invite_id', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    # Fresh MetaData: the tables are re-registered below with their new constraints
    meta = sa.MetaData(bind=op.get_bind())
    # Previously "fk_device_login_confirmation_initiation_id_" was named
    # "fk_device_login_confirmation_initiation_id_device_login_initiation"
    # but this was too long for MySQL.
    table = sa.Table('device_login_confirmation', meta,
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('initiation_id', sa.Integer(), nullable=False),
    sa.Column('user_dn', sa.String(length=128), nullable=False),
    sa.Column('code0', sa.String(length=32), nullable=False),
    sa.Column('code1', sa.String(length=32), nullable=False),
    sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name='fk_device_login_confirmation_initiation_id_'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
    sa.UniqueConstraint('initiation_id', 'code0', name='uq_device_login_confirmation_initiation_id_code0'),
    sa.UniqueConstraint('initiation_id', 'code1', name='uq_device_login_confirmation_initiation_id_code1'),
    sa.UniqueConstraint('user_dn', name=op.f('uq_device_login_confirmation_user_dn'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    # Previously "fk_invite_signup_id_signup" was named
    # "fk_invite_signup_signup_id_signup" by mistake.
    table = sa.Table('invite_signup', meta,
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('invite_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
    sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
def downgrade():
    # Constraint renames are compatibility fixes; there is no need to restore the old names
    pass
"""Deactivate users
Revision ID: 23293f32b503
Revises: e249233e2a31
Create Date: 2022-11-10 02:06:27.766520
"""
from alembic import op
import sqlalchemy as sa
revision = '23293f32b503'
down_revision = 'e249233e2a31'
branch_labels = None
depends_on = None
def upgrade():
meta = sa.MetaData(bind=op.get_bind())
with op.batch_alter_table('service', schema=None) as batch_op:
batch_op.add_column(sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
service = sa.Table('service', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('access_group_id', sa.Integer(), nullable=True),
sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
sa.UniqueConstraint('name', name=op.f('uq_service_name'))
)
with op.batch_alter_table('service', copy_from=service) as batch_op:
batch_op.alter_column('hide_deactivated_users', server_default=None)
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.add_column(sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
user = sa.Table('user', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('unix_uid', sa.Integer(), nullable=False),
sa.Column('loginname', sa.String(length=32), nullable=False),
sa.Column('displayname', sa.String(length=128), nullable=False),
sa.Column('primary_email_id', sa.Integer(), nullable=False),
sa.Column('recovery_email_id', sa.Integer(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
)
with op.batch_alter_table('user', copy_from=user) as batch_op:
batch_op.alter_column('is_deactivated', server_default=None)
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
user = sa.Table('user', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('unix_uid', sa.Integer(), nullable=False),
sa.Column('loginname', sa.String(length=32), nullable=False),
sa.Column('displayname', sa.String(length=128), nullable=False),
sa.Column('primary_email_id', sa.Integer(), nullable=False),
sa.Column('recovery_email_id', sa.Integer(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False),
sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
)
with op.batch_alter_table('user', schema=None) as batch_op:
batch_op.drop_column('is_deactivated')
service = sa.Table('service', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('access_group_id', sa.Integer(), nullable=True),
sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False),
sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
sa.UniqueConstraint('name', name=op.f('uq_service_name'))
)
with op.batch_alter_table('service', copy_from=service) as batch_op:
batch_op.drop_column('hide_deactivated_users')
"""added missing oauth2grant.code index
Revision ID: 2a6b1fb82ce6
Revises: cbca20cf64d9
Create Date: 2021-04-13 23:03:46.280189
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2a6b1fb82ce6'
down_revision = 'cbca20cf64d9'
branch_labels = None
depends_on = None
def upgrade():
    """Create the previously missing index on oauth2grant.code."""
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
def downgrade():
    """Drop the oauth2grant.code index again."""
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
"""Remailer v2
Revision ID: 2b68f688bec1
Revises: e13b733ec856
Create Date: 2022-10-20 03:40:11.522343
"""
from alembic import op
import sqlalchemy as sa
revision = '2b68f688bec1'
down_revision = 'e13b733ec856'
branch_labels = None
depends_on = None
def upgrade():
	# Phase 1: add the new remailer_mode enum column. A server_default is
	# required because the column is NOT NULL and the table may already have
	# rows; the default is removed again below to keep the final schema clean.
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'))
	# Phase 2: migrate data from the old boolean use_remailer flag to the new
	# enum (True maps to ENABLED_V1; False keeps the DISABLED default).
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	op.execute(service.update().values(remailer_mode='ENABLED_V1').where(service.c.use_remailer))
	# Phase 3: recreate the table to drop use_remailer and the temporary
	# server_default. The table is spelled out explicitly (instead of being
	# reflected) because CHECK constraints get lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('remailer_mode', server_default=None)
		batch_op.drop_column('use_remailer')
def downgrade():
	# Mirror of upgrade(): re-add the boolean use_remailer column (with a
	# temporary server_default so the NOT NULL constraint can be satisfied) ...
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('use_remailer', sa.BOOLEAN(), nullable=False, server_default=sa.false()))
	# ... then map any non-DISABLED remailer_mode back to use_remailer=True ...
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	op.execute(service.update().values(use_remailer=sa.true()).where(service.c.remailer_mode != 'DISABLED'))
	# ... and finally recreate the table to drop remailer_mode and the
	# temporary server_default (explicit table definition because CHECK
	# constraints are lost when reflecting).
	meta = sa.MetaData(bind=op.get_bind())
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('use_remailer', server_default=None)
		batch_op.drop_column('remailer_mode')
"""Unique email addresses
Revision ID: 468995a9c9ee
Revises: 2b68f688bec1
Create Date: 2022-10-21 01:25:01.469670
"""
import unicodedata
from alembic import op
import sqlalchemy as sa
revision = '468995a9c9ee'
down_revision = '2b68f688bec1'
branch_labels = None
depends_on = None
def normalize_address(value):
	"""Return the canonical form of an email address for duplicate detection.

	The address is Unicode-normalized (NFKC), lower-cased and stripped of
	surrounding whitespace.
	"""
	canonical = unicodedata.normalize('NFKC', value)
	canonical = canonical.lower()
	return canonical.strip()
def iter_rows_paged(table, pk='id', limit=1000):
	# Generator yielding all rows of `table`, fetched in pages of `limit` rows
	# ordered by the primary-key column `pk`. Keyset pagination (WHERE pk >
	# last seen value) bounds memory usage and stays correct even if rows are
	# updated between pages, as long as the pk values themselves don't change.
	conn = op.get_bind()
	pk_column = getattr(table.c, pk)
	last_pk = None
	while True:
		expr = table.select().order_by(pk_column).limit(limit)
		if last_pk is not None:
			expr = expr.where(pk_column > last_pk)
		result = conn.execute(expr)
		# Position of the pk column in the result rows, used to remember the
		# last key of the current page.
		pk_index = list(result.keys()).index(pk)
		rows = result.fetchall()
		if not rows:
			break
		yield from rows
		last_pk = rows[-1][pk_index]
def upgrade():
	"""Add normalized-address columns/constraints to user_email and create feature_flag.

	The `verified` column becomes nullable: "not verified" is stored as NULL so
	that the unique constraint on (address_normalized, verified,
	enable_strict_constraints) only applies to verified addresses (NULLs compare
	distinct in UNIQUE constraints).
	"""
	with op.batch_alter_table('user_email', schema=None) as batch_op:
		batch_op.add_column(sa.Column('address_normalized', sa.String(length=128), nullable=True))
		batch_op.add_column(sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True))
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=True)
	# Explicit table definition: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=True),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address')
	)
	# Backfill address_normalized and convert verified=False to NULL, paging
	# through the table to bound memory usage.
	for row in iter_rows_paged(user_email_table):
		row_id = row[0]  # renamed from `id` to avoid shadowing the builtin
		address = row[2]
		verified = row[5]
		op.execute(user_email_table.update()\
			.where(user_email_table.c.id == row_id)\
			.values(
				address_normalized=normalize_address(address),
				verified=(True if verified else None)
			)
		)
	# Now that every row has address_normalized, make it NOT NULL and add the
	# uniqueness constraints (strictness is toggled per-row via
	# enable_strict_constraints).
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.alter_column('address_normalized', existing_type=sa.String(length=128), nullable=False)
		batch_op.create_unique_constraint('uq_user_email_address_normalized_verified', ['address_normalized', 'verified', 'enable_strict_constraints'])
		batch_op.create_unique_constraint('uq_user_email_user_id_address_normalized', ['user_id', 'address_normalized', 'enable_strict_constraints'])
	op.create_table('feature_flag',
		sa.Column('name', sa.String(32), nullable=False),
		sa.PrimaryKeyConstraint('name', name=op.f('pk_feature_flag')),
	)
def downgrade():
	"""Revert upgrade(): drop the normalization columns/constraints and feature_flag.

	Unverified addresses stored as NULL are converted back to verified=False
	before the column is made NOT NULL again.
	"""
	# Explicit table definition: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=False),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address'),
		sa.UniqueConstraint('address_normalized', 'verified', 'enable_strict_constraints', name='uq_user_email_address_normalized_verified'),
		sa.UniqueConstraint('user_id', 'address_normalized', 'enable_strict_constraints', name='uq_user_email_user_id_address_normalized')
	)
	# `.is_(None)` renders as "IS NULL" (equivalent to the previous `== None`,
	# but the idiomatic SQLAlchemy spelling).
	op.execute(user_email_table.update().where(user_email_table.c.verified.is_(None)).values(verified=False))
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.drop_constraint('uq_user_email_user_id_address_normalized', type_='unique')
		batch_op.drop_constraint('uq_user_email_address_normalized_verified', type_='unique')
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=False)
		batch_op.drop_column('enable_strict_constraints')
		batch_op.drop_column('address_normalized')
	op.drop_table('feature_flag')
"""Unified password hashing for recovery codes
Revision ID: 4bd316207e59
Revises: e71e29cc605a
Create Date: 2024-05-22 03:13:55.917641
"""
from alembic import op
import sqlalchemy as sa
revision = '4bd316207e59'
down_revision = 'e71e29cc605a'
branch_labels = None
depends_on = None
def upgrade():
	"""Drop the unused recovery_salt column and tag recovery-code hashes with '{crypt}'."""
	# Explicit table definition: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	# This field was already unused before the change to unified password hashing. So this is unrelated cleanup.
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.drop_column('recovery_salt')
	# Prefix existing crypt-style hashes with the '{crypt}' method identifier
	# expected by the unified password hashing code.
	op.execute(mfa_method.update().values(recovery_hash=('{crypt}' + mfa_method.c.recovery_hash)).where(mfa_method.c.type == 'RECOVERY_CODE'))
def downgrade():
	"""Revert upgrade(): restore recovery_salt and strip the '{crypt}' prefix.

	Recovery codes whose hash does not carry the '{crypt}' prefix cannot be
	represented in the old scheme and are deleted.
	"""
	# Explicit table definition: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.add_column(sa.Column('recovery_salt', sa.VARCHAR(length=64), nullable=True))
	# Drop recovery codes not using the legacy crypt format ...
	op.execute(
		mfa_method.delete().where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			sa.not_(mfa_method.c.recovery_hash.ilike('{crypt}%'))
		))
	)
	# ... and strip the '{crypt}' prefix from the remaining ones.
	op.execute(
		mfa_method.update().values(
			recovery_hash=sa.func.substr(mfa_method.c.recovery_hash, len('{crypt}') + 1)
		).where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			mfa_method.c.recovery_hash.ilike('{crypt}%')
		))
	)
"""invite pk change
Revision ID: 54b2413586fd
Revises: 2a6b1fb82ce6
Create Date: 2021-04-13 23:33:40.118507
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '54b2413586fd'
down_revision = '2a6b1fb82ce6'
branch_labels = None
depends_on = None
# Lightweight table stubs used by the data-migration statements in upgrade()
# and downgrade(); only the columns referenced there are declared.
invite = sa.sql.table('invite',
	sa.sql.column('id', sa.Integer()),
	sa.sql.column('token', sa.String(length=128))
)
invite_grant = sa.sql.table('invite_grant',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
invite_roles = sa.sql.table('invite_roles',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
invite_signup = sa.sql.table('invite_signup',
	sa.sql.column('invite_id', sa.Integer()),
	sa.sql.column('invite_token', sa.String(length=128))
)
def upgrade():
	"""Replace invite's primary key (token) with an autoincrement integer id.

	All referencing tables (invite_grant, invite_roles, invite_signup) are
	switched from invite_token foreign keys to invite_id foreign keys; the
	data is migrated via correlated subqueries.
	"""
	# CHECK constraints get lost when reflecting from the actual table
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
	)
	# Detach the old token-based foreign keys so the invite PK can change.
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_grant_invite_token_invite', type_='foreignkey')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_roles_invite_token_invite', type_='foreignkey')
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.drop_constraint('fk_invite_signup_invite_token_invite', type_='foreignkey')
	# Swap the primary key: add id, make it the PK, keep token unique.
	with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
		batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.create_primary_key(batch_op.f('pk_invite'), ['id'])
		batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
		batch_op.create_unique_constraint(batch_op.f('uq_invite_token'), ['token'])
	# Add the new invite_id columns (nullable for now, filled below).
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
	# Resolve each invite_token to its new invite_id.
	op.execute(invite_grant.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_grant.c.invite_token).as_scalar()))
	op.execute(invite_roles.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_roles.c.invite_token).as_scalar()))
	op.execute(invite_signup.update().values(invite_id=sa.select([invite.c.id]).where(invite.c.token==invite_signup.c.invite_token).as_scalar()))
	# Enforce NOT NULL, add the new id-based foreign keys, drop invite_token.
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
		batch_op.create_foreign_key(batch_op.f('fk_invite_grant_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
		batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_id', 'role_id'])
		batch_op.create_foreign_key(batch_op.f('fk_invite_roles_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_invite_id_invite'), 'invite', ['invite_id'], ['id'])
		batch_op.drop_column('invite_token')
def downgrade():
	"""Revert upgrade(): restore invite.token as the primary key.

	Re-adds the invite_token columns on the referencing tables, backfills them
	from the integer ids, then drops the id column and the id-based keys.
	"""
	# Detach the id-based foreign keys and re-add nullable invite_token columns.
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_signup_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_roles_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_invite_grant_invite_id_invite'), type_='foreignkey')
		batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
	# Resolve each invite_id back to its token.
	op.execute(invite_grant.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_grant.c.invite_id).as_scalar()))
	op.execute(invite_roles.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_roles.c.invite_id).as_scalar()))
	op.execute(invite_signup.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_signup.c.invite_id).as_scalar()))
	# Enforce NOT NULL on invite_token and drop the invite_id columns.
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_column('invite_id')
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
		batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_token', 'role_id'])
		batch_op.drop_column('invite_id')
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_column('invite_id')
	# CHECK constraints get lost when reflecting from the actual table
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')),
		sa.UniqueConstraint('token', name=op.f('uq_invite_token'))
	)
	# Swap the primary key back from id to token.
	with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
		batch_op.drop_constraint(batch_op.f('uq_invite_token'), type_='unique')
		batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
		batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
		batch_op.drop_column('id')
		batch_op.create_primary_key(batch_op.f('pk_invite'), ['token'])
	# Restore the original token-based foreign keys.
	with op.batch_alter_table('invite_signup', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_signup_invite_token_invite', 'invite', ['invite_token'], ['token'])
	with op.batch_alter_table('invite_roles', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_roles_invite_token_invite', 'invite', ['invite_token'], ['token'])
	with op.batch_alter_table('invite_grant', schema=None) as batch_op:
		batch_op.create_foreign_key('fk_invite_grant_invite_token_invite', 'invite', ['invite_token'], ['token'])
"""Role inclusion
Revision ID: 5a07d4a63b64
Revises: a29870f95175
Create Date: 2021-04-05 15:00:26.205433
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5a07d4a63b64'
down_revision = 'a29870f95175'
branch_labels = None
depends_on = None
def upgrade():
	# Association table for role inclusion (a role transitively granting other
	# roles). NOTE(review): the FK/PK constraints are intentionally left
	# unnamed here — this matches the historical schema and must not be
	# changed retroactively.
	op.create_table('role-inclusion',
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], ),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id')
	)
def downgrade():
	# Revert upgrade(): remove the role inclusion association table.
	op.drop_table('role-inclusion')
"""invite creator and role moderator group
Revision ID: 5cab70e95bf8
Revises: 54b2413586fd
Create Date: 2021-04-14 15:46:29.910342
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5cab70e95bf8'
down_revision = '54b2413586fd'
branch_labels = None
depends_on = None
def upgrade():
	# Track who created an invite and which group moderates a role. Both
	# columns store LDAP DNs (this revision predates the database-only user
	# model) and are nullable, so no backfill is needed.
	# ### commands auto generated by Alembic - please adjust! ###
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.add_column(sa.Column('creator_dn', sa.String(length=128), nullable=True))
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.add_column(sa.Column('moderator_group_dn', sa.String(length=128), nullable=True))
	# ### end Alembic commands ###
def downgrade():
	# Revert upgrade(): drop the creator/moderator DN columns again.
	# ### commands auto generated by Alembic - please adjust! ###
	with op.batch_alter_table('role', schema=None) as batch_op:
		batch_op.drop_column('moderator_group_dn')
	with op.batch_alter_table('invite', schema=None) as batch_op:
		batch_op.drop_column('creator_dn')
	# ### end Alembic commands ###
"""remailer setting and api permission
Revision ID: 704d1245331c
Revises: b9d3f7dac9db
Create Date: 2022-04-19 17:32:52.304313
"""
from alembic import op
import sqlalchemy as sa
revision = '704d1245331c'
down_revision = 'b9d3f7dac9db'
branch_labels = None
depends_on = None
def upgrade():
	"""Add api_client.perm_remailer and service.use_remailer boolean columns."""
	# Retrospective fix of this migration: Originally server_default was not set,
	# which caused "Cannot add a NOT NULL column with default value NULL" errors.
	# This only happens with recent Alembic versions that render
	# batch_op.add_column as an "ALTER TABLE" statement instead of recreating the
	# table. To keep the resulting database consistent, we remove the
	# server_default afterwards.
	with op.batch_alter_table('api_client') as batch_op:
		batch_op.add_column(sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	with op.batch_alter_table('service') as batch_op:
		batch_op.add_column(sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	# Explicit table definitions: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	# Strip the temporary server_defaults again (see comment above).
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.alter_column('perm_remailer', server_default=None)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('use_remailer', server_default=None)
def downgrade():
	"""Revert upgrade(): drop api_client.perm_remailer and service.use_remailer."""
	# Explicit table definitions: CHECK constraints are lost when reflecting.
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.drop_column('use_remailer')
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.drop_column('perm_remailer')