Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • Dockerfile
  • feature_invite_validuntil_minmax
  • incremental-sync
  • jwt_encode_inconsistencies
  • master
  • redis-rate-limits
  • roles-recursive-cte
  • typehints
  • v1.0.x
  • v1.1.x
  • v1.2.x
  • v1.x.x
  • v0.1.2
  • v0.1.4
  • v0.1.5
  • v0.2.0
  • v0.3.0
  • v1.0.0
  • v1.0.1
  • v1.0.2
  • v1.1.0
  • v1.1.1
  • v1.1.2
  • v1.2.0
  • v2.0.0
  • v2.0.1
  • v2.1.0
  • v2.2.0
  • v2.3.0
  • v2.3.1
30 results

Target

Select target project
  • uffd/uffd
  • rixx/uffd
  • thies/uffd
  • leona/uffd
  • enbewe/uffd
  • strifel/uffd
  • thies/uffd-2
7 results
Select Git revision
  • Dockerfile
  • claims-in-idtoke
  • feature_invite_validuntil_minmax
  • incremental-sync
  • jwt_encode_inconsistencies
  • master
  • recovery-code-pwhash
  • redis-rate-limits
  • roles-recursive-cte
  • typehints
  • v1.0.x
  • v1.1.x
  • v1.2.x
  • v1.x.x
  • v0.1.2
  • v0.1.4
  • v0.1.5
  • v0.2.0
  • v0.3.0
  • v1.0.0
  • v1.0.1
  • v1.0.2
  • v1.1.0
  • v1.1.1
  • v1.1.2
  • v1.2.0
  • v2.0.0
  • v2.0.1
  • v2.1.0
  • v2.2.0
  • v2.3.0
  • v2.3.1
32 results
Show changes
Showing
with 676 additions and 422 deletions
from collections.abc import MutableSet
from .model import make_modelobj, make_modelobjs, add_to_session
class UnboundObjectError(Exception):
    '''Raised when a relationship is mutated while its owning LDAP object has no session.'''
    pass
class RelationshipSet(MutableSet):
    '''Mutable set view over a multi-valued, DN-holding LDAP attribute.

    Membership is stored as DNs on `ldap_object`; iteration resolves the DNs
    to `destmodel` instances via the object's session.
    '''

    def __init__(self, ldap_object, name, model, destmodel):
        self.__ldap_object = ldap_object  # LDAP object that stores the member DNs
        self.__name = name  # name of the DN-valued attribute
        self.__model = model # pylint: disable=unused-private-member
        self.__destmodel = destmodel  # model class the stored DNs refer to

    def __modify_check(self, value):
        # Mutation requires a session-bound owner and a value of the destination model type
        if self.__ldap_object.session is None:
            raise UnboundObjectError()
        if not isinstance(value, self.__destmodel):
            raise TypeError()

    def __repr__(self):
        return repr(set(self))

    def __contains__(self, value):
        if value is None or not isinstance(value, self.__destmodel):
            return False
        return value.ldap_object.dn in self.__ldap_object.getattr(self.__name)

    def __iter__(self):
        # Resolve every stored DN to a model object; DNs that do not resolve
        # (make_modelobj returned None) are silently skipped
        def get(dn):
            return make_modelobj(self.__ldap_object.session.get(dn, self.__destmodel.ldap_filter_params), self.__destmodel)
        dns = set(self.__ldap_object.getattr(self.__name))
        return iter(filter(lambda obj: obj is not None, map(get, dns)))

    def __len__(self):
        # Counts only resolvable members (see __iter__), not raw DNs
        return len(set(self))

    def add(self, value):
        self.__modify_check(value)
        # Bind the value to our session first so both ends live in the same session
        if value.ldap_object.session is None:
            add_to_session(value, self.__ldap_object.session)
        assert value.ldap_object.session == self.__ldap_object.session
        self.__ldap_object.attr_append(self.__name, value.dn)

    def discard(self, value):
        self.__modify_check(value)
        self.__ldap_object.attr_remove(self.__name, value.dn)

    def update(self, values):
        for value in values:
            self.add(value)
class Relationship:
    '''Descriptor exposing a DN-valued LDAP attribute as a set of destination-model objects.

    Optionally installs a `Backreference` descriptor on the destination model
    under `backref`, pointing back at the owning class.
    '''

    def __init__(self, name, destmodel, backref=None):
        self.name = name
        self.destmodel = destmodel
        self.backref = backref

    def __set_name__(self, cls, name):
        # Install the reverse descriptor only when a backref name was requested
        if self.backref is None:
            return
        setattr(self.destmodel, self.backref, Backreference(self.name, cls))

    def __get__(self, obj, objtype=None):
        # Class-level access yields the descriptor itself
        if obj is None:
            return self
        return RelationshipSet(obj.ldap_object, self.name, type(obj), self.destmodel)

    def __set__(self, obj, values):
        # Assignment replaces the current members with exactly the given values
        members = self.__get__(obj)
        members.clear()
        for item in values:
            members.add(item)
class BackreferenceSet(MutableSet):
    '''Read-mostly set view of all `srcmodel` objects whose `name` attribute references this object's DN.

    Unlike RelationshipSet, membership is computed by an LDAP search on the
    source model, so mutations edit the attribute on the *other* side.
    '''

    def __init__(self, ldap_object, name, model, srcmodel):
        self.__ldap_object = ldap_object  # referenced (target) LDAP object
        self.__name = name  # attribute on srcmodel objects that holds our DN
        self.__model = model # pylint: disable=unused-private-member
        self.__srcmodel = srcmodel  # model class on the referencing side

    def __modify_check(self, value):
        # Mutation requires a session-bound owner and a value of the source model type
        if self.__ldap_object.session is None:
            raise UnboundObjectError()
        if not isinstance(value, self.__srcmodel):
            raise TypeError()

    def __get(self):
        # Without a session no search is possible; report an empty set
        if self.__ldap_object.session is None:
            return set()
        # Search for srcmodel objects whose attribute contains our DN
        filter_params = list(self.__srcmodel.ldap_filter_params) + [(self.__name, self.__ldap_object.dn)]
        objs = self.__ldap_object.session.filter(self.__srcmodel.ldap_search_base, filter_params)
        return set(make_modelobjs(objs, self.__srcmodel))

    def __repr__(self):
        return repr(self.__get())

    def __contains__(self, value):
        return value in self.__get()

    def __iter__(self):
        return iter(self.__get())

    def __len__(self):
        return len(self.__get())

    def add(self, value):
        self.__modify_check(value)
        if value.ldap_object.session is None:
            add_to_session(value, self.__ldap_object.session)
        assert value.ldap_object.session == self.__ldap_object.session
        # Append our DN on the referencing object, avoiding duplicates
        if self.__ldap_object.dn not in value.ldap_object.getattr(self.__name):
            value.ldap_object.attr_append(self.__name, self.__ldap_object.dn)

    def discard(self, value):
        self.__modify_check(value)
        value.ldap_object.attr_remove(self.__name, self.__ldap_object.dn)

    def update(self, values):
        for value in values:
            self.add(value)
class Backreference:
    '''Reverse side of a Relationship: yields source-model objects that reference the owner.'''

    def __init__(self, name, srcmodel):
        self.name = name
        self.srcmodel = srcmodel

    def __get__(self, obj, objtype=None):
        # Accessing via the class returns the descriptor unchanged
        if obj is None:
            return self
        return BackreferenceSet(obj.ldap_object, self.name, type(obj), self.srcmodel)

    def __set__(self, obj, values):
        # Assignment makes the referencing set equal to `values`
        referencing = self.__get__(obj)
        referencing.clear()
        for item in values:
            referencing.add(item)
# Re-export this package's view blueprint(s) for registration by the app factory
from .views import bp as bp_ui
bp = [bp_ui]
from uffd.ldap import ldap
from uffd.lazyconfig import lazyconfig_str, lazyconfig_list
class Mail(ldap.Model):
    '''LDAP-backed mail forwarding object.

    All LDAP locations and attribute names are resolved lazily from app
    config, so this class can be declared before the app is configured.
    '''
    ldap_search_base = lazyconfig_str('LDAP_MAIL_SEARCH_BASE')
    ldap_filter_params = lazyconfig_list('LDAP_MAIL_SEARCH_FILTER')
    ldap_object_classes = lazyconfig_list('LDAP_MAIL_OBJECTCLASSES')
    ldap_dn_attribute = lazyconfig_str('LDAP_MAIL_DN_ATTRIBUTE')
    # New objects are created directly under the search base
    ldap_dn_base = lazyconfig_str('LDAP_MAIL_SEARCH_BASE')

    # Identifier plus multi-valued receive/destination address lists
    uid = ldap.Attribute(lazyconfig_str('LDAP_MAIL_UID_ATTRIBUTE'))
    receivers = ldap.Attribute(lazyconfig_str('LDAP_MAIL_RECEIVERS_ATTRIBUTE'), multi=True)
    destinations = ldap.Attribute(lazyconfig_str('LDAP_MAIL_DESTINATIONS_ATTRIBUTE'), multi=True)
# Re-export this package's view blueprint(s) for registration by the app factory
from .views import bp as _bp
bp = [_bp]
from warnings import warn
import urllib.parse
from flask import Blueprint, render_template, session, request, redirect, url_for, flash, current_app, abort
from flask_babel import gettext as _
from uffd.database import db
from uffd.ldap import ldap
from uffd.mfa.models import MFAMethod, TOTPMethod, WebauthnMethod, RecoveryCodeMethod
from uffd.session.views import login_required, login_required_pre_mfa, set_request_user
from uffd.user.models import User
from uffd.csrf import csrf_protect
from uffd.secure_redirect import secure_local_redirect
from uffd.ratelimit import Ratelimit, format_delay
bp = Blueprint('mfa', __name__, template_folder='templates', url_prefix='/mfa/')

# Rate limit for failed MFA attempts, keyed by user DN
# (args presumably mean a 60s window and a burst of 3 — verify against Ratelimit)
mfa_ratelimit = Ratelimit('mfa', 1*60, 3)
@bp.route('/', methods=['GET'])
@login_required()
def setup():
    '''Render the MFA setup overview page.'''
    return render_template('mfa/setup.html')
@bp.route('/setup/disable', methods=['GET'])
@login_required()
def disable():
    '''Render the confirmation page for disabling MFA (actual disable is the POST handler).'''
    return render_template('mfa/disable.html')
@bp.route('/setup/disable', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def disable_confirm():
    '''Delete all MFA methods of the current user, then refresh group memberships.'''
    MFAMethod.query.filter_by(dn=request.user.dn).delete()
    db.session.commit()
    # Group membership may depend on MFA state, so recompute and persist it
    request.user.update_groups()
    ldap.session.commit()
    return redirect(url_for('mfa.setup'))
@bp.route('/admin/<int:uid>/disable')
@login_required()
@csrf_protect(blueprint=bp)
def admin_disable(uid):
    '''Admin action: wipe all MFA methods of the user identified by `uid`.'''
    # Group cannot be checked with login_required kwarg, because the config
    # variable is not available when the decorator is processed
    if not request.user.is_in_group(current_app.config['ACL_ADMIN_GROUP']):
        flash('Access denied')
        return redirect(url_for('index'))
    # .one() raises if the uid does not exist (surfaces as a server error)
    user = User.query.filter_by(uid=uid).one()
    MFAMethod.query.filter_by(dn=user.dn).delete()
    db.session.commit()
    user.update_groups()
    ldap.session.commit()
    flash(_('Two-factor authentication was reset'))
    return redirect(url_for('user.show', uid=uid))
@bp.route('/setup/recovery', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_recovery():
    '''Replace the user's recovery codes with ten freshly generated ones and display them.'''
    # Drop all existing codes first; old codes become invalid
    for method in RecoveryCodeMethod.query.filter_by(dn=request.user.dn).all():
        db.session.delete(method)
    methods = []
    # Loop variable is "_i", not "_": this module imports flask_babel.gettext
    # as "_", and a bare "_" loop variable would shadow it in this function.
    for _i in range(10):
        method = RecoveryCodeMethod(request.user)
        methods.append(method)
        db.session.add(method)
    db.session.commit()
    return render_template('mfa/setup_recovery.html', methods=methods)
@bp.route('/setup/totp', methods=['GET'])
@login_required()
def setup_totp():
    '''Render the TOTP enrollment page with a freshly generated secret.'''
    method = TOTPMethod(request.user)
    # Stash the secret server-side; setup_totp_finish pops it to verify the first code
    session['mfa_totp_key'] = method.key
    return render_template('mfa/setup_totp.html', method=method, name=request.values['name'])
@bp.route('/setup/totp', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_totp_finish():
    '''Verify the first TOTP code and persist the new TOTP method.'''
    # Recovery codes are the fallback if the authenticator is lost; require them first
    if not RecoveryCodeMethod.query.filter_by(dn=request.user.dn).all():
        flash(_('Generate recovery codes first!'))
        return redirect(url_for('mfa.setup'))
    # Rebuild the method from the key stashed by setup_totp (KeyError -> 500 if missing)
    method = TOTPMethod(request.user, name=request.values['name'], key=session.pop('mfa_totp_key'))
    if method.verify(request.form['code']):
        db.session.add(method)
        db.session.commit()
        request.user.update_groups()
        ldap.session.commit()
        return redirect(url_for('mfa.setup'))
    flash(_('Code is invalid'))
    return redirect(url_for('mfa.setup_totp', name=request.values['name']))
@bp.route('/setup/totp/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_totp(id): #pylint: disable=redefined-builtin
    '''Remove one of the current user's TOTP methods (404 if not theirs).'''
    method = TOTPMethod.query.filter_by(dn=request.user.dn, id=id).first_or_404()
    db.session.delete(method)
    db.session.commit()
    request.user.update_groups()
    ldap.session.commit()
    return redirect(url_for('mfa.setup'))
# WebAuthn support is optional because fido2 has a pretty unstable
# interface (v0.5.0 on buster and current version are completely
# incompatible) and might be difficult to install with the correct version
try:
    from fido2.client import ClientData
    from fido2.server import Fido2Server, RelyingParty
    from fido2.ctap2 import AttestationObject, AuthenticatorData
    from fido2 import cbor
    WEBAUTHN_SUPPORTED = True
except ImportError as err:
    warn(_('2FA WebAuthn support disabled because import of the fido2 module failed (%s)')%err)
    WEBAUTHN_SUPPORTED = False

# Expose the flag to templates so the UI can hide WebAuthn elements
bp.add_app_template_global(WEBAUTHN_SUPPORTED, name='webauthn_supported')
# WebAuthn views only exist when the fido2 module imported successfully
if WEBAUTHN_SUPPORTED:
    def get_webauthn_server():
        '''Build a Fido2Server; relying-party ID defaults to the request's hostname.'''
        return Fido2Server(RelyingParty(current_app.config.get('MFA_RP_ID', urllib.parse.urlsplit(request.url).hostname), current_app.config['MFA_RP_NAME']))

    @bp.route('/setup/webauthn/begin', methods=['POST'])
    @login_required()
    @csrf_protect(blueprint=bp)
    def setup_webauthn_begin():
        '''Start WebAuthn credential registration; returns CBOR registration data.'''
        # Like TOTP setup, require recovery codes to exist first
        if not RecoveryCodeMethod.query.filter_by(dn=request.user.dn).all():
            abort(403)
        methods = WebauthnMethod.query.filter_by(dn=request.user.dn).all()
        creds = [method.cred for method in methods]
        server = get_webauthn_server()
        registration_data, state = server.register_begin(
            {
                "id": request.user.dn.encode(),
                "name": request.user.loginname,
                "displayName": request.user.displayname,
            },
            creds,
            user_verification='discouraged',
        )
        # State is consumed again in setup_webauthn_complete
        session["webauthn-state"] = state
        return cbor.dumps(registration_data)

    @bp.route('/setup/webauthn/complete', methods=['POST'])
    @login_required()
    @csrf_protect(blueprint=bp)
    def setup_webauthn_complete():
        '''Finish WebAuthn registration and persist the new credential.'''
        server = get_webauthn_server()
        data = cbor.loads(request.get_data())[0]
        client_data = ClientData(data["clientDataJSON"])
        att_obj = AttestationObject(data["attestationObject"])
        auth_data = server.register_complete(session["webauthn-state"], client_data, att_obj)
        method = WebauthnMethod(request.user, auth_data.credential_data, name=data['name'])
        db.session.add(method)
        db.session.commit()
        request.user.update_groups()
        ldap.session.commit()
        return cbor.dumps({"status": "OK"})

    @bp.route("/auth/webauthn/begin", methods=["POST"])
    @login_required_pre_mfa(no_redirect=True)
    def auth_webauthn_begin():
        '''Start WebAuthn authentication for a user who passed the first login factor.'''
        server = get_webauthn_server()
        creds = [method.cred for method in request.user_pre_mfa.mfa_webauthn_methods]
        # No registered credentials -> nothing to authenticate against
        if not creds:
            abort(404)
        auth_data, state = server.authenticate_begin(creds, user_verification='discouraged')
        session["webauthn-state"] = state
        return cbor.dumps(auth_data)

    @bp.route("/auth/webauthn/complete", methods=["POST"])
    @login_required_pre_mfa(no_redirect=True)
    def auth_webauthn_complete():
        '''Verify the WebAuthn assertion and mark the session as fully authenticated.'''
        server = get_webauthn_server()
        creds = [method.cred for method in request.user_pre_mfa.mfa_webauthn_methods]
        if not creds:
            abort(404)
        data = cbor.loads(request.get_data())[0]
        credential_id = data["credentialId"]
        client_data = ClientData(data["clientDataJSON"])
        auth_data = AuthenticatorData(data["authenticatorData"])
        signature = data["signature"]
        # authenticate_complete() (as of python-fido2 v0.5.0, the version in Debian Buster)
        # does not check signCount, although the spec recommends it
        server.authenticate_complete(
            session.pop("webauthn-state"),
            creds,
            credential_id,
            client_data,
            auth_data,
            signature,
        )
        session['user_mfa'] = True
        set_request_user()
        return cbor.dumps({"status": "OK"})
@bp.route('/setup/webauthn/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_webauthn(id): #pylint: disable=redefined-builtin
    '''Remove one of the current user's WebAuthn methods (404 if not theirs).'''
    method = WebauthnMethod.query.filter_by(dn=request.user.dn, id=id).first_or_404()
    db.session.delete(method)
    db.session.commit()
    request.user.update_groups()
    ldap.session.commit()
    return redirect(url_for('mfa.setup'))
@bp.route('/auth', methods=['GET'])
@login_required_pre_mfa()
def auth():
    '''Render the second-factor prompt, or skip it if the user has no MFA enabled.'''
    if not request.user_pre_mfa.mfa_enabled:
        session['user_mfa'] = True
        set_request_user()
    # Already fully authenticated: bounce back to the (local-only) ref target
    if session.get('user_mfa'):
        return secure_local_redirect(request.values.get('ref', url_for('index')))
    return render_template('mfa/auth.html', ref=request.values.get('ref'))
@bp.route('/auth', methods=['POST'])
@login_required_pre_mfa()
def auth_finish():
    '''Validate a submitted TOTP or recovery code and complete two-factor login.'''
    # Failed attempts are rate-limited per user DN
    delay = mfa_ratelimit.get_delay(request.user_pre_mfa.dn)
    if delay:
        flash(_('We received too many invalid attempts! Please wait at least %s.')%format_delay(delay))
        return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
    # Try TOTP methods first
    for method in request.user_pre_mfa.mfa_totp_methods:
        if method.verify(request.form['code']):
            session['user_mfa'] = True
            set_request_user()
            return secure_local_redirect(request.values.get('ref', url_for('index')))
    # Fall back to single-use recovery codes; a matching code is consumed
    for method in request.user_pre_mfa.mfa_recovery_codes:
        if method.verify(request.form['code']):
            db.session.delete(method)
            db.session.commit()
            session['user_mfa'] = True
            set_request_user()
            # Warn when the remaining code supply is (nearly) exhausted.
            # NOTE(review): the count may or may not reflect the deletion above,
            # depending on relationship caching — verify the thresholds.
            if len(request.user_pre_mfa.mfa_recovery_codes) <= 1:
                flash(_('You have exhausted your recovery codes. Please generate new ones now!'))
                return redirect(url_for('mfa.setup'))
            if len(request.user_pre_mfa.mfa_recovery_codes) <= 5:
                flash(_('You only have a few recovery codes remaining. Make sure to generate new ones before they run out.'))
                return redirect(url_for('mfa.setup'))
            return secure_local_redirect(request.values.get('ref', url_for('index')))
    # No method matched: log the failure for rate limiting and re-prompt
    mfa_ratelimit.log(request.user_pre_mfa.dn)
    flash(_('Two-factor authentication failed'))
    return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
Database Migrations
===================
While we use Alembic in a single-database configuration, the migration scripts
are compatible with both SQLite and MySQL/MariaDB.
Compatibility with SQLite almost always requires `batch_alter_table` operations
to modify existing tables. These recreate the tables, copy the data and finally
replace the old tables with the newly created ones. Alembic is configured to
auto-generate those operations, but in most cases the generated code fails to
fully reflect all details of the original schema. This way some constraints
(e.g. `CHECK` constraints on Enums) are lost. Define the full table and pass it
with `copy_from` to `batch_alter_table` to prevent this.
Compatibility with MySQL requires special care when changing primary keys and
when dealing with foreign keys. It often helps to temporarily remove foreign
key constraints concerning the table that is subject to change. When adding an
autoincrement id column as the new primary key of a table, recreate the table
with `batch_alter_table`.
The `check_migrations.py` script verifies that upgrading and downgrading works
with both databases. While it is far from perfect, it catches many common
errors. It runs automatically as part of the CI pipeline. Make sure to update
the script when adding new tables and when making significant changes to
existing tables.
File moved
...@@ -3,6 +3,7 @@ from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
import click
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
...@@ -74,6 +75,14 @@ def run_migrations_online():
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args)
if engine.name in ('mysql', 'mariadb'):
character_set_connection = connection.execute('SHOW VARIABLES LIKE "character_set_connection"').fetchone()[1]
if character_set_connection != 'utf8mb4':
raise click.ClickException(f'Unsupported connection charset "{character_set_connection}". Make sure to add "?charset=utf8mb4" to SQLALCHEMY_DATABASE_URI!')
collation_database = connection.execute('SHOW VARIABLES LIKE "collation_database"').fetchone()[1]
if collation_database != 'utf8mb4_nopad_bin':
raise click.ClickException(f'Unsupported database collation "{collation_database}". Create the database with "CHARACTER SET utf8mb4 COLLATE utf8mb4_nopad_bin"!')
connection.execute('SET NAMES utf8mb4 COLLATE utf8mb4_nopad_bin')
try:
with context.begin_transaction():
...
"""OpenID Connect Support
Revision ID: 01fdd7820f29
Revises: a9b449776953
Create Date: 2023-11-09 16:52:20.860871
"""
from alembic import op
import sqlalchemy as sa
import datetime
import secrets
import math
import logging
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend # Only required for Buster
import jwt
# pyjwt v1.7.x compat (Buster/Bullseye)
# Older pyjwt has no get_algorithm_by_name(); emulate it via the default
# algorithm registry so the rest of this migration can use one code path.
if not hasattr(jwt, 'get_algorithm_by_name'):
    jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]
# revision identifiers, used by Alembic.
revision = '01fdd7820f29'
down_revision = 'a9b449776953'
branch_labels = None
depends_on = None

# Migration-scoped logger (used to announce the slow RSA key generation)
logger = logging.getLogger('alembic.runtime.migration.01fdd7820f29')
def token_with_alphabet(alphabet, nbytes=None):
    '''Generate a random token of characters drawn from `alphabet`.

    The length is chosen so the token carries at least `nbytes` bytes of
    entropy; by default at least 32 bytes.
    '''
    if nbytes is None:
        nbytes = max(secrets.DEFAULT_ENTROPY, 32)
    # Bytes of entropy contributed by a single character of the alphabet
    entropy_per_char = math.log(len(alphabet), 256)
    length = math.ceil(nbytes / entropy_per_char)
    return ''.join(secrets.choice(alphabet) for _ in range(length))

def token_urlfriendly(nbytes=None):
    '''Generate a random token that is URL-safe and avoids characters known to trip up naive parsers.'''
    alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
    return token_with_alphabet(alphabet, nbytes=nbytes)
def upgrade():
    '''OIDC support: create oauth2_key (seeded with a fresh RS256 signing key)
    and add nonce/claims columns to oauth2grant and oauth2token.'''
    logger.info('Generating 3072 bit RSA key pair (RS256) for OpenID Connect support ...')
    private_key = rsa.generate_private_key(public_exponent=65537, key_size=3072, backend=default_backend())
    meta = sa.MetaData(bind=op.get_bind())
    # Storage for JWT signing keys; key material is kept as JWK text
    oauth2_key = op.create_table('oauth2_key',
        sa.Column('id', sa.String(length=64), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('active', sa.Boolean(create_constraint=False), nullable=False),
        sa.Column('algorithm', sa.String(length=32), nullable=False),
        sa.Column('private_key_jwk', sa.Text(), nullable=False),
        sa.Column('public_key_jwk', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2_key'))
    )
    algorithm = jwt.get_algorithm_by_name('RS256')
    # Seed the table with the generated key, marked active
    op.bulk_insert(oauth2_key, [{
        'id': token_urlfriendly(),
        'created': datetime.datetime.utcnow(),
        'active': True,
        'algorithm': 'RS256',
        'private_key_jwk': algorithm.to_jwk(private_key),
        'public_key_jwk': algorithm.to_jwk(private_key.public_key()),
    }])
    # Drop the code index before the batch-mode table recreation below
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
    # Full pre-migration definition of oauth2grant; passing it via copy_from
    # keeps all constraints when SQLite recreates the table
    oauth2grant = sa.Table('oauth2grant', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=255), nullable=False),
        sa.Column('redirect_uri', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
    )
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
        batch_op.add_column(sa.Column('nonce', sa.Text(), nullable=True))
        batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
        batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=True)
    # Same copy_from approach for oauth2token
    oauth2token = sa.Table('oauth2token', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sa.String(length=255), nullable=False),
        sa.Column('refresh_token', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
        sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
        sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
        batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
def downgrade():
    '''Revert OIDC support: drop the added columns, restore the code index
    and remove the oauth2_key table.'''
    meta = sa.MetaData(bind=op.get_bind())
    # Post-upgrade definition of oauth2token, passed via copy_from so SQLite
    # recreates the table without losing constraints
    oauth2token = sa.Table('oauth2token', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sa.String(length=255), nullable=False),
        sa.Column('refresh_token', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.Column('claims', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
        sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
        sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
        batch_op.drop_column('claims')
    # Post-upgrade definition of oauth2grant (redirect_uri nullable, nonce/claims present)
    oauth2grant = sa.Table('oauth2grant', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=255), nullable=False),
        sa.Column('redirect_uri', sa.String(length=255), nullable=True),
        sa.Column('nonce', sa.Text(), nullable=True),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.Column('claims', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
    )
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
        batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=False)
        batch_op.drop_column('claims')
        batch_op.drop_column('nonce')
        batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
    op.drop_table('oauth2_key')
"""lower-case mail receive addresses
Revision ID: 042879d5e3ac
Revises: 878b25c4fae7
Create Date: 2022-02-01 20:37:32.103288
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '042879d5e3ac'
down_revision = '878b25c4fae7'
branch_labels = None
depends_on = None
def upgrade():
    '''Normalize all stored mail receive addresses to lower case.'''
    meta = sa.MetaData(bind=op.get_bind())
    # Minimal table definition needed to issue the UPDATE statement
    mail_receive_address_table = sa.Table('mail_receive_address', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('mail_id', sa.Integer(), nullable=False),
        sa.Column('address', sa.String(length=128), nullable=False),
        sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_receive_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_receive_address'))
    )
    op.execute(mail_receive_address_table.update().values(address=sa.func.lower(mail_receive_address_table.c.address)))
def downgrade():
    # Lower-casing is lossy; the original casing cannot be restored
    pass
"""add expires attribute to ratelimit_event
Revision ID: 09d2edcaf0cc
Revises: af07cea65391
Create Date: 2022-02-15 14:16:19.318253
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '09d2edcaf0cc'
down_revision = 'af07cea65391'
branch_labels = None
depends_on = None
def upgrade():
    '''Add a NOT NULL expires column to ratelimit_event and make timestamp/name NOT NULL.'''
    meta = sa.MetaData(bind=op.get_bind())
    # Pre-migration table definition, used for the DELETE and as copy_from source
    ratelimit_event = sa.Table('ratelimit_event', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('key', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
    )
    # Delete all rows first so the new NOT NULL column can be added without a default
    op.execute(ratelimit_event.delete())
    with op.batch_alter_table('ratelimit_event', copy_from=ratelimit_event) as batch_op:
        batch_op.add_column(sa.Column('expires', sa.DateTime(), nullable=False))
        batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=False)
        batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=False)
def downgrade():
    '''Drop the expires column and relax timestamp/name back to nullable.'''
    meta = sa.MetaData(bind=op.get_bind())
    # Post-upgrade table definition, only needed to issue the DELETE
    ratelimit_event = sa.Table('ratelimit_event', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('key', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
    )
    # Wipe all events before altering the schema back
    op.execute(ratelimit_event.delete())
    with op.batch_alter_table('ratelimit_event', schema=None) as batch_op:
        batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=True)
        batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=True)
        batch_op.drop_column('expires')
"""MySQL compat fixes
Revision ID: 11ecc8f1ac3b
Revises: bf71799b7b9e
Create Date: 2021-09-13 04:15:07.479295
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '11ecc8f1ac3b'
down_revision = 'bf71799b7b9e'
branch_labels = None
depends_on = None
def upgrade():
    '''Recreate device_login_confirmation and invite_signup so their constraint
    names are valid for MySQL (original names were too long or wrong).'''
    meta = sa.MetaData(bind=op.get_bind())
    # First pass: recreate both tables from definitions WITHOUT the foreign
    # key/unique constraints, which drops the badly named constraints
    table = sa.Table('device_login_confirmation', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('initiation_id', sa.Integer(), nullable=False),
        sa.Column('user_dn', sa.String(length=128), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    table = sa.Table('invite_signup', meta,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('invite_id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    # Second pass: recreate again from full definitions carrying the corrected names
    meta = sa.MetaData(bind=op.get_bind())
    # Previously "fk_device_login_confirmation_initiation_id_" was named
    # "fk_device_login_confirmation_initiation_id_device_login_initiation"
    # but this was too long for MySQL.
    table = sa.Table('device_login_confirmation', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('initiation_id', sa.Integer(), nullable=False),
        sa.Column('user_dn', sa.String(length=128), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name='fk_device_login_confirmation_initiation_id_'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
        sa.UniqueConstraint('initiation_id', 'code0', name='uq_device_login_confirmation_initiation_id_code0'),
        sa.UniqueConstraint('initiation_id', 'code1', name='uq_device_login_confirmation_initiation_id_code1'),
        sa.UniqueConstraint('user_dn', name=op.f('uq_device_login_confirmation_user_dn'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
    # Previously "fk_invite_signup_id_signup" was named
    # "fk_invite_signup_signup_id_signup" by mistake.
    table = sa.Table('invite_signup', meta,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('invite_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
        sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass
def downgrade():
    # The corrected constraint names remain valid for older revisions; nothing to revert
    pass
"""Deactivate users
Revision ID: 23293f32b503
Revises: e249233e2a31
Create Date: 2022-11-10 02:06:27.766520
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '23293f32b503'
down_revision = 'e249233e2a31'
branch_labels = None
depends_on = None
def upgrade():
	"""Add service.hide_deactivated_users and user.is_deactivated flags."""
	meta = sa.MetaData(bind=op.get_bind())
	with op.batch_alter_table('service', schema=None) as batch_op:
		# server_default only backfills existing rows; it is removed again below.
		batch_op.add_column(sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	# Hand-written reflection of the current schema so batch_alter_table can
	# recreate the table with the intended constraint names/types (needed on
	# backends like SQLite that cannot ALTER columns in place).
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('hide_deactivated_users', server_default=None)
	with op.batch_alter_table('user', schema=None) as batch_op:
		# Same backfill trick as for 'service' above.
		batch_op.add_column(sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	user = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=False),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	with op.batch_alter_table('user', copy_from=user) as batch_op:
		batch_op.alter_column('is_deactivated', server_default=None)
def downgrade():
	"""Drop the user.is_deactivated and service.hide_deactivated_users columns."""
	meta = sa.MetaData(bind=op.get_bind())
	user = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=False),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('is_deactivated', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	# NOTE(review): the reflected `user` table above is never used — this
	# batch_alter_table passes schema=None instead of copy_from=user (compare
	# the 'service' block below). Presumably copy_from=user was intended; confirm.
	with op.batch_alter_table('user', schema=None) as batch_op:
		batch_op.drop_column('is_deactivated')
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('hide_deactivated_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.drop_column('hide_deactivated_users')
"""Remailer v2
Revision ID: 2b68f688bec1
Revises: e13b733ec856
Create Date: 2022-10-20 03:40:11.522343
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '2b68f688bec1'
down_revision = 'e13b733ec856'
branch_labels = None
depends_on = None
def upgrade():
	"""Replace the boolean service.use_remailer with the remailer_mode enum."""
	with op.batch_alter_table('service', schema=None) as batch_op:
		# server_default only backfills existing rows; it is removed again below.
		batch_op.add_column(sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'))
	# Lightweight column stubs are enough for the data migration below.
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	# Carry the old flag over: use_remailer=True maps to ENABLED_V1.
	op.execute(service.update().values(remailer_mode='ENABLED_V1').where(service.c.use_remailer))
	meta = sa.MetaData(bind=op.get_bind())
	# Full hand-written reflection so batch_alter_table recreates the table
	# correctly (needed on SQLite).
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False, server_default='DISABLED'),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('remailer_mode', server_default=None)
		batch_op.drop_column('use_remailer')
def downgrade():
	"""Restore the boolean service.use_remailer column from remailer_mode."""
	with op.batch_alter_table('service', schema=None) as batch_op:
		# server_default backfills existing rows; removed again below.
		batch_op.add_column(sa.Column('use_remailer', sa.BOOLEAN(), nullable=False, server_default=sa.false()))
	# Lightweight column stubs for the data migration below.
	service = sa.table('service',
		sa.column('id', sa.Integer),
		sa.column('use_remailer', sa.Boolean(create_constraint=True)),
		sa.column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode')),
	)
	# Any enabled mode (V1 or V2) collapses back to use_remailer=True.
	op.execute(service.update().values(use_remailer=sa.true()).where(service.c.remailer_mode != 'DISABLED'))
	meta = sa.MetaData(bind=op.get_bind())
	# Full hand-written reflection so batch_alter_table recreates the table
	# correctly (needed on SQLite).
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('remailer_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=False),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('use_remailer', server_default=None)
		batch_op.drop_column('remailer_mode')
"""Unique email addresses
Revision ID: 468995a9c9ee
Revises: 2b68f688bec1
Create Date: 2022-10-21 01:25:01.469670
"""
import unicodedata
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '468995a9c9ee'
down_revision = '2b68f688bec1'
branch_labels = None
depends_on = None
def normalize_address(value):
	"""Return the canonical form of an address: NFKC-folded, lower-cased, trimmed."""
	folded = unicodedata.normalize('NFKC', value)
	lowered = folded.lower()
	return lowered.strip()
def iter_rows_paged(table, pk='id', limit=1000):
	"""Yield every row of *table* in *pk* order, fetching *limit* rows per query.

	Uses keyset pagination (WHERE pk > last seen value) instead of OFFSET, so
	memory use stays bounded and rows are not skipped/duplicated if the table
	is large.
	"""
	conn = op.get_bind()
	pk_column = getattr(table.c, pk)
	last_pk = None
	while True:
		expr = table.select().order_by(pk_column).limit(limit)
		if last_pk is not None:
			expr = expr.where(pk_column > last_pk)
		result = conn.execute(expr)
		# Position of the pk column within the result rows (rows index as tuples).
		pk_index = list(result.keys()).index(pk)
		rows = result.fetchall()
		if not rows:
			break
		yield from rows
		# Remember the largest pk seen; the next query continues after it.
		last_pk = rows[-1][pk_index]
def upgrade():
	"""Add normalized-address uniqueness support to user_email.

	Adds address_normalized / enable_strict_constraints columns, makes
	`verified` nullable (NULL instead of False keeps unverified rows out of
	the UNIQUE constraint on the normalized address), backfills both columns
	for existing rows, and creates the feature_flag table.
	"""
	with op.batch_alter_table('user_email', schema=None) as batch_op:
		batch_op.add_column(sa.Column('address_normalized', sa.String(length=128), nullable=True))
		batch_op.add_column(sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True))
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=True)
	meta = sa.MetaData(bind=op.get_bind())
	# Hand-written reflection so batch_alter_table can recreate the table with
	# the intended constraint names (needed on SQLite).
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=True),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address')
	)
	for row in iter_rows_paged(user_email_table):
		# Rows index as plain tuples; positions match the column order of the
		# Table definition above. ("email_id" instead of "id" avoids shadowing
		# the builtin id().)
		email_id = row[0]
		address = row[2]
		verified = row[5]
		op.execute(user_email_table.update()\
			.where(user_email_table.c.id == email_id)\
			.values(
				address_normalized=normalize_address(address),
				verified=(True if verified else None)
			)
		)
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.alter_column('address_normalized', existing_type=sa.String(length=128), nullable=False)
		batch_op.create_unique_constraint('uq_user_email_address_normalized_verified', ['address_normalized', 'verified', 'enable_strict_constraints'])
		batch_op.create_unique_constraint('uq_user_email_user_id_address_normalized', ['user_id', 'address_normalized', 'enable_strict_constraints'])
	op.create_table('feature_flag',
		sa.Column('name', sa.String(32), nullable=False),
		sa.PrimaryKeyConstraint('name', name=op.f('pk_feature_flag')),
	)
def downgrade():
	"""Drop normalized-address support and restore verified to NOT NULL."""
	meta = sa.MetaData(bind=op.get_bind())
	user_email_table = sa.Table('user_email', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('address_normalized', sa.String(length=128), nullable=False),
		sa.Column('enable_strict_constraints', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=True),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address'),
		sa.UniqueConstraint('address_normalized', 'verified', 'enable_strict_constraints', name='uq_user_email_address_normalized_verified'),
		sa.UniqueConstraint('user_id', 'address_normalized', 'enable_strict_constraints', name='uq_user_email_user_id_address_normalized')
	)
	# Map NULL back to False before making the column NOT NULL again.
	# "== None" is intentional: SQLAlchemy renders it as "IS NULL".
	op.execute(user_email_table.update().where(user_email_table.c.verified == None).values(verified=False))
	with op.batch_alter_table('user_email', copy_from=user_email_table) as batch_op:
		batch_op.drop_constraint('uq_user_email_user_id_address_normalized', type_='unique')
		batch_op.drop_constraint('uq_user_email_address_normalized_verified', type_='unique')
		batch_op.alter_column('verified', existing_type=sa.Boolean(create_constraint=True), nullable=False)
		batch_op.drop_column('enable_strict_constraints')
		batch_op.drop_column('address_normalized')
	op.drop_table('feature_flag')
"""Unified password hashing for recovery codes
Revision ID: 4bd316207e59
Revises: e71e29cc605a
Create Date: 2024-05-22 03:13:55.917641
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic.
revision = '4bd316207e59'
down_revision = 'e71e29cc605a'
branch_labels = None
depends_on = None
def upgrade():
	"""Prefix recovery-code hashes with the '{crypt}' method marker and drop
	the unused recovery_salt column."""
	meta = sa.MetaData(bind=op.get_bind())
	# Hand-written reflection so batch_alter_table can recreate the table with
	# the intended constraint names (needed on SQLite).
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	# This field was already unused before the change to unified password hashing. So this is unrelated cleanup.
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.drop_column('recovery_salt')
	# Tag existing crypt-based hashes with the new-style method prefix; the
	# string concatenation is rendered as SQL, not evaluated in Python.
	op.execute(mfa_method.update().values(recovery_hash=('{crypt}' + mfa_method.c.recovery_hash)).where(mfa_method.c.type == 'RECOVERY_CODE'))
def downgrade():
	"""Restore the pre-unified recovery-code hash format."""
	meta = sa.MetaData(bind=op.get_bind())
	mfa_method = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype', create_constraint=True), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('totp_last_counter', sa.Integer(), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
		batch_op.add_column(sa.Column('recovery_salt', sa.VARCHAR(length=64), nullable=True))
	# Recovery codes hashed with any method other than {crypt} cannot be
	# represented in the old format, so they are deleted.
	op.execute(
		mfa_method.delete().where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			sa.not_(mfa_method.c.recovery_hash.ilike('{crypt}%'))
		))
	)
	# Strip the '{crypt}' prefix from the remaining hashes
	# (SQL substr() is 1-based, hence the +1).
	op.execute(
		mfa_method.update().values(
			recovery_hash=sa.func.substr(mfa_method.c.recovery_hash, len('{crypt}') + 1)
		).where(sa.and_(
			mfa_method.c.type == 'RECOVERY_CODE',
			mfa_method.c.recovery_hash.ilike('{crypt}%')
		))
	)
...@@ -38,16 +38,23 @@ def upgrade(): ...@@ -38,16 +38,23 @@ def upgrade():
sa.Column('token', sa.String(length=128), nullable=False), sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('valid_until', sa.DateTime(), nullable=False), sa.Column('valid_until', sa.DateTime(), nullable=False),
sa.Column('single_use', sa.Boolean(name=op.f('ck_invite_single_use')), nullable=False), sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
sa.Column('allow_signup', sa.Boolean(name=op.f('ck_invite_allow_signup')), nullable=False), sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
sa.Column('used', sa.Boolean(name=op.f('ck_invite_used')), nullable=False), sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
sa.Column('disabled', sa.Boolean(name=op.f('ck_invite_disabled')), nullable=False), sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
sa.PrimaryKeyConstraint('token', name=op.f('pk_invite')) sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
) )
with op.batch_alter_table('invite', copy_from=table) as batch_op: with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.drop_constraint('fk_invite_grant_invite_token_invite', type_='foreignkey')
with op.batch_alter_table('invite_roles', schema=None) as batch_op:
batch_op.drop_constraint('fk_invite_roles_invite_token_invite', type_='foreignkey')
with op.batch_alter_table('invite_signup', schema=None) as batch_op:
batch_op.drop_constraint('fk_invite_signup_invite_token_invite', type_='foreignkey')
with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary') batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
batch_op.add_column(sa.Column('id', sa.Integer(), autoincrement=True, nullable=False)) batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
batch_op.create_primary_key(batch_op.f('pk_invite'), ['id']) batch_op.create_primary_key(batch_op.f('pk_invite'), ['id'])
batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
batch_op.create_unique_constraint(batch_op.f('uq_invite_token'), ['token']) batch_op.create_unique_constraint(batch_op.f('uq_invite_token'), ['token'])
with op.batch_alter_table('invite_grant', schema=None) as batch_op: with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True)) batch_op.add_column(sa.Column('invite_id', sa.Integer(), nullable=True))
...@@ -62,27 +69,27 @@ def upgrade(): ...@@ -62,27 +69,27 @@ def upgrade():
with op.batch_alter_table('invite_grant', schema=None) as batch_op: with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False) batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
batch_op.drop_constraint('fk_invite_grant_invite_token_invite', type_='foreignkey')
batch_op.create_foreign_key(batch_op.f('fk_invite_grant_invite_id_invite'), 'invite', ['invite_id'], ['id']) batch_op.create_foreign_key(batch_op.f('fk_invite_grant_invite_id_invite'), 'invite', ['invite_id'], ['id'])
batch_op.drop_column('invite_token') batch_op.drop_column('invite_token')
with op.batch_alter_table('invite_roles', schema=None) as batch_op: with op.batch_alter_table('invite_roles', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary') batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_id', 'role_id']) batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_id', 'role_id'])
batch_op.drop_constraint('fk_invite_roles_invite_token_invite', type_='foreignkey')
batch_op.create_foreign_key(batch_op.f('fk_invite_roles_invite_id_invite'), 'invite', ['invite_id'], ['id']) batch_op.create_foreign_key(batch_op.f('fk_invite_roles_invite_id_invite'), 'invite', ['invite_id'], ['id'])
batch_op.drop_column('invite_token') batch_op.drop_column('invite_token')
with op.batch_alter_table('invite_signup', schema=None) as batch_op: with op.batch_alter_table('invite_signup', schema=None) as batch_op:
batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False) batch_op.alter_column('invite_id', existing_type=sa.INTEGER(), nullable=False)
batch_op.drop_constraint('fk_invite_signup_invite_token_invite', type_='foreignkey')
batch_op.create_foreign_key(batch_op.f('fk_invite_signup_invite_id_invite'), 'invite', ['invite_id'], ['id']) batch_op.create_foreign_key(batch_op.f('fk_invite_signup_invite_id_invite'), 'invite', ['invite_id'], ['id'])
batch_op.drop_column('invite_token') batch_op.drop_column('invite_token')
def downgrade(): def downgrade():
with op.batch_alter_table('invite_signup', schema=None) as batch_op: with op.batch_alter_table('invite_signup', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('fk_invite_signup_invite_id_invite'), type_='foreignkey')
batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True)) batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
with op.batch_alter_table('invite_roles', schema=None) as batch_op: with op.batch_alter_table('invite_roles', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('fk_invite_roles_invite_id_invite'), type_='foreignkey')
batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True)) batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
with op.batch_alter_table('invite_grant', schema=None) as batch_op: with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('fk_invite_grant_invite_id_invite'), type_='foreignkey')
batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True)) batch_op.add_column(sa.Column('invite_token', sa.VARCHAR(length=128), nullable=True))
op.execute(invite_grant.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_grant.c.invite_id).as_scalar())) op.execute(invite_grant.update().values(invite_token=sa.select([invite.c.token]).where(invite.c.id==invite_grant.c.invite_id).as_scalar()))
...@@ -91,20 +98,14 @@ def downgrade(): ...@@ -91,20 +98,14 @@ def downgrade():
with op.batch_alter_table('invite_signup', schema=None) as batch_op: with op.batch_alter_table('invite_signup', schema=None) as batch_op:
batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False) batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
batch_op.drop_constraint(batch_op.f('fk_invite_signup_invite_id_invite'), type_='foreignkey')
batch_op.create_foreign_key('fk_invite_signup_invite_token_invite', 'invite', ['invite_token'], ['token'])
batch_op.drop_column('invite_id') batch_op.drop_column('invite_id')
with op.batch_alter_table('invite_roles', schema=None) as batch_op: with op.batch_alter_table('invite_roles', schema=None) as batch_op:
batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False) batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary') batch_op.drop_constraint(batch_op.f('pk_invite_roles'), type_='primary')
batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_token', 'role_id']) batch_op.create_primary_key(batch_op.f('pk_invite_roles'), ['invite_token', 'role_id'])
batch_op.drop_constraint(batch_op.f('fk_invite_roles_invite_id_invite'), type_='foreignkey')
batch_op.create_foreign_key('fk_invite_roles_invite_token_invite', 'invite', ['invite_token'], ['token'])
batch_op.drop_column('invite_id') batch_op.drop_column('invite_id')
with op.batch_alter_table('invite_grant', schema=None) as batch_op: with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False) batch_op.alter_column('invite_token', existing_type=sa.VARCHAR(length=128), nullable=False)
batch_op.drop_constraint(batch_op.f('fk_invite_grant_invite_id_invite'), type_='foreignkey')
batch_op.create_foreign_key('fk_invite_grant_invite_token_invite', 'invite', ['invite_token'], ['token'])
batch_op.drop_column('invite_id') batch_op.drop_column('invite_id')
# CHECK constraints get lost when reflecting from the actual table # CHECK constraints get lost when reflecting from the actual table
...@@ -114,15 +115,22 @@ def downgrade(): ...@@ -114,15 +115,22 @@ def downgrade():
sa.Column('token', sa.String(length=128), nullable=False), sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('valid_until', sa.DateTime(), nullable=False), sa.Column('valid_until', sa.DateTime(), nullable=False),
sa.Column('single_use', sa.Boolean(name=op.f('ck_invite_single_use')), nullable=False), sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
sa.Column('allow_signup', sa.Boolean(name=op.f('ck_invite_allow_signup')), nullable=False), sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
sa.Column('used', sa.Boolean(name=op.f('ck_invite_used')), nullable=False), sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
sa.Column('disabled', sa.Boolean(name=op.f('ck_invite_disabled')), nullable=False), sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')), sa.PrimaryKeyConstraint('id', name=op.f('pk_invite')),
sa.UniqueConstraint('token', name=op.f('uq_invite_token')) sa.UniqueConstraint('token', name=op.f('uq_invite_token'))
) )
with op.batch_alter_table('invite', copy_from=table) as batch_op: with op.batch_alter_table('invite', copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint(batch_op.f('uq_invite_token'), type_='unique') batch_op.drop_constraint(batch_op.f('uq_invite_token'), type_='unique')
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary') batch_op.drop_constraint(batch_op.f('pk_invite'), type_='primary')
batch_op.drop_column('id') batch_op.drop_column('id')
batch_op.create_primary_key(batch_op.f('pk_invite'), ['token']) batch_op.create_primary_key(batch_op.f('pk_invite'), ['token'])
with op.batch_alter_table('invite_signup', schema=None) as batch_op:
batch_op.create_foreign_key('fk_invite_signup_invite_token_invite', 'invite', ['invite_token'], ['token'])
with op.batch_alter_table('invite_roles', schema=None) as batch_op:
batch_op.create_foreign_key('fk_invite_roles_invite_token_invite', 'invite', ['invite_token'], ['token'])
with op.batch_alter_table('invite_grant', schema=None) as batch_op:
batch_op.create_foreign_key('fk_invite_grant_invite_token_invite', 'invite', ['invite_token'], ['token'])