"""added role.locked
Revision ID: a594d3b3e05b
Revises: 5cab70e95bf8
Create Date: 2021-06-14 00:32:47.792794
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a594d3b3e05b'
down_revision = '5cab70e95bf8'
branch_labels = None
depends_on = None
def upgrade():
    with op.batch_alter_table('role', schema=None) as batch_op:
        batch_op.add_column(sa.Column('locked', sa.Boolean(create_constraint=True, name=op.f('ck_role_locked')), nullable=False, default=False))
def downgrade():
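    # The table is redefined with the CHECK constraint declared explicitly
    # (create_constraint=False on the Boolean avoids generating it twice), so
    # batch mode can recreate the table from copy_from and drop the constraint
    # by name before removing the column.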
    meta = sa.MetaData(bind=op.get_bind())
    table = sa.Table('role', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=32), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('moderator_group_dn', sa.String(length=128), nullable=True),
        sa.Column('locked', sa.Boolean(create_constraint=False), nullable=False),
        sa.CheckConstraint('locked IN (0, 1)', name=op.f('ck_role_locked')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
        sa.UniqueConstraint('name', name=op.f('uq_role_name'))
    )
    with op.batch_alter_table('role', copy_from=table) as batch_op:
        batch_op.drop_constraint(op.f('ck_role_locked'), 'check')
        batch_op.drop_column('locked')

"""Fix NOT NULL on role_groups.group_id
Revision 878b25c4fae7 wrongly left the column without a NOT NULL constraint.
The missing constraint is only detected by newer Alembic versions.
Revision ID: a60ce68b9214
Revises: 704d1245331c
Create Date: 2022-08-14 02:54:56.609390
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a60ce68b9214'
down_revision = '704d1245331c'
branch_labels = None
depends_on = None
def upgrade():
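    # Changing nullability in place is not possible on SQLite, so an explicit
    # Table definition is passed to batch mode via copy_from and the table is
    # recreated with the corrected NOT NULL constraint.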
    meta = sa.MetaData(bind=op.get_bind())
    role_groups = sa.Table('role_groups', meta,
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('group_id', sa.Integer(), nullable=True),
        sa.Column('requires_mfa', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_role_groups_group_id_group'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role_groups_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('role_id', 'group_id', name=op.f('pk_role_groups'))
    )
    with op.batch_alter_table('role_groups', copy_from=role_groups) as batch_op:
        batch_op.alter_column('group_id', existing_type=sa.INTEGER(), nullable=False)
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    role_groups = sa.Table('role_groups', meta,
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.Column('requires_mfa', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['group.id'], name=op.f('fk_role_groups_group_id_group'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role_groups_role_id_role'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('role_id', 'group_id', name=op.f('pk_role_groups'))
    )
    with op.batch_alter_table('role_groups', copy_from=role_groups) as batch_op:
        batch_op.alter_column('group_id', existing_type=sa.INTEGER(), nullable=True)

"""device login
Revision ID: a8c6b6e91c28
Revises: bad6fc529510
Create Date: 2021-07-19 14:37:02.559667
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a8c6b6e91c28'
down_revision = 'bad6fc529510'
branch_labels = None
depends_on = None
def upgrade():
    op.create_table('device_login_initiation',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        sa.Column('secret', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
        sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
        sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
    )
    op.create_table('device_login_confirmation',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('initiation_id', sa.Integer(), nullable=False),
        sa.Column('user_dn', sa.String(length=128), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        # name would be fk_device_login_confirmation_initiation_id_device_login_initiation, but that is too long for MySQL
        sa.ForeignKeyConstraint(['initiation_id'], ['device_login_initiation.id'], name=op.f('fk_device_login_confirmation_initiation_id_')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_confirmation')),
        sa.UniqueConstraint('initiation_id', 'code0', name=op.f('uq_device_login_confirmation_initiation_id_code0')),
        sa.UniqueConstraint('initiation_id', 'code1', name=op.f('uq_device_login_confirmation_initiation_id_code1')),
        sa.UniqueConstraint('user_dn', name=op.f('uq_device_login_confirmation_user_dn'))
    )
def downgrade():
    op.drop_table('device_login_confirmation')
    op.drop_table('device_login_initiation')

"""Add mfa_method.totp_last_counter
Revision ID: a9b449776953
Revises: 23293f32b503
Create Date: 2023-11-07 12:09:23.843865
"""
from alembic import op
import sqlalchemy as sa
revision = 'a9b449776953'
down_revision = '23293f32b503'
branch_labels = None
depends_on = None
def upgrade():
    meta = sa.MetaData(bind=op.get_bind())
    mfa_method = sa.Table('mfa_method', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('recovery_salt', sa.String(length=64), nullable=True),
        sa.Column('recovery_hash', sa.String(length=256), nullable=True),
        sa.Column('totp_key', sa.String(length=64), nullable=True),
        sa.Column('webauthn_cred', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
    )
    with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
        batch_op.add_column(sa.Column('totp_last_counter', sa.Integer(), nullable=True))
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    mfa_method = sa.Table('mfa_method', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='ck_mfa_method_type'), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('recovery_salt', sa.String(length=64), nullable=True),
        sa.Column('recovery_hash', sa.String(length=256), nullable=True),
        sa.Column('totp_key', sa.String(length=64), nullable=True),
        sa.Column('totp_last_counter', sa.Integer(), nullable=True),
        sa.Column('webauthn_cred', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_mfa_method_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
    )
    with op.batch_alter_table('mfa_method', copy_from=mfa_method) as batch_op:
        batch_op.drop_column('totp_last_counter')

"""Locking and new ID allocation
Revision ID: aeb07202a6c8
Revises: 468995a9c9ee
Create Date: 2022-10-30 13:24:39.864612
"""
from alembic import op
import sqlalchemy as sa
from flask import current_app
# revision identifiers, used by Alembic.
revision = 'aeb07202a6c8'
down_revision = '468995a9c9ee'
branch_labels = None
depends_on = None
def upgrade():
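    # Approach: a `lock` table provides rows that can be locked to serialize
    # UID/GID allocation, while `uid_allocation` and `gid_allocation` appear to
    # record every ID ever handed out, so IDs of previously deleted users and
    # groups are never reused.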
    conn = op.get_bind()
    meta = sa.MetaData(bind=conn)
    user_table = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('primary_email_id', sa.Integer(), nullable=False),
        sa.Column('recovery_email_id', sa.Integer(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
        sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    group_table = sa.Table('group', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_gid', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.Column('description', sa.String(length=128), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_group')),
        sa.UniqueConstraint('name', name=op.f('uq_group_name')),
        sa.UniqueConstraint('unix_gid', name=op.f('uq_group_unix_gid'))
    )
    lock_table = op.create_table('lock',
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('name', name=op.f('pk_lock'))
    )
    conn.execute(sa.insert(lock_table).values(name='uid_allocation'))
    conn.execute(sa.insert(lock_table).values(name='gid_allocation'))
    uid_allocation_table = op.create_table('uid_allocation',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_uid_allocation'))
    )
    # Completely block range USER_MIN_UID to max UID currently in use (within
    # the UID range) to account for users deleted in the past.
    max_user_uid = conn.execute(
        sa.select([sa.func.max(user_table.c.unix_uid)])
        .where(user_table.c.unix_uid <= current_app.config['USER_MAX_UID'])
    ).scalar() or 0
    insert_data = []
    if max_user_uid:
        for uid in range(current_app.config['USER_MIN_UID'], max_user_uid + 1):
            insert_data.append({'id': uid})
    op.bulk_insert(uid_allocation_table, insert_data)
    max_service_uid = conn.execute(
        sa.select([sa.func.max(user_table.c.unix_uid)])
        .where(user_table.c.unix_uid <= current_app.config['USER_SERVICE_MAX_UID'])
    ).scalar() or 0
    insert_data = []
    if max_service_uid:
        for uid in range(current_app.config['USER_SERVICE_MIN_UID'], max_service_uid + 1):
            if uid < current_app.config['USER_MIN_UID'] or uid > max_user_uid:
                insert_data.append({'id': uid})
    op.bulk_insert(uid_allocation_table, insert_data)
    # Also block all UIDs outside of both ranges that are in use
    # (just to be sure, there should not be any)
    conn.execute(sa.insert(uid_allocation_table).from_select(['id'],
        sa.select([user_table.c.unix_uid]).where(sa.and_(
            # Out of range for user
            sa.or_(
                user_table.c.unix_uid < current_app.config['USER_MIN_UID'],
                user_table.c.unix_uid > current_app.config['USER_MAX_UID']
            ),
            # and out of range for service user
            sa.or_(
                user_table.c.unix_uid < current_app.config['USER_SERVICE_MIN_UID'],
                user_table.c.unix_uid > current_app.config['USER_SERVICE_MAX_UID']
            ),
        ))
    ))
    # Normally we would pass copy_from=user_table, so we don't lose any metadata,
    # but this somehow causes an AttributeError (Neither 'ColumnClause' object
    # nor 'Comparator' object has an attribute 'copy'). Also, we don't seem to
    # lose anything without it.
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.create_foreign_key(batch_op.f('fk_user_unix_uid_uid_allocation'), 'uid_allocation', ['unix_uid'], ['id'])
    gid_allocation_table = op.create_table('gid_allocation',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_gid_allocation'))
    )
    group_table = sa.table('group', sa.column('unix_gid'))
    # Completely block range GROUP_MIN_GID to max GID currently in use (within
    # the GID range) to account for groups deleted in the past.
    max_group_gid = conn.execute(
        sa.select([sa.func.max(group_table.c.unix_gid)])
        .where(group_table.c.unix_gid <= current_app.config['GROUP_MAX_GID'])
    ).scalar() or 0
    insert_data = []
    if max_group_gid:
        for gid in range(current_app.config['GROUP_MIN_GID'], max_group_gid + 1):
            insert_data.append({'id': gid})
    op.bulk_insert(gid_allocation_table, insert_data)
    # Also block out-of-range GIDs
    conn.execute(sa.insert(gid_allocation_table).from_select(['id'],
        sa.select([group_table.c.unix_gid]).where(
            sa.or_(
                group_table.c.unix_gid < current_app.config['GROUP_MIN_GID'],
                group_table.c.unix_gid > current_app.config['GROUP_MAX_GID']
            )
        )
    ))
    # See comment on batch_alter_table above
    with op.batch_alter_table('group', schema=None) as batch_op:
        batch_op.create_foreign_key(batch_op.f('fk_group_unix_gid_gid_allocation'), 'gid_allocation', ['unix_gid'], ['id'])
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    user_table = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('primary_email_id', sa.Integer(), nullable=False),
        sa.Column('recovery_email_id', sa.Integer(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
        sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
        sa.ForeignKeyConstraint(['unix_uid'], ['uid_allocation.id'], name=op.f('fk_user_unix_uid_uid_allocation')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    group_table = sa.Table('group', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_gid', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=32), nullable=False),
        sa.Column('description', sa.String(length=128), nullable=False),
        sa.ForeignKeyConstraint(['unix_gid'], ['gid_allocation.id'], name=op.f('fk_group_unix_gid_gid_allocation')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_group')),
        sa.UniqueConstraint('name', name=op.f('uq_group_name')),
        sa.UniqueConstraint('unix_gid', name=op.f('uq_group_unix_gid'))
    )
    with op.batch_alter_table('group', copy_from=group_table) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_group_unix_gid_gid_allocation'), type_='foreignkey')
    with op.batch_alter_table('user', copy_from=user_table) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_user_unix_uid_uid_allocation'), type_='foreignkey')
    op.drop_table('gid_allocation')
    op.drop_table('uid_allocation')
    op.drop_table('lock')

"""unified password hashing for User and Signup
Revision ID: af07cea65391
Revises: 042879d5e3ac
Create Date: 2022-02-11 23:55:35.502529
"""
from alembic import op
import sqlalchemy as sa
revision = 'af07cea65391'
down_revision = '042879d5e3ac'
branch_labels = None
depends_on = None
def upgrade():
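    # user.pwhash is widened to Text, and existing signup password hashes get a
    # '{crypt}' method prefix, presumably so both models can share a single
    # '{method}hash' storage format.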
    meta = sa.MetaData(bind=op.get_bind())
    signup = sa.Table('signup', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('loginname', sa.Text(), nullable=True),
        sa.Column('displayname', sa.Text(), nullable=True),
        sa.Column('mail', sa.Text(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_signup_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_signup')),
        sa.UniqueConstraint('user_id', name=op.f('uq_signup_user_id'))
    )
    user = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('mail', sa.String(length=128), nullable=False),
        sa.Column('pwhash', sa.String(length=256), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    with op.batch_alter_table('user', copy_from=user) as batch_op:
        batch_op.alter_column('pwhash', existing_type=sa.String(length=256), type_=sa.Text())
    op.execute(signup.update().values(pwhash=('{crypt}' + signup.c.pwhash)))
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    signup = sa.Table('signup', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('loginname', sa.Text(), nullable=True),
        sa.Column('displayname', sa.Text(), nullable=True),
        sa.Column('mail', sa.Text(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_signup_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_signup')),
        sa.UniqueConstraint('user_id', name=op.f('uq_signup_user_id'))
    )
    user = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('mail', sa.String(length=128), nullable=False),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    with op.batch_alter_table('user', copy_from=user) as batch_op:
        batch_op.alter_column('pwhash', existing_type=sa.Text(), type_=sa.String(length=256))
    op.execute(signup.update().values(pwhash=None).where(sa.not_(signup.c.pwhash.ilike('{crypt}%'))))
    op.execute(signup.update().values(pwhash=sa.func.substr(signup.c.pwhash, len('{crypt}') + 1)).where(signup.c.pwhash.ilike('{crypt}%')))

"""added role.is_default
Revision ID: aff5f350dcdf
Revises: a594d3b3e05b
Create Date: 2021-06-15 21:24:13.158828
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'aff5f350dcdf'
down_revision = 'a594d3b3e05b'
branch_labels = None
depends_on = None
def upgrade():
    with op.batch_alter_table('role', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_default', sa.Boolean(create_constraint=True, name=op.f('ck_role_is_default')), nullable=False, default=False))
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    table = sa.Table('role', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=32), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('moderator_group_dn', sa.String(length=128), nullable=True),
        sa.Column('locked', sa.Boolean(create_constraint=False), nullable=False),
        sa.Column('is_default', sa.Boolean(create_constraint=False), nullable=False),
        sa.CheckConstraint('locked IN (0, 1)', name=op.f('ck_role_locked')),
        sa.CheckConstraint('is_default IN (0, 1)', name=op.f('ck_role_is_default')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
        sa.UniqueConstraint('name', name=op.f('uq_role_name'))
    )
    with op.batch_alter_table('role', copy_from=table) as batch_op:
        batch_op.drop_constraint(op.f('ck_role_is_default'), 'check')
        batch_op.drop_column('is_default')

"""Multiple email addresses
Revision ID: b273d7fdaa25
Revises: b8fbefca3675
Create Date: 2022-08-19 22:52:48.730877
"""
from alembic import op
import sqlalchemy as sa
import datetime
# revision identifiers, used by Alembic.
revision = 'b273d7fdaa25'
down_revision = 'b8fbefca3675'
branch_labels = None
depends_on = None
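# Helper: iterate over all rows of a table in pages of `limit` rows, using
# keyset pagination on the primary key so large tables are never loaded into
# memory at once.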
def iter_rows_paged(table, pk='id', limit=1000):
    conn = op.get_bind()
    pk_column = getattr(table.c, pk)
    last_pk = None
    while True:
        expr = table.select().order_by(pk_column).limit(limit)
        if last_pk is not None:
            expr = expr.where(pk_column > last_pk)
        result = conn.execute(expr)
        pk_index = list(result.keys()).index(pk)
        rows = result.fetchall()
        if not rows:
            break
        yield from rows
        last_pk = rows[-1][pk_index]
def upgrade():
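    # Outline: create the user_email table, copy every user's mail attribute
    # into it as a verified address, point user.primary_email_id at that row,
    # convert pending mailToken rows into unverified addresses, then drop
    # user.mail and the mailToken table.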
    user_email_table = op.create_table('user_email',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('address', sa.String(length=128), nullable=False),
        sa.Column('verified', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
        sa.Column('verification_secret', sa.Text(), nullable=True),
        sa.Column('verification_expires', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
        sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address')
    )
    user_table = sa.table('user',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
    )
    op.execute(user_email_table.insert().from_select(
        ['user_id', 'address', 'verified'],
        sa.select([user_table.c.id, user_table.c.mail, sa.literal(True, sa.Boolean(create_constraint=True))])
    ))
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('primary_email_id', sa.Integer(), nullable=True))
        batch_op.add_column(sa.Column('recovery_email_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_user_primary_email_id_user_email'), 'user_email', ['primary_email_id'], ['id'], onupdate='CASCADE')
        batch_op.create_foreign_key(batch_op.f('fk_user_recovery_email_id_user_email'), 'user_email', ['recovery_email_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
    meta = sa.MetaData(bind=op.get_bind())
    user_table = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
        sa.Column('primary_email_id', sa.Integer(), nullable=True),
        sa.Column('recovery_email_id', sa.Integer(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
        sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    op.execute(user_table.update().values(primary_email_id=sa.select([user_email_table.c.id]).where(user_email_table.c.user_id==user_table.c.id).limit(1).as_scalar()))
    with op.batch_alter_table('user', copy_from=user_table) as batch_op:
        batch_op.alter_column('primary_email_id', existing_type=sa.Integer(), nullable=False)
        batch_op.drop_column('mail')
    mailToken_table = sa.table('mailToken',
        sa.column('id', sa.Integer()),
        sa.column('token', sa.Text()),
        sa.column('created', sa.DateTime()),
        sa.column('newmail', sa.Text()),
        sa.column('user_id', sa.Integer()),
    )
    for token_id, token, created, newmail, user_id in iter_rows_paged(mailToken_table):
        op.execute(user_email_table.insert().values(
            user_id=user_id,
            address=newmail,
            verified=False,
            verification_legacy_id=token_id,
            verification_secret='{PLAIN}'+token,
            # expiry is computed in Python, since `created` was fetched as a Python datetime
            verification_expires=(created + datetime.timedelta(days=2)),
        ))
    op.drop_table('mailToken')
def downgrade():
    with op.batch_alter_table('user', schema=None) as batch_op:
        batch_op.add_column(sa.Column('mail', sa.VARCHAR(length=128), nullable=True))
    meta = sa.MetaData(bind=op.get_bind())
    user_table = sa.Table('user', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('unix_uid', sa.Integer(), nullable=False),
        sa.Column('loginname', sa.String(length=32), nullable=False),
        sa.Column('displayname', sa.String(length=128), nullable=False),
        sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
        sa.Column('primary_email_id', sa.Integer(), nullable=False),
        sa.Column('recovery_email_id', sa.Integer(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
        sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
        sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
        sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
    )
    user_email_table = sa.table('user_email',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('address', sa.String(length=128), nullable=False),
    )
    op.execute(user_table.update().values(mail=sa.select([user_email_table.c.address]).where(user_email_table.c.id==user_table.c.primary_email_id).limit(1).as_scalar()))
    with op.batch_alter_table('user', copy_from=user_table) as batch_op:
        batch_op.alter_column('mail', existing_type=sa.VARCHAR(length=128), nullable=False)
        batch_op.drop_constraint(batch_op.f('fk_user_recovery_email_id_user_email'), type_='foreignkey')
        batch_op.drop_constraint(batch_op.f('fk_user_primary_email_id_user_email'), type_='foreignkey')
        batch_op.drop_column('recovery_email_id')
        batch_op.drop_column('primary_email_id')
    op.create_table('mailToken',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('token', sa.VARCHAR(length=128), nullable=False),
        sa.Column('created', sa.DATETIME(), nullable=True),
        sa.Column('newmail', sa.VARCHAR(length=255), nullable=True),
        sa.Column('user_id', sa.INTEGER(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('user_email')

"""added api permission for metrics
Revision ID: b8fbefca3675
Revises: f2eb2c52a61f
Create Date: 2022-08-22 21:30:19.265531
"""
from alembic import op
import sqlalchemy as sa
revision = 'b8fbefca3675'
down_revision = 'f2eb2c52a61f'
branch_labels = None
depends_on = None
def upgrade():
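    # server_default=sa.false() lets the new NOT NULL column be added to an
    # api_client table that already contains rows.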
    meta = sa.MetaData(bind=op.get_bind())
    api_client = sa.Table('api_client', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('auth_username', sa.String(length=40), nullable=False),
        sa.Column('auth_password', sa.Text(), nullable=False),
        sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
        sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
    )
    with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
        batch_op.add_column(sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    api_client = sa.Table('api_client', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('auth_username', sa.String(length=40), nullable=False),
        sa.Column('auth_password', sa.Text(), nullable=False),
        sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
        sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
        sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
    )
    with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
        batch_op.drop_column('perm_metrics')

"""Move API and OAuth2 clients to DB
Revision ID: b9d3f7dac9db
Revises: 09d2edcaf0cc
Create Date: 2022-02-17 21:14:00.440057
"""
import secrets
import hashlib
import base64
from alembic import op
import sqlalchemy as sa
from flask import current_app
revision = 'b9d3f7dac9db'
down_revision = '09d2edcaf0cc'
branch_labels = None
depends_on = None
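# Secrets from the config file are not copied verbatim: they are hashed with
# SHA-512 and stored as '{sha512}' plus the base64-encoded digest, presumably
# the same format the application verifies client secrets against.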
def hash_sha512(password):
    ctx = hashlib.new('sha512', password.encode())
    return '{sha512}' + base64.b64encode(ctx.digest()).decode()
def upgrade():
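    # First pass over the config: collect explicitly configured service names,
    # then derive a unique service name for every OAuth2 and API client (falling
    # back to the client id, with an 'oauth2_' or 'api_' prefix and a numeric
    # suffix to avoid collisions) together with its access restrictions,
    # credentials and redirect/logout URIs. The collected data is then inserted
    # into the new service, api_client and oauth2client tables below.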
    used_service_names = set()
    services = {} # name -> limit_access, access_group_name
    oauth2_clients = [] # service_name, client_id, client_secret, redirect_uris, logout_uris
    api_clients = [] # service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases
    for opts in current_app.config.get('OAUTH2_CLIENTS', {}).values():
        if 'service_name' in opts:
            used_service_names.add(opts['service_name'])
    for opts in current_app.config.get('API_CLIENTS_2', {}).values():
        if 'service_name' in opts:
            used_service_names.add(opts['service_name'])
    for client_id, opts in current_app.config.get('OAUTH2_CLIENTS', {}).items():
        if 'client_secret' not in opts:
            continue
        if 'service_name' in opts:
            service_name = opts['service_name']
        else:
            service_name = client_id
            if service_name in used_service_names:
                service_name = 'oauth2_' + service_name
            if service_name in used_service_names:
                num = 1
                while (service_name + '_%d'%num) in used_service_names:
                    num += 1
                service_name = service_name + '_%d'%num
        if opts.get('required_group') is None:
            limit_access = False
            access_group_name = None
        elif isinstance(opts.get('required_group'), str):
            limit_access = True
            access_group_name = opts['required_group']
        else:
            limit_access = True
            access_group_name = None
        client_secret = opts['client_secret']
        redirect_uris = opts.get('redirect_uris') or []
        logout_uris = []
        for item in opts.get('logout_urls') or []:
            if isinstance(item, str):
                logout_uris.append(('GET', item))
            else:
                logout_uris.append(item)
        used_service_names.add(service_name)
        if service_name not in services or services[service_name] == (False, None):
            services[service_name] = (limit_access, access_group_name)
        elif services[service_name] == (limit_access, access_group_name):
            pass
        else:
            services[service_name] = (True, None)
        oauth2_clients.append((service_name, client_id, client_secret, redirect_uris, logout_uris))
    for client_id, opts in current_app.config.get('API_CLIENTS_2', {}).items():
        if 'client_secret' not in opts:
            continue
        if 'service_name' in opts:
            service_name = opts['service_name']
        else:
            service_name = 'api_' + client_id
            if service_name in used_service_names:
                num = 1
                while (service_name + '_%d'%num) in used_service_names:
                    num += 1
                service_name = service_name + '_%d'%num
        auth_username = client_id
        auth_password = opts['client_secret']
        perm_users = 'getusers' in opts.get('scopes', [])
        perm_checkpassword = 'checkpassword' in opts.get('scopes', [])
        perm_mail_aliases = 'getmails' in opts.get('scopes', [])
        if service_name not in services:
            services[service_name] = (False, None)
        api_clients.append((service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases))
    meta = sa.MetaData(bind=op.get_bind())
    service_table = op.create_table('service',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('access_group_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
        sa.UniqueConstraint('name', name=op.f('uq_service_name'))
    )
    group_table = sa.table('group',
        sa.column('id'),
        sa.column('name'),
    )
    for service_name, args in services.items():
        limit_access, access_group_name = args
        op.execute(service_table.insert().values(name=service_name, limit_access=limit_access, access_group_id=sa.select([group_table.c.id]).where(group_table.c.name==access_group_name).as_scalar()))
    api_client_table = op.create_table('api_client',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('auth_username', sa.String(length=40), nullable=False),
        sa.Column('auth_password', sa.Text(), nullable=False),
        sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
        sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
        sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
    )
    for service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases in api_clients:
        op.execute(api_client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), auth_username=auth_username, auth_password=hash_sha512(auth_password), perm_users=perm_users, perm_checkpassword=perm_checkpassword, perm_mail_aliases=perm_mail_aliases))
    oauth2client_table = op.create_table('oauth2client',
        sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=False),
        sa.Column('client_secret', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
        sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
    )
    oauth2logout_uri_table = op.create_table('oauth2logout_uri',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('method', sa.String(length=40), nullable=False),
        sa.Column('uri', sa.String(length=255), nullable=False),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2logout_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2logout_uri'))
    )
    oauth2redirect_uri_table = op.create_table('oauth2redirect_uri',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('uri', sa.String(length=255), nullable=False),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2redirect_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2redirect_uri'))
    )
    for service_name, client_id, client_secret, redirect_uris, logout_uris in oauth2_clients:
        op.execute(oauth2client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), client_id=client_id, client_secret=hash_sha512(client_secret)))
        for method, uri, in logout_uris:
            op.execute(oauth2logout_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), method=method, uri=uri))
        for uri in redirect_uris:
            op.execute(oauth2redirect_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), uri=uri))
    with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
        batch_op.add_column(sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), 'oauth2client', ['oauth2_client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
    device_login_initiation_table = sa.Table('device_login_initiation', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        sa.Column('secret', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
        sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
        sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
        sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
    )
    op.execute(device_login_initiation_table.update().values(oauth2_client_db_id=sa.select([oauth2client_table.c.db_id]).where(device_login_initiation_table.c.oauth2_client_id==oauth2client_table.c.client_id).as_scalar()))
    op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_db_id==None))
    with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
        batch_op.drop_column('oauth2_client_id')
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
    oauth2grant_table = sa.Table('oauth2grant', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=True),
        sa.Column('code', sa.String(length=255), nullable=False),
        sa.Column('redirect_uri', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
        sa.Index('ix_oauth2grant_code', 'code')
    )
    op.execute(oauth2grant_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2grant_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
    op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_db_id==None))
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
        batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
        batch_op.drop_column('client_id')
    with op.batch_alter_table('oauth2token', schema=None) as batch_op:
        batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
        batch_op.create_foreign_key(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
    oauth2token_table = sa.Table('oauth2token', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=False),
        sa.Column('client_db_id', sa.Integer(), nullable=True),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sa.String(length=255), nullable=False),
        sa.Column('refresh_token', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
        sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
        sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    op.execute(oauth2token_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2token_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
    op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_db_id==None))
    with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
        batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
        batch_op.drop_column('client_id')
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    oauth2client_table = sa.Table('oauth2client', meta,
        sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=False),
        sa.Column('client_secret', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
        sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
    )
    with op.batch_alter_table('oauth2token', schema=None) as batch_op:
        batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
    oauth2token_table = sa.Table('oauth2token', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=True),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column('access_token', sa.String(length=255), nullable=False),
        sa.Column('refresh_token', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
        sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
        sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
    )
    op.execute(oauth2token_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2token_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
    op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_id==None))
    with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
        batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
        batch_op.drop_constraint(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), type_='foreignkey')
        batch_op.drop_column('client_db_id')
    with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
        batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
    oauth2grant_table = sa.Table('oauth2grant', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=40), nullable=True),
        sa.Column('client_db_id', sa.Integer(), nullable=False),
        sa.Column('code', sa.String(length=255), nullable=False),
        sa.Column('redirect_uri', sa.String(length=255), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=False),
        sa.Column('_scopes', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
        sa.Index('ix_oauth2grant_code', 'code')
    )
    op.execute(oauth2grant_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2grant_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
    op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_id==None))
    with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
        batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
        batch_op.drop_constraint(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), type_='foreignkey')
        batch_op.drop_column('client_db_id')
    with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
        batch_op.add_column(sa.Column('oauth2_client_id', sa.VARCHAR(length=40), nullable=True))
    device_login_initiation_table = sa.Table('device_login_initiation', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
        sa.Column('code0', sa.String(length=32), nullable=False),
        sa.Column('code1', sa.String(length=32), nullable=False),
        sa.Column('secret', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
        sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
        sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
        sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
    )
    op.execute(device_login_initiation_table.update().values(oauth2_client_id=sa.select([oauth2client_table.c.client_id]).where(device_login_initiation_table.c.oauth2_client_db_id==oauth2client_table.c.db_id).as_scalar()))
    op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_id==None))
    with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
        batch_op.drop_constraint(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), type_='foreignkey')
        batch_op.drop_column('oauth2_client_db_id')
    op.drop_table('oauth2redirect_uri')
    op.drop_table('oauth2logout_uri')
    op.drop_table('oauth2client')
    op.drop_table('api_client')
    op.drop_table('service')

"""added RoleGroup.requires_mfa and cleanup
Revision ID: bad6fc529510
Revises: aff5f350dcdf
Create Date: 2021-06-22 15:58:10.515330
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bad6fc529510'
down_revision = 'aff5f350dcdf'
branch_labels = None
depends_on = None
def upgrade():
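    # The role-group association loses its surrogate `id` column: the primary
    # key becomes the (role_id, group_dn) pair, `dn` is renamed to `group_dn`,
    # and the new requires_mfa flag is added.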
    meta = sa.MetaData(bind=op.get_bind())
    table = sa.Table('role-group', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('dn', sa.String(length=128), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
        sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
    )
    with op.batch_alter_table(table.name, copy_from=table) as batch_op:
        batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
        batch_op.drop_constraint(batch_op.f('pk_role-group'), type_='primary')
        batch_op.drop_constraint(batch_op.f('uq_role-group_dn'), type_='unique')
        batch_op.drop_column('id')
        batch_op.alter_column('dn', new_column_name='group_dn', nullable=False, existing_type=sa.String(128))
        batch_op.alter_column('role_id', nullable=False, existing_type=sa.Integer())
        batch_op.add_column(sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False))
        batch_op.create_primary_key(batch_op.f('pk_role-group'), ['role_id', 'group_dn'])
def downgrade():
    meta = sa.MetaData(bind=op.get_bind())
    table = sa.Table('role-group', meta,
        sa.Column('role_id', sa.Integer(), nullable=False),
        sa.Column('group_dn', sa.String(128), nullable=False),
        sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
        sa.PrimaryKeyConstraint('role_id', 'group_dn', name=op.f('pk_role-group'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        # For some reason MySQL does not allow us to drop the primary key if the foreignkey on role_id exists
        batch_op.drop_constraint(batch_op.f('fk_role-group_role_id_role'), type_='foreignkey')
        batch_op.drop_constraint(batch_op.f('pk_role-group'), type_='primary')
        batch_op.drop_column('requires_mfa')
        batch_op.alter_column('role_id', nullable=True, existing_type=sa.Integer())
        batch_op.alter_column('group_dn', new_column_name='dn', nullable=True, existing_type=sa.String(128))
        batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
        batch_op.create_primary_key(batch_op.f('pk_role-group'), ['id'])
        batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
        # For some reason MySQL ignores this statement
        #batch_op.create_unique_constraint(op.f('uq_role-group_dn'), ['dn', 'role_id'])
    meta = sa.MetaData(bind=op.get_bind())
    table = sa.Table('role-group', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('dn', sa.String(length=128), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
        sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
    )
    with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
        pass

"""Add id to signup table
Revision ID: bf71799b7b9e
Revises: e9a67175e179
Create Date: 2021-09-06 23:30:07.486102
"""
from alembic import op
import sqlalchemy as sa
revision = 'bf71799b7b9e'
down_revision = 'e9a67175e179'
branch_labels = None
depends_on = None
def upgrade():
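    # signup gains an autoincrement `id` primary key in place of `token`, and
    # invite_signup is rewritten to reference signup by that id instead of by
    # token.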
    meta = sa.MetaData(bind=op.get_bind())
    invite_signup = sa.Table('invite_signup', meta,
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('invite_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
        sa.ForeignKeyConstraint(['token'], ['signup.token'], name=op.f('fk_invite_signup_token_signup')),
        sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
    )
    with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
        batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
        batch_op.drop_constraint('fk_invite_signup_token_signup', 'foreignkey')
    meta = sa.MetaData(bind=op.get_bind())
    signup = sa.Table('signup', meta,
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('loginname', sa.Text(), nullable=True),
        sa.Column('displayname', sa.Text(), nullable=True),
        sa.Column('mail', sa.Text(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('user_dn', sa.String(length=128), nullable=True),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('token', name=op.f('pk_signup'))
    )
    with op.batch_alter_table(signup.name, copy_from=signup, recreate='always') as batch_op:
        batch_op.drop_constraint('pk_signup', 'primary')
        batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
        batch_op.create_primary_key('pk_signup', ['id'])
        batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
    meta = sa.MetaData(bind=op.get_bind())
    signup = sa.Table('signup', meta,
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('loginname', sa.Text(), nullable=True),
        sa.Column('displayname', sa.Text(), nullable=True),
        sa.Column('mail', sa.Text(), nullable=True),
        sa.Column('pwhash', sa.Text(), nullable=True),
        sa.Column('user_dn', sa.String(length=128), nullable=True),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
    )
    invite_signup = sa.Table('invite_signup', meta,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token', sa.String(length=128), nullable=False),
        sa.Column('invite_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
        sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
    )
    op.execute(invite_signup.update().values(id=sa.select([signup.c.id]).where(signup.c.token==invite_signup.c.token).limit(1).as_scalar()))
    with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
        batch_op.alter_column('id', nullable=False, existing_type=sa.Integer())
        batch_op.create_foreign_key(batch_op.f('fk_invite_signup_id_signup'), 'signup', ['id'], ['id'])
        batch_op.drop_constraint('pk_invite_signup', 'primary')
        batch_op.drop_column('token')
        batch_op.create_primary_key('pk_invite_signup', ['id'])
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
invite_signup = sa.Table('invite_signup', meta,
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('invite_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
)
with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
batch_op.add_column(sa.Column('token', sa.VARCHAR(length=128), nullable=True))
batch_op.drop_constraint('fk_invite_signup_id_signup', type_='foreignkey')
meta = sa.MetaData(bind=op.get_bind())
signup = sa.Table('signup', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('loginname', sa.Text(), nullable=True),
sa.Column('displayname', sa.Text(), nullable=True),
sa.Column('mail', sa.Text(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('type', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
)
with op.batch_alter_table(signup.name, copy_from=signup) as batch_op:
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint('pk_signup', 'primary')
batch_op.create_primary_key('pk_signup', ['token'])
meta = sa.MetaData(bind=op.get_bind())
signup = sa.Table('signup', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('loginname', sa.Text(), nullable=True),
sa.Column('displayname', sa.Text(), nullable=True),
sa.Column('mail', sa.Text(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('type', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
)
invite_signup = sa.Table('invite_signup', meta,
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('invite_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
)
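# Restore the token column from the matching signup row (correlated on id)
# before switching the primary key back to token.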
op.execute(invite_signup.update().values(token=sa.select([signup.c.token]).where(signup.c.id==invite_signup.c.id).limit(1).as_scalar()))
with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
batch_op.create_foreign_key(batch_op.f('fk_invite_signup_token_signup'), 'signup', ['token'], ['token'])
batch_op.drop_constraint('pk_invite_signup', 'primary')
batch_op.drop_column('id')
batch_op.create_primary_key('pk_invite_signup', ['token'])
meta = sa.MetaData(bind=op.get_bind())
signup = sa.Table('signup', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('loginname', sa.Text(), nullable=True),
sa.Column('displayname', sa.Text(), nullable=True),
sa.Column('mail', sa.Text(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('type', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
)
with op.batch_alter_table(signup.name, copy_from=signup) as batch_op:
batch_op.drop_column('id')
"""constraint name fixes
Revision ID: cbca20cf64d9
Revises: 5a07d4a63b64
Create Date: 2021-04-13 18:10:58.210232
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cbca20cf64d9'
down_revision = '5a07d4a63b64'
branch_labels = None
depends_on = None
def upgrade():
# This migration recreates all tables with identical columns and constraints.
# The only difference is that all constraints are named according to the newly
# defined naming conventions. This enables changing constraints in future
# migrations.
#
# We call batch_alter_table without any operations to have it recreate all
# tables with the column/constraint definitions from "table" and populate them
# with the data from the original table.
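# Note: op.f() marks the given constraint names as already conforming to the
# naming convention, so Alembic uses them literally instead of generating new ones.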
# First, recreate the tables that have (unnamed) foreign keys without any foreign key constraints
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('invite_grant', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.Column('user_dn', sa.String(length=128), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('invite_roles', meta,
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('invite_token', 'role_id', name=op.f('pk_invite_roles'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('invite_signup', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-group', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('dn', sa.String(length=128), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-inclusion', meta,
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('included_role_id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-user', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('dn', sa.String(length=128), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role-user')),
sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-user_dn'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
# Then recreate all tables with properly named constraints and re-add the foreign key constraints
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('invite', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('valid_until', sa.DateTime(), nullable=False),
sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('mailToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.Column('newmail', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('mfa_method', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='mfatype'), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('dn', sa.String(length=128), nullable=True),
sa.Column('recovery_salt', sa.String(length=64), nullable=True),
sa.Column('recovery_hash', sa.String(length=256), nullable=True),
sa.Column('totp_key', sa.String(length=64), nullable=True),
sa.Column('webauthn_cred', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('oauth2grant', meta,
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('client_id', sa.String(length=40), nullable=True),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('redirect_uri', sa.String(length=255), nullable=True),
sa.Column('expires', sa.DateTime(), nullable=True),
sa.Column('_scopes', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('oauth2token', meta,
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('client_id', sa.String(length=40), nullable=True),
sa.Column('token_type', sa.String(length=40), nullable=True),
sa.Column('access_token', sa.String(length=255), nullable=True),
sa.Column('refresh_token', sa.String(length=255), nullable=True),
sa.Column('expires', sa.DateTime(), nullable=True),
sa.Column('_scopes', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('passwordToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('ratelimit_event', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('timestamp', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('key', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=32), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
sa.UniqueConstraint('name', name=op.f('uq_role_name'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('signup', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('loginname', sa.Text(), nullable=True),
sa.Column('displayname', sa.Text(), nullable=True),
sa.Column('mail', sa.Text(), nullable=True),
sa.Column('pwhash', sa.Text(), nullable=True),
sa.Column('user_dn', sa.String(length=128), nullable=True),
sa.Column('type', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_signup'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('invite_grant', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.Column('user_dn', sa.String(length=128), nullable=False),
sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_grant_invite_token_invite')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('invite_roles', meta,
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_roles_invite_token_invite')),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_invite_roles_role_id_role')),
sa.PrimaryKeyConstraint('invite_token', 'role_id', name=op.f('pk_invite_roles'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('invite_signup', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('invite_token', sa.String(length=128), nullable=False),
sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_signup_invite_token_invite')),
sa.ForeignKeyConstraint(['token'], ['signup.token'], name=op.f('fk_invite_signup_token_signup')),
sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-group', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('dn', sa.String(length=128), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-inclusion', meta,
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('included_role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], name=op.f('fk_role-inclusion_included_role_id_role')),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-inclusion_role_id_role')),
sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
table = sa.Table('role-user', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('dn', sa.String(length=128), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-user_role_id_role')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role-user')),
sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-user_dn'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
pass
def downgrade():
# The upgrade only adds names to existing constraints, so there is little to undo;
# just restore the oauth2grant code index that is lost when the table is recreated.
with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
"""Per-service email preferences
Revision ID: e13b733ec856
Revises: b273d7fdaa25
Create Date: 2022-10-17 02:13:11.598210
"""
from alembic import op
import sqlalchemy as sa
revision = 'e13b733ec856'
down_revision = 'b273d7fdaa25'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('service', schema=None) as batch_op:
batch_op.add_column(sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
with op.batch_alter_table('service_user', schema=None) as batch_op:
batch_op.add_column(sa.Column('service_email_id', sa.Integer(), nullable=True))
batch_op.create_foreign_key(batch_op.f('fk_service_user_service_email_id_user_email'), 'user_email', ['service_email_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
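# The server_default above is only needed so existing rows get a value; the
# service table is recreated below to drop the default again.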
meta = sa.MetaData(bind=op.get_bind())
service = sa.Table('service', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('access_group_id', sa.Integer(), nullable=True),
sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
sa.UniqueConstraint('name', name=op.f('uq_service_name'))
)
with op.batch_alter_table('service', copy_from=service) as batch_op:
batch_op.alter_column('enable_email_preferences', server_default=None)
def downgrade():
with op.batch_alter_table('service_user', schema=None) as batch_op:
batch_op.drop_constraint(batch_op.f('fk_service_user_service_email_id_user_email'), type_='foreignkey')
batch_op.drop_column('service_email_id')
with op.batch_alter_table('service', schema=None) as batch_op:
batch_op.drop_column('enable_email_preferences')
"""Remailer mode overwrite
Revision ID: e249233e2a31
Revises: aeb07202a6c8
Create Date: 2022-11-05 03:42:38.036623
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e249233e2a31'
down_revision = 'aeb07202a6c8'
branch_labels = None
depends_on = None
def upgrade():
meta = sa.MetaData(bind=op.get_bind())
service_user = sa.Table('service_user', meta,
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service_email_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
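# The new column is nullable: NULL presumably means "no per-user overwrite",
# i.e. the service-level remailer mode applies.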
batch_op.add_column(sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True))
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
service_user = sa.Table('service_user', meta,
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True),
sa.Column('service_email_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
batch_op.drop_column('remailer_overwrite_mode')
"""Migrate oauth2 state from user to session
Revision ID: e71e29cc605a
Revises: 99df71f0f4a0
Create Date: 2024-05-18 21:59:20.435912
"""
from alembic import op
import sqlalchemy as sa
revision = 'e71e29cc605a'
down_revision = '99df71f0f4a0'
branch_labels = None
depends_on = None
def upgrade():
op.drop_table('oauth2grant')
op.drop_table('oauth2token')
op.create_table('oauth2grant',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('expires', sa.DateTime(), nullable=False),
sa.Column('session_id', sa.Integer(), nullable=False),
sa.Column('client_db_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('redirect_uri', sa.String(length=255), nullable=True),
sa.Column('nonce', sa.Text(), nullable=True),
sa.Column('_scopes', sa.Text(), nullable=False),
sa.Column('claims', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
)
op.create_table('oauth2token',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('expires', sa.DateTime(), nullable=False),
sa.Column('session_id', sa.Integer(), nullable=False),
sa.Column('client_db_id', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('refresh_token', sa.String(length=255), nullable=False),
sa.Column('_scopes', sa.Text(), nullable=False),
sa.Column('claims', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
)
def downgrade():
# We don't drop and recreate the tables here, in order to improve fuzzy migration test coverage
meta = sa.MetaData(bind=op.get_bind())
session = sa.table('session',
sa.column('id', sa.Integer),
sa.column('user_id', sa.Integer()),
)
with op.batch_alter_table('oauth2token', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
oauth2token = sa.Table('oauth2token', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('expires', sa.DateTime(), nullable=False),
sa.Column('session_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('client_db_id', sa.Integer(), nullable=False),
sa.Column('token_type', sa.String(length=40), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('refresh_token', sa.String(length=255), nullable=False),
sa.Column('_scopes', sa.Text(), nullable=False),
sa.Column('claims', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
)
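# Backfill user_id from the owning session, then drop any tokens whose session
# no longer exists, so the column can be made NOT NULL below.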
op.execute(oauth2token.update().values(user_id=sa.select([session.c.user_id]).where(oauth2token.c.session_id==session.c.id).as_scalar()))
op.execute(oauth2token.delete().where(oauth2token.c.user_id==None))
with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
batch_op.create_foreign_key('fk_oauth2token_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
batch_op.drop_constraint(batch_op.f('fk_oauth2token_session_id_session'), type_='foreignkey')
batch_op.drop_column('session_id')
with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
oauth2grant = sa.Table('oauth2grant', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('expires', sa.DateTime(), nullable=False),
sa.Column('session_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('client_db_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('redirect_uri', sa.String(length=255), nullable=True),
sa.Column('nonce', sa.Text(), nullable=True),
sa.Column('_scopes', sa.Text(), nullable=False),
sa.Column('claims', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
)
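# Same procedure for oauth2grant: backfill user_id from the session, delete
# orphaned grants, then replace session_id with the user foreign key.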
op.execute(oauth2grant.update().values(user_id=sa.select([session.c.user_id]).where(oauth2grant.c.session_id==session.c.id).as_scalar()))
op.execute(oauth2grant.delete().where(oauth2grant.c.user_id==None))
with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
batch_op.create_foreign_key('fk_oauth2grant_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
batch_op.drop_constraint(batch_op.f('fk_oauth2grant_session_id_session'), type_='foreignkey')
batch_op.drop_column('session_id')
"""Add id to selfservice tokens
Revision ID: e9a67175e179
Revises: a8c6b6e91c28
Create Date: 2021-09-06 22:04:46.741233
"""
from alembic import op
import sqlalchemy as sa
revision = 'e9a67175e179'
down_revision = 'a8c6b6e91c28'
branch_labels = None
depends_on = None
def upgrade():
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('mailToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.Column('newmail', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
)
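# Replace the token primary key with an autoincrementing id on both selfservice
# token tables; the id column is added as nullable first and only made NOT NULL
# once it is the primary key.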
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint('pk_mailToken', 'primary')
batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
batch_op.create_primary_key('pk_mailToken', ['id'])
batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
table = sa.Table('passwordToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint('pk_passwordToken', 'primary')
batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
batch_op.create_primary_key('pk_passwordToken', ['id'])
batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('mailToken', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.Column('newmail', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
)
with op.batch_alter_table(table.name, copy_from=table) as batch_op:
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint('pk_mailToken', 'primary')
batch_op.create_primary_key('pk_mailToken', ['token'])
batch_op.drop_column('id')
table = sa.Table('passwordToken', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
)
with op.batch_alter_table(table.name, copy_from=table) as batch_op:
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint('pk_passwordToken', 'primary')
batch_op.create_primary_key('pk_passwordToken', ['token'])
batch_op.drop_column('id')
"""Add ServiceUser
Revision ID: f2eb2c52a61f
Revises: 9f824f61d8ac
Create Date: 2022-08-21 00:42:37.896970
"""
from alembic import op
import sqlalchemy as sa
revision = 'f2eb2c52a61f'
down_revision = '9f824f61d8ac'
branch_labels = None
depends_on = None
def upgrade():
service_user = op.create_table('service_user',
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
service = sa.table('service', sa.column('id'))
user = sa.table('user', sa.column('id'))
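# Seed service_user with one row for every (service, user) pair via a cross
# join (join condition TRUE), so existing users keep access to all services.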
op.execute(service_user.insert().from_select(
['service_id', 'user_id'],
sa.select([service.c.id, user.c.id]).select_from(sa.join(service, user, sa.true()))
))
def downgrade():
op.drop_table('service_user')
from .api import APIClient
from .invite import Invite, InviteGrant, InviteSignup
from .mail import Mail, MailReceiveAddress, MailDestinationAddress
from .mfa import MFAType, MFAMethod, RecoveryCodeMethod, TOTPMethod, WebauthnMethod
from .oauth2 import OAuth2Client, OAuth2RedirectURI, OAuth2LogoutURI, OAuth2Grant, OAuth2Token, OAuth2DeviceLoginInitiation, OAuth2Key
from .role import Role, RoleGroup, RoleGroupMap
from .selfservice import PasswordToken
from .service import RemailerMode, Service, ServiceUser, get_services
from .session import Session, DeviceLoginType, DeviceLoginInitiation, DeviceLoginConfirmation
from .signup import Signup
from .user import User, UserEmail, Group, IDAllocator, IDRangeExhaustedError, IDAlreadyAllocatedError
from .ratelimit import RatelimitEvent, Ratelimit, HostRatelimit, host_ratelimit, format_delay
from .misc import FeatureFlag, Lock
__all__ = [
'APIClient',
'Invite', 'InviteGrant', 'InviteSignup',
'Mail', 'MailReceiveAddress', 'MailDestinationAddress',
'MFAType', 'MFAMethod', 'RecoveryCodeMethod', 'TOTPMethod', 'WebauthnMethod',
'OAuth2Client', 'OAuth2RedirectURI', 'OAuth2LogoutURI', 'OAuth2Grant', 'OAuth2Token', 'OAuth2DeviceLoginInitiation',
'Role', 'RoleGroup', 'RoleGroupMap',
'PasswordToken',
'RemailerMode', 'Service', 'ServiceUser', 'get_services',
'DeviceLoginType', 'DeviceLoginInitiation', 'DeviceLoginConfirmation',
'Signup',
'User', 'UserEmail', 'Group', 'IDAllocator', 'IDRangeExhaustedError', 'IDAlreadyAllocatedError',
'RatelimitEvent', 'Ratelimit', 'HostRatelimit', 'host_ratelimit', 'format_delay',
'FeatureFlag', 'Lock',
]
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Text
from sqlalchemy.orm import relationship
from uffd.database import db
from uffd.password_hash import PasswordHashAttribute, HighEntropyPasswordHash
class APIClient(db.Model):
__tablename__ = 'api_client'
id = Column(Integer, primary_key=True, autoincrement=True)
service_id = Column(Integer, ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
service = relationship('Service', back_populates='api_clients')
auth_username = Column(String(40), unique=True, nullable=False)
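# Assigning to auth_password hashes the value with HighEntropyPasswordHash and
# stores only the resulting hash in the underlying auth_password column.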
_auth_password = Column('auth_password', Text(), nullable=False)
auth_password = PasswordHashAttribute('_auth_password', HighEntropyPasswordHash)
# Permissions are defined by adding an attribute named "perm_NAME"
perm_users = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_checkpassword = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_mail_aliases = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_remailer = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_metrics = Column(Boolean(create_constraint=True), default=False, nullable=False)
@classmethod
def permission_exists(cls, name):
return hasattr(cls, 'perm_'+name)
def has_permission(self, name):
return getattr(self, 'perm_' + name)