"""added api permission for metrics
Revision ID: b8fbefca3675
Revises: f2eb2c52a61f
Create Date: 2022-08-22 21:30:19.265531
"""
from alembic import op
import sqlalchemy as sa
revision = 'b8fbefca3675'
down_revision = 'f2eb2c52a61f'
branch_labels = None
depends_on = None
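
# Adds the api_client.perm_metrics permission flag; server_default=false()
# provides a value for the rows that already exist when the table is rebuilt.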
def upgrade():
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.add_column(sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))

def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	api_client = sa.Table('api_client', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	with op.batch_alter_table('api_client', copy_from=api_client) as batch_op:
		batch_op.drop_column('perm_metrics')
"""Move API and OAuth2 clients to DB
Revision ID: b9d3f7dac9db
Revises: 09d2edcaf0cc
Create Date: 2022-02-17 21:14:00.440057
"""
import secrets
import hashlib
import base64
from alembic import op
import sqlalchemy as sa
from flask import current_app
revision = 'b9d3f7dac9db'
down_revision = '09d2edcaf0cc'
branch_labels = None
depends_on = None
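
# Secrets from the config file were stored in plain text; before being copied
# into the database they are hashed as '{sha512}' followed by the
# base64-encoded SHA-512 digest.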
def hash_sha512(password):
	ctx = hashlib.new('sha512', password.encode())
	return '{sha512}' + base64.b64encode(ctx.digest()).decode()

def upgrade():
	used_service_names = set()
	services = {} # name -> limit_access, access_group_name
	oauth2_clients = [] # service_name, client_id, client_secret, redirect_uris, logout_uris
	api_clients = [] # service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases
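	# First collect all explicitly configured service names, so that service
	# names derived from client ids below do not collide with them.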
	for opts in current_app.config.get('OAUTH2_CLIENTS', {}).values():
		if 'service_name' in opts:
			used_service_names.add(opts['service_name'])
	for opts in current_app.config.get('API_CLIENTS_2', {}).values():
		if 'service_name' in opts:
			used_service_names.add(opts['service_name'])
	for client_id, opts in current_app.config.get('OAUTH2_CLIENTS', {}).items():
		if 'client_secret' not in opts:
			continue
		if 'service_name' in opts:
			service_name = opts['service_name']
		else:
			service_name = client_id
			if service_name in used_service_names:
				service_name = 'oauth2_' + service_name
			if service_name in used_service_names:
				num = 1
				while (service_name + '_%d'%num) in used_service_names:
					num += 1
				service_name = service_name + '_%d'%num
		if opts.get('required_group') is None:
			limit_access = False
			access_group_name = None
		elif isinstance(opts.get('required_group'), str):
			limit_access = True
			access_group_name = opts['required_group']
		else:
			limit_access = True
			access_group_name = None
		client_secret = opts['client_secret']
		redirect_uris = opts.get('redirect_uris') or []
		logout_uris = []
		for item in opts.get('logout_urls') or []:
			if isinstance(item, str):
				logout_uris.append(('GET', item))
			else:
				logout_uris.append(item)
		used_service_names.add(service_name)
		if service_name not in services or services[service_name] == (False, None):
			services[service_name] = (limit_access, access_group_name)
		elif services[service_name] == (limit_access, access_group_name):
			pass
		else:
			services[service_name] = (True, None)
		oauth2_clients.append((service_name, client_id, client_secret, redirect_uris, logout_uris))
	for client_id, opts in current_app.config.get('API_CLIENTS_2', {}).items():
		if 'client_secret' not in opts:
			continue
		if 'service_name' in opts:
			service_name = opts['service_name']
		else:
			service_name = 'api_' + client_id
			if service_name in used_service_names:
				num = 1
				while (service_name + '_%d'%num) in used_service_names:
					num += 1
				service_name = service_name + '_%d'%num
		auth_username = client_id
		auth_password = opts['client_secret']
		perm_users = 'getusers' in opts.get('scopes', [])
		perm_checkpassword = 'checkpassword' in opts.get('scopes', [])
		perm_mail_aliases = 'getmails' in opts.get('scopes', [])
		if service_name not in services:
			services[service_name] = (False, None)
		api_clients.append((service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases))
	meta = sa.MetaData(bind=op.get_bind())
	service_table = op.create_table('service',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	group_table = sa.table('group',
		sa.column('id'),
		sa.column('name'),
	)
	for service_name, args in services.items():
		limit_access, access_group_name = args
		op.execute(service_table.insert().values(name=service_name, limit_access=limit_access, access_group_id=sa.select([group_table.c.id]).where(group_table.c.name==access_group_name).as_scalar()))
	api_client_table = op.create_table('api_client',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	for service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases in api_clients:
		op.execute(api_client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), auth_username=auth_username, auth_password=hash_sha512(auth_password), perm_users=perm_users, perm_checkpassword=perm_checkpassword, perm_mail_aliases=perm_mail_aliases))
	oauth2client_table = op.create_table('oauth2client',
		sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_secret', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
		sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
	)
	oauth2logout_uri_table = op.create_table('oauth2logout_uri',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('method', sa.String(length=40), nullable=False),
		sa.Column('uri', sa.String(length=255), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2logout_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2logout_uri'))
	)
	oauth2redirect_uri_table = op.create_table('oauth2redirect_uri',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('uri', sa.String(length=255), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2redirect_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2redirect_uri'))
	)
	for service_name, client_id, client_secret, redirect_uris, logout_uris in oauth2_clients:
		op.execute(oauth2client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), client_id=client_id, client_secret=hash_sha512(client_secret)))
		for method, uri, in logout_uris:
			op.execute(oauth2logout_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), method=method, uri=uri))
		for uri in redirect_uris:
			op.execute(oauth2redirect_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), uri=uri))
	with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), 'oauth2client', ['oauth2_client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	device_login_initiation_table = sa.Table('device_login_initiation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.Column('secret', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
		sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
		sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
		sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
	)
	op.execute(device_login_initiation_table.update().values(oauth2_client_db_id=sa.select([oauth2client_table.c.db_id]).where(device_login_initiation_table.c.oauth2_client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_db_id==None))
	with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
		batch_op.drop_column('oauth2_client_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	oauth2grant_table = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=True),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2grant_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_db_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
		batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('client_id')
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	oauth2token_table = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=True),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2token_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_db_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
		batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('client_id')
def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	oauth2client_table = sa.Table('oauth2client', meta,
		sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_secret', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
		sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
	)
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
	oauth2token_table = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2token_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
		batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
		batch_op.drop_constraint(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('client_db_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
	oauth2grant_table = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2grant_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
		batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
		batch_op.drop_constraint(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('client_db_id')
	with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('oauth2_client_id', sa.VARCHAR(length=40), nullable=True))
	device_login_initiation_table = sa.Table('device_login_initiation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.Column('secret', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
		sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
		sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
		sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
	)
	op.execute(device_login_initiation_table.update().values(oauth2_client_id=sa.select([oauth2client_table.c.client_id]).where(device_login_initiation_table.c.oauth2_client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_id==None))
	with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('oauth2_client_db_id')
	op.drop_table('oauth2redirect_uri')
	op.drop_table('oauth2logout_uri')
	op.drop_table('oauth2client')
	op.drop_table('api_client')
	op.drop_table('service')
"""added RoleGroup.requires_mfa and cleanup
Revision ID: bad6fc529510
Revises: aff5f350dcdf
Create Date: 2021-06-22 15:58:10.515330
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bad6fc529510'
down_revision = 'aff5f350dcdf'
branch_labels = None
depends_on = None
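
# Rebuilds the role-group table: the surrogate id primary key and the
# dn/role_id unique constraint are dropped in favour of a composite
# (role_id, group_dn) primary key, and the requires_mfa flag is added.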
def upgrade():
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('role-group', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table) as batch_op:
		batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
		batch_op.drop_constraint(batch_op.f('pk_role-group'), type_='primary')
		batch_op.drop_constraint(batch_op.f('uq_role-group_dn'), type_='unique')
		batch_op.drop_column('id')
		batch_op.alter_column('dn', new_column_name='group_dn', nullable=False, existing_type=sa.String(128))
		batch_op.alter_column('role_id', nullable=False, existing_type=sa.Integer())
		batch_op.add_column(sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False))
		batch_op.create_primary_key(batch_op.f('pk_role-group'), ['role_id', 'group_dn'])

def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('role-group', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('group_dn', sa.String(128), nullable=False),
		sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
		sa.PrimaryKeyConstraint('role_id', 'group_dn', name=op.f('pk_role-group'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		# For some reason MySQL does not allow us to drop the primary key if the foreignkey on role_id exists
		batch_op.drop_constraint(batch_op.f('fk_role-group_role_id_role'), type_='foreignkey')
		batch_op.drop_constraint(batch_op.f('pk_role-group'), type_='primary')
		batch_op.drop_column('requires_mfa')
		batch_op.alter_column('role_id', nullable=True, existing_type=sa.Integer())
		batch_op.alter_column('group_dn', new_column_name='dn', nullable=True, existing_type=sa.String(128))
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.create_primary_key(batch_op.f('pk_role-group'), ['id'])
		batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
		# For some reason MySQL ignores this statement
		#batch_op.create_unique_constraint(op.f('uq_role-group_dn'), ['dn', 'role_id'])
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('role-group', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
"""Add id to signup table
Revision ID: bf71799b7b9e
Revises: e9a67175e179
Create Date: 2021-09-06 23:30:07.486102
"""
from alembic import op
import sqlalchemy as sa
revision = 'bf71799b7b9e'
down_revision = 'e9a67175e179'
branch_labels = None
depends_on = None
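
# Gives signup an autoincrement id primary key (instead of token) and changes
# invite_signup to reference signup by that id; both tables are rebuilt via
# batch_alter_table.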
def upgrade():
	meta = sa.MetaData(bind=op.get_bind())
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
		sa.ForeignKeyConstraint(['token'], ['signup.token'], name=op.f('fk_invite_signup_token_signup')),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.drop_constraint('fk_invite_signup_token_signup', 'foreignkey')
	meta = sa.MetaData(bind=op.get_bind())
	signup = sa.Table('signup', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_signup'))
	)
	with op.batch_alter_table(signup.name, copy_from=signup, recreate='always') as batch_op:
		batch_op.drop_constraint('pk_signup', 'primary')
		batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
		batch_op.create_primary_key('pk_signup', ['id'])
		batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
	meta = sa.MetaData(bind=op.get_bind())
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
	)
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
	)
	op.execute(invite_signup.update().values(id=sa.select([signup.c.id]).where(signup.c.token==invite_signup.c.token).limit(1).as_scalar()))
	with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
		batch_op.alter_column('id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_id_signup'), 'signup', ['id'], ['id'])
		batch_op.drop_constraint('pk_invite_signup', 'primary')
		batch_op.drop_column('token')
		batch_op.create_primary_key('pk_invite_signup', ['id'])

def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
		sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
		batch_op.add_column(sa.Column('token', sa.VARCHAR(length=128), nullable=True))
		batch_op.drop_constraint('fk_invite_signup_id_signup', type_='foreignkey')
	meta = sa.MetaData(bind=op.get_bind())
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
	)
	with op.batch_alter_table(signup.name, copy_from=signup) as batch_op:
		batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
		batch_op.drop_constraint('pk_signup', 'primary')
		batch_op.create_primary_key('pk_signup', ['token'])
	meta = sa.MetaData(bind=op.get_bind())
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
	)
	invite_signup = sa.Table('invite_signup', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('invite_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_id'], ['invite.id'], name=op.f('fk_invite_signup_invite_id_invite')),
		sa.ForeignKeyConstraint(['id'], ['signup.id'], name=op.f('fk_invite_signup_id_signup')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_signup'))
	)
	op.execute(invite_signup.update().values(token=sa.select([signup.c.token]).where(signup.c.id==invite_signup.c.id).limit(1).as_scalar()))
	with op.batch_alter_table(invite_signup.name, copy_from=invite_signup) as batch_op:
		batch_op.create_foreign_key(batch_op.f('fk_invite_signup_token_signup'), 'signup', ['token'], ['token'])
		batch_op.drop_constraint('pk_invite_signup', 'primary')
		batch_op.drop_column('id')
		batch_op.create_primary_key('pk_invite_signup', ['token'])
	meta = sa.MetaData(bind=op.get_bind())
	signup = sa.Table('signup', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_signup'))
	)
	with op.batch_alter_table(signup.name, copy_from=signup) as batch_op:
		batch_op.drop_column('id')
"""constraint name fixes
Revision ID: cbca20cf64d9
Revises:
Create Date: 2021-04-13 18:10:58.210232
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cbca20cf64d9'
down_revision = '5a07d4a63b64'
branch_labels = None
depends_on = None
def upgrade():
	# This migration recreates all tables with identical columns and constraints.
	# The only difference is that all constraints are named according to the newly
	# defined naming conventions. This enables changing constraints in future
	# migrations.
	#
	# We call batch_alter_table without any operations to have it recreate all
	# tables with the column/constraint definitions from "table" and populate it
	# with the data from the original table.

	# First recreate tables that have (unnamed) foreign keys without any foreign keys
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite_grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_roles', meta,
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.PrimaryKeyConstraint('invite_token', 'role_id', name=op.f('pk_invite_roles'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_signup', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-group', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-inclusion', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-user')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-user_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass

	# Then recreate all tables with properly named constraints and readd foreign key constraints
	meta = sa.MetaData(bind=op.get_bind())
	table = sa.Table('invite', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('valid_until', sa.DateTime(), nullable=False),
		sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
		sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
		sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
		sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('mailToken', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.Column('newmail', sa.String(length=255), nullable=True),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('mfa_method', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='mfatype'), nullable=True),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('recovery_salt', sa.String(length=64), nullable=True),
		sa.Column('recovery_hash', sa.String(length=256), nullable=True),
		sa.Column('totp_key', sa.String(length=64), nullable=True),
		sa.Column('webauthn_cred', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mfa_method'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('token_type', sa.String(length=40), nullable=True),
		sa.Column('access_token', sa.String(length=255), nullable=True),
		sa.Column('refresh_token', sa.String(length=255), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('passwordToken', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=True),
		sa.Column('loginname', sa.String(length=32), nullable=True),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('ratelimit_event', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('timestamp', sa.DateTime(), nullable=True),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('key', sa.String(length=128), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=32), nullable=True),
		sa.Column('description', sa.Text(), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
		sa.UniqueConstraint('name', name=op.f('uq_role_name'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('signup', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('loginname', sa.Text(), nullable=True),
		sa.Column('displayname', sa.Text(), nullable=True),
		sa.Column('mail', sa.Text(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('user_dn', sa.String(length=128), nullable=True),
		sa.Column('type', sa.String(length=50), nullable=True),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_signup'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.Column('user_dn', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_grant_invite_token_invite')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_invite_grant'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_roles', meta,
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_roles_invite_token_invite')),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_invite_roles_role_id_role')),
		sa.PrimaryKeyConstraint('invite_token', 'role_id', name=op.f('pk_invite_roles'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('invite_signup', meta,
		sa.Column('token', sa.String(length=128), nullable=False),
		sa.Column('invite_token', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['invite_token'], ['invite.token'], name=op.f('fk_invite_signup_invite_token_invite')),
		sa.ForeignKeyConstraint(['token'], ['signup.token'], name=op.f('fk_invite_signup_token_signup')),
		sa.PrimaryKeyConstraint('token', name=op.f('pk_invite_signup'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-group', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-group')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-group_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-inclusion', meta,
		sa.Column('role_id', sa.Integer(), nullable=False),
		sa.Column('included_role_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['included_role_id'], ['role.id'], name=op.f('fk_role-inclusion_included_role_id_role')),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-inclusion_role_id_role')),
		sa.PrimaryKeyConstraint('role_id', 'included_role_id', name=op.f('pk_role-inclusion'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
	table = sa.Table('role-user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('dn', sa.String(length=128), nullable=True),
		sa.Column('role_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-user_role_id_role')),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_role-user')),
		sa.UniqueConstraint('dn', 'role_id', name=op.f('uq_role-user_dn'))
	)
	with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
		pass
def downgrade():
	# upgrade only adds names to all constraints, no need to undo much
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
"""Per-service email preferences
Revision ID: e13b733ec856
Revises: b273d7fdaa25
Create Date: 2022-10-17 02:13:11.598210
"""
from alembic import op
import sqlalchemy as sa
revision = 'e13b733ec856'
down_revision = 'b273d7fdaa25'
branch_labels = None
depends_on = None
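
# Adds service.enable_email_preferences and service_user.service_email_id.
# The temporary server_default on enable_email_preferences fills existing
# rows and is removed again once the table has been rebuilt.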
def upgrade():
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.add_column(sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	with op.batch_alter_table('service_user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('service_email_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_service_user_service_email_id_user_email'), 'user_email', ['service_email_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
	meta = sa.MetaData(bind=op.get_bind())
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		batch_op.alter_column('enable_email_preferences', server_default=None)

def downgrade():
	with op.batch_alter_table('service_user', schema=None) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_service_user_service_email_id_user_email'), type_='foreignkey')
		batch_op.drop_column('service_email_id')
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.drop_column('enable_email_preferences')
"""Remailer mode overwrite
Revision ID: e249233e2a31
Revises: aeb07202a6c8
Create Date: 2022-11-05 03:42:38.036623
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e249233e2a31'
down_revision = 'aeb07202a6c8'
branch_labels = None
depends_on = None
def upgrade():
meta = sa.MetaData(bind=op.get_bind())
service_user = sa.Table('service_user', meta,
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service_email_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
batch_op.add_column(sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True))
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
service_user = sa.Table('service_user', meta,
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True),
sa.Column('service_email_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
batch_op.drop_column('remailer_overwrite_mode')
"""Migrate oauth2 state from user to session
Revision ID: e71e29cc605a
Revises: 99df71f0f4a0
Create Date: 2024-05-18 21:59:20.435912
"""
from alembic import op
import sqlalchemy as sa
revision = 'e71e29cc605a'
down_revision = '99df71f0f4a0'
branch_labels = None
depends_on = None
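
# Grants and tokens now belong to a login session instead of directly to a
# user. Existing rows are not migrated: the old tables are dropped and
# recreated with a session_id foreign key.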
def upgrade():
	op.drop_table('oauth2grant')
	op.drop_table('oauth2token')
	op.create_table('oauth2grant',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	op.create_table('oauth2token',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)

def downgrade():
	# We don't drop and recreate the table here to improve fuzzy migration test coverage
	meta = sa.MetaData(bind=op.get_bind())
	session = sa.table('session',
		sa.column('id', sa.Integer),
		sa.column('user_id', sa.Integer()),
	)
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token.update().values(user_id=sa.select([session.c.user_id]).where(oauth2token.c.session_id==session.c.id).as_scalar()))
	op.execute(oauth2token.delete().where(oauth2token.c.user_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key('fk_oauth2token_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_constraint(batch_op.f('fk_oauth2token_session_id_session'), type_='foreignkey')
		batch_op.drop_column('session_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	op.execute(oauth2grant.update().values(user_id=sa.select([session.c.user_id]).where(oauth2grant.c.session_id==session.c.id).as_scalar()))
	op.execute(oauth2grant.delete().where(oauth2grant.c.user_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key('fk_oauth2grant_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_constraint(batch_op.f('fk_oauth2grant_session_id_session'), type_='foreignkey')
		batch_op.drop_column('session_id')
"""Add id to selfservice tokens
Revision ID: e9a67175e179
Revises: a8c6b6e91c28
Create Date: 2021-09-06 22:04:46.741233
"""
from alembic import op
import sqlalchemy as sa
revision = 'e9a67175e179'
down_revision = 'a8c6b6e91c28'
branch_labels = None
depends_on = None
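
# Replaces the token primary keys of mailToken and passwordToken with
# autoincrement id columns; the token value stays as a regular column.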
def upgrade():
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('mailToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.Column('newmail', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
)
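# The table is recreated (recreate='always') because SQLite cannot add an
# AUTOINCREMENT primary key to an existing table in place: drop the old
# token-based primary key, add the id column, promote it to the primary key,
# then mark it as autoincrementing.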
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint('pk_mailToken', 'primary')
batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
batch_op.create_primary_key('pk_mailToken', ['id'])
batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
table = sa.Table('passwordToken', meta,
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
batch_op.drop_constraint('pk_passwordToken', 'primary')
batch_op.add_column(sa.Column('id', sa.Integer(), nullable=True))
batch_op.create_primary_key('pk_passwordToken', ['id'])
batch_op.alter_column('id', autoincrement=True, nullable=False, existing_type=sa.Integer())
def downgrade():
meta = sa.MetaData(bind=op.get_bind())
table = sa.Table('mailToken', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.Column('newmail', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_mailToken'))
)
with op.batch_alter_table(table.name, copy_from=table) as batch_op:
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint('pk_mailToken', 'primary')
batch_op.create_primary_key('pk_mailToken', ['token'])
batch_op.drop_column('id')
table = sa.Table('passwordToken', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('loginname', sa.String(length=32), nullable=True),
sa.PrimaryKeyConstraint('token', name=op.f('pk_passwordToken'))
)
with op.batch_alter_table(table.name, copy_from=table) as batch_op:
batch_op.alter_column('id', autoincrement=False, existing_type=sa.Integer())
batch_op.drop_constraint('pk_passwordToken', 'primary')
batch_op.create_primary_key('pk_passwordToken', ['token'])
batch_op.drop_column('id')
"""Add ServiceUser
Revision ID: f2eb2c52a61f
Revises: 9f824f61d8ac
Create Date: 2022-08-21 00:42:37.896970
"""
from alembic import op
import sqlalchemy as sa
revision = 'f2eb2c52a61f'
down_revision = '9f824f61d8ac'
branch_labels = None
depends_on = None
def upgrade():
service_user = op.create_table('service_user',
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
)
service = sa.table('service', sa.column('id'))
user = sa.table('user', sa.column('id'))
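# Backfill: insert one row per (service, user) combination by cross-joining the
# two tables, so every existing user is initially known to every service.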
op.execute(service_user.insert().from_select(
['service_id', 'user_id'],
sa.select([service.c.id, user.c.id]).select_from(sa.join(service, user, sa.true()))
))
def downgrade():
op.drop_table('service_user')
from .api import APIClient
from .invite import Invite, InviteGrant, InviteSignup
from .mail import Mail, MailReceiveAddress, MailDestinationAddress
from .mfa import MFAType, MFAMethod, RecoveryCodeMethod, TOTPMethod, WebauthnMethod
from .oauth2 import OAuth2Client, OAuth2RedirectURI, OAuth2LogoutURI, OAuth2Grant, OAuth2Token, OAuth2DeviceLoginInitiation, OAuth2Key
from .role import Role, RoleGroup, RoleGroupMap
from .selfservice import PasswordToken
from .service import RemailerMode, Service, ServiceUser, get_services
from .session import Session, DeviceLoginType, DeviceLoginInitiation, DeviceLoginConfirmation
from .signup import Signup
from .user import User, UserEmail, Group, IDAllocator, IDRangeExhaustedError, IDAlreadyAllocatedError
from .ratelimit import RatelimitEvent, Ratelimit, HostRatelimit, host_ratelimit, format_delay
from .misc import FeatureFlag, Lock
__all__ = [
'APIClient',
'Invite', 'InviteGrant', 'InviteSignup',
'Mail', 'MailReceiveAddress', 'MailDestinationAddress',
'MFAType', 'MFAMethod', 'RecoveryCodeMethod', 'TOTPMethod', 'WebauthnMethod',
'OAuth2Client', 'OAuth2RedirectURI', 'OAuth2LogoutURI', 'OAuth2Grant', 'OAuth2Token', 'OAuth2DeviceLoginInitiation',
'Role', 'RoleGroup', 'RoleGroupMap',
'PasswordToken',
'RemailerMode', 'Service', 'ServiceUser', 'get_services',
'DeviceLoginType', 'DeviceLoginInitiation', 'DeviceLoginConfirmation',
'Signup',
'User', 'UserEmail', 'Group', 'IDAllocator', 'IDRangeExhaustedError', 'IDAlreadyAllocatedError',
'RatelimitEvent', 'Ratelimit', 'HostRatelimit', 'host_ratelimit', 'format_delay',
'FeatureFlag', 'Lock',
]
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Text
from sqlalchemy.orm import relationship
from uffd.database import db
from uffd.password_hash import PasswordHashAttribute, HighEntropyPasswordHash
class APIClient(db.Model):
__tablename__ = 'api_client'
id = Column(Integer, primary_key=True, autoincrement=True)
service_id = Column(Integer, ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
service = relationship('Service', back_populates='api_clients')
auth_username = Column(String(40), unique=True, nullable=False)
_auth_password = Column('auth_password', Text(), nullable=False)
auth_password = PasswordHashAttribute('_auth_password', HighEntropyPasswordHash)
# Permissions are defined by adding an attribute named "perm_NAME"
perm_users = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_checkpassword = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_mail_aliases = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_remailer = Column(Boolean(create_constraint=True), default=False, nullable=False)
perm_metrics = Column(Boolean(create_constraint=True), default=False, nullable=False)
@classmethod
def permission_exists(cls, name):
return hasattr(cls, 'perm_'+name)
def has_permission(self, name):
return getattr(self, 'perm_' + name)
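# Example (illustrative sketch, not part of uffd): how the perm_* naming
# convention above is typically consumed when authorizing an API request.
# The function name is hypothetical.
def _example_check_api_permission(client, name):
    # An unknown permission name is a programming error, not a denied request
    if not APIClient.permission_exists(name):
        raise ValueError(f'unknown API permission: {name}')
    # Each permission is a boolean column named perm_<name> on APIClient
    return client.has_permission(name)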
import datetime
from flask_babel import gettext as _
from flask import current_app
from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Boolean
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from uffd.utils import token_urlfriendly
from uffd.database import db
from .signup import Signup
invite_roles = db.Table('invite_roles',
Column('invite_id', Integer(), ForeignKey('invite.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True),
Column('role_id', Integer, ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
)
class Invite(db.Model):
__tablename__ = 'invite'
id = Column(Integer(), primary_key=True, autoincrement=True)
token = Column(String(128), unique=True, nullable=False, default=token_urlfriendly)
created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
creator_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE'), nullable=True)
creator = relationship('User')
valid_until = Column(DateTime, nullable=False)
single_use = Column(Boolean(create_constraint=True), default=True, nullable=False)
allow_signup = Column(Boolean(create_constraint=True), default=True, nullable=False)
used = Column(Boolean(create_constraint=True), default=False, nullable=False)
disabled = Column(Boolean(create_constraint=True), default=False, nullable=False)
roles = relationship('Role', secondary=invite_roles)
signups = relationship('InviteSignup', back_populates='invite', lazy=True, cascade='all, delete-orphan')
grants = relationship('InviteGrant', back_populates='invite', lazy=True, cascade='all, delete-orphan')
@hybrid_property
def expired(self):
return self.valid_until < datetime.datetime.utcnow().replace(second=0, microsecond=0)
@hybrid_property
def voided(self):
return self.single_use and self.used
@property
def permitted(self):
if self.creator is None:
return False # Creator does not exist (anymore)
if self.creator.is_deactivated:
return False
if self.creator.is_in_group(current_app.config['ACL_ADMIN_GROUP']):
return True
if self.allow_signup and not self.creator.is_in_group(current_app.config['ACL_SIGNUP_GROUP']):
return False
for role in self.roles:
if role.moderator_group is None or role.moderator_group not in self.creator.groups:
return False
return True
@property
def active(self):
return not self.disabled and not self.voided and not self.expired and self.permitted
@property
def short_token(self):
if len(self.token) < 30:
return '<too short>'
return self.token[:10] + '…'
def disable(self):
self.disabled = True
def reset(self):
self.disabled = False
self.used = False
class InviteGrant(db.Model):
__tablename__ = 'invite_grant'
id = Column(Integer(), primary_key=True, autoincrement=True)
invite_id = Column(Integer(), ForeignKey('invite.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
invite = relationship('Invite', back_populates='grants')
user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
user = relationship('User')
def apply(self):
if not self.invite.active:
return False, _('Invite link is invalid')
if not self.invite.roles:
return False, _('Invite link does not grant any roles')
if set(self.invite.roles).issubset(self.user.roles):
return False, _('Invite link does not grant any new roles')
for role in self.invite.roles:
self.user.roles.append(role)
self.user.update_groups()
self.invite.used = True
return True, _('Success')
class InviteSignup(Signup):
__tablename__ = 'invite_signup'
id = Column(Integer(), ForeignKey('signup.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
invite_id = Column(Integer(), ForeignKey('invite.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
invite = relationship('Invite', back_populates='signups')
__mapper_args__ = {
'polymorphic_identity': 'InviteSignup'
}
def validate(self):
if not self.invite.active or not self.invite.allow_signup:
return False, _('Invite link is invalid')
return super().validate()
def finish(self, password):
if not self.invite.active or not self.invite.allow_signup:
return None, _('Invite link is invalid')
user, msg = super().finish(password)
if user is not None:
for role in self.invite.roles:
user.roles.append(role)
user.update_groups()
self.invite.used = True
return user, msg
import re
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.associationproxy import association_proxy
from uffd.database import db
class Mail(db.Model):
# Aliases are looked up by receiver addresses with api.getmails. To emulate
# the pre-v2/LDAP behaviour, the lookup needs to be case-insensitive. To not
# rely on database-specific behaviour, we ensure that all receiver addresses
# are stored lower-case and convert incoming addresses in api.getmails to
# lower-case. Note that full emulation of LDAP behaviour would also require
# whitespace normalization. Instead we disallow spaces in receiver addresses.
# Match ASCII code points 33 (!) to 64 (@) and 91 ([) to 126 (~), i.e. any
# number of lower-case ASCII letters, digits, symbols
RECEIVER_REGEX = '[!-@[-~]*'
RECEIVER_REGEX_COMPILED = re.compile(RECEIVER_REGEX)
__tablename__ = 'mail'
id = Column(Integer(), primary_key=True, autoincrement=True)
uid = Column(String(32), unique=True, nullable=False)
_receivers = relationship('MailReceiveAddress', cascade='all, delete-orphan')
receivers = association_proxy('_receivers', 'address')
_destinations = relationship('MailDestinationAddress', cascade='all, delete-orphan')
destinations = association_proxy('_destinations', 'address')
@property
def invalid_receivers(self):
return [addr for addr in self.receivers if not re.fullmatch(self.RECEIVER_REGEX_COMPILED, addr)]
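# Example (illustrative sketch, not part of uffd): the normalization a lookup by
# receiver address is expected to perform, per the comment on Mail above. The
# function name is hypothetical.
def _example_normalize_receiver(address):
    # Stored receiver addresses are lower-case, so incoming addresses are
    # lower-cased before comparison; addresses containing spaces or other
    # characters outside RECEIVER_REGEX are rejected.
    address = address.lower()
    if not Mail.RECEIVER_REGEX_COMPILED.fullmatch(address):
        return None
    return address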
class MailReceiveAddress(db.Model):
__tablename__ = 'mail_receive_address'
id = Column(Integer(), primary_key=True, autoincrement=True)
mail_id = Column(Integer(), ForeignKey('mail.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
address = Column(String(128), nullable=False)
def __init__(self, address):
self.address = address
class MailDestinationAddress(db.Model):
__tablename__ = 'mail_destination_address'
id = Column(Integer(), primary_key=True, autoincrement=True)
mail_id = Column(Integer(), ForeignKey('mail.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
address = Column(String(128), nullable=False)
def __init__(self, address):
self.address = address
@@ -8,14 +8,19 @@ import hmac
import hashlib
import base64
import urllib.parse
# imports for recovery codes
import crypt
from flask import request, current_app
from sqlalchemy import Column, Integer, Enum, String, DateTime, Text
from sqlalchemy import Column, Integer, Enum, String, DateTime, Text, ForeignKey
from sqlalchemy.orm import relationship, backref
from uffd.utils import nopad_b32decode, nopad_b32encode
from uffd.password_hash import PasswordHashAttribute, CryptPasswordHash
from uffd.database import db
from uffd.user.models import User
from .user import User
User.mfa_recovery_codes = relationship('RecoveryCodeMethod', viewonly=True)
User.mfa_totp_methods = relationship('TOTPMethod', viewonly=True)
User.mfa_webauthn_methods = relationship('WebauthnMethod', viewonly=True)
User.mfa_enabled = property(lambda user: bool(user.mfa_totp_methods or user.mfa_webauthn_methods))
class MFAType(enum.Enum):
RECOVERY_CODE = 0
@@ -25,10 +30,11 @@ class MFAType(enum.Enum):
class MFAMethod(db.Model):
__tablename__ = 'mfa_method'
id = Column(Integer(), primary_key=True, autoincrement=True)
type = Column(Enum(MFAType))
created = Column(DateTime())
type = Column(Enum(MFAType, create_constraint=True), nullable=False)
created = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)
name = Column(String(128))
dn = Column(String(128))
user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
user = relationship('User', backref=backref('mfa_methods', cascade='all, delete-orphan'))
__mapper_args__ = {
'polymorphic_on': type,
@@ -37,19 +43,11 @@ class MFAMethod(db.Model):
def __init__(self, user, name=None):
self.user = user
self.name = name
self.created = datetime.datetime.now()
@property
def user(self):
return User.ldap_get(self.dn)
@user.setter
def user(self, new_user):
self.dn = new_user.dn
self.created = datetime.datetime.utcnow()
class RecoveryCodeMethod(MFAMethod):
code_salt = Column('recovery_salt', String(64))
code_hash = Column('recovery_hash', String(256))
_code = Column('recovery_hash', String(256))
code = PasswordHashAttribute('_code', CryptPasswordHash)
__mapper_args__ = {
'polymorphic_identity': MFAType.RECOVERY_CODE
@@ -57,14 +55,11 @@ class RecoveryCodeMethod(MFAMethod):
def __init__(self, user):
super().__init__(user, None)
# The code attribute is only available in newly created objects as only
# it's hash is stored in the database
self.code = secrets.token_hex(8).replace(' ', '').lower()
self.code_hash = crypt.crypt(self.code)
# self.code_value is not stored and only available on freshly initiated objects
self.code = self.code_value = secrets.token_hex(8).replace(' ', '').lower()
def verify(self, code):
code = code.replace(' ', '').lower()
return crypt.crypt(code, self.code_hash) == self.code_hash
return self.code.verify(code.replace(' ', '').lower())
def _hotp(counter, key, digits=6):
'''Generates HMAC-based one-time password according to RFC4226
@@ -82,6 +77,7 @@ def _hotp(counter, key, digits=6):
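# Reference sketch (illustrative, not the elided body of _hotp above): the
# RFC 4226 computation the docstring refers to, using the hmac/hashlib imports
# at the top of this module plus struct. Details such as the return type may
# differ from uffd's actual implementation.
def _example_hotp_rfc4226(counter, key, digits=6):
    import struct
    # 8-byte big-endian counter, HMAC-SHA1, then dynamic truncation (RFC 4226, 5.3)
    mac = hmac.new(key, struct.pack('>Q', counter), hashlib.sha1).digest()
    offset = mac[-1] & 0x0f
    value = int.from_bytes(mac[offset:offset + 4], 'big') & 0x7fffffff
    return str(value % (10 ** digits)).zfill(digits)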
class TOTPMethod(MFAMethod):
key = Column('totp_key', String(64))
last_counter = Column('totp_last_counter', Integer())
__mapper_args__ = {
'polymorphic_identity': MFAType.TOTP
@@ -90,13 +86,12 @@ class TOTPMethod(MFAMethod):
def __init__(self, user, name=None, key=None):
super().__init__(user, name)
if key is None:
key = base64.b32encode(secrets.token_bytes(16)).rstrip(b'=').decode()
key = nopad_b32encode(secrets.token_bytes(16)).decode()
self.key = key
@property
def raw_key(self):
tmp = self.key + '='*(8 - (len(self.key) % 8))
return base64.b32decode(tmp.encode())
return nopad_b32decode(self.key)
@property
def issuer(self):
@@ -123,8 +118,12 @@ class TOTPMethod(MFAMethod):
:param code: String of digits (as entered by the user)
:returns: True if code is valid, False otherwise'''
counter = int(time.time()/30)
if _hotp(counter-1, self.raw_key) == code or _hotp(counter, self.raw_key) == code:
current_counter = int(time.time()/30)
for counter in (current_counter - 1, current_counter):
if counter > (self.last_counter or 0):
valid_code = _hotp(counter, self.raw_key)
if secrets.compare_digest(code, valid_code):
self.last_counter = counter
return True
return False
@@ -141,7 +140,7 @@ class WebauthnMethod(MFAMethod):
@property
def cred(self):
from fido2.ctap2 import AttestedCredentialData #pylint: disable=import-outside-toplevel
from uffd.fido2_compat import AttestedCredentialData #pylint: disable=import-outside-toplevel
return AttestedCredentialData(base64.b64decode(self._cred))
@cred.setter
from uffd.database import db
# pylint completely fails to understand SQLAlchemy's query functions
# pylint: disable=no-member
feature_flag_table = db.Table('feature_flag',
db.Column('name', db.String(32), primary_key=True),
)
class FeatureFlag:
def __init__(self, name):
self.name = name
self.enable_hooks = []
self.disable_hooks = []
@property
def expr(self):
return db.exists().where(feature_flag_table.c.name == self.name)
def __bool__(self):
return db.session.execute(db.select([self.expr])).scalar()
def enable_hook(self, func):
self.enable_hooks.append(func)
return func
def enable(self):
db.session.execute(db.insert(feature_flag_table).values(name=self.name))
for func in self.enable_hooks:
func()
def disable_hook(self, func):
self.disable_hooks.append(func)
return func
def disable(self):
db.session.execute(db.delete(feature_flag_table).where(feature_flag_table.c.name == self.name))
for func in self.disable_hooks:
func()
FeatureFlag.unique_email_addresses = FeatureFlag('unique-email-addresses')
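# Example (illustrative sketch, not part of uffd): reacting to and toggling a
# flag. The hook and function names are hypothetical.
@FeatureFlag.unique_email_addresses.enable_hook
def _example_on_unique_email_addresses_enabled():
    # Runs inside FeatureFlag.enable(), after the flag row has been inserted
    pass

def _example_toggle_flag():
    # Truth-testing a flag issues an EXISTS query against feature_flag_table
    if not FeatureFlag.unique_email_addresses:
        FeatureFlag.unique_email_addresses.enable()  # also calls the hook above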
lock_table = db.Table('lock',
db.Column('name', db.String(32), primary_key=True),
)
class Lock:
ALL_LOCKS = set()
def __init__(self, name):
self.name = name
assert name not in self.ALL_LOCKS
self.ALL_LOCKS.add(name)
def acquire(self):
'''Acquire the lock until the end of the current transaction
Calling acquire while the specific lock is already held has no effect.'''
if db.engine.name == 'sqlite':
# SQLite does not support with_for_update, but we can lock the whole DB
# with any write operation. So we do a dummy update.
db.session.execute(db.update(lock_table).where(False).values(name=None))
elif db.engine.name in ('mysql', 'mariadb'):
result = db.session.execute(db.select([lock_table.c.name]).where(lock_table.c.name == self.name).with_for_update()).scalar()
if result is not None:
return
# We add all lock rows with migrations so we should never end up here
raise Exception(f'Lock "{self.name}" is missing')
else:
raise NotImplementedError()
# Only executed when lock_table is created with db.create/db.create_all (e.g.
# during testing). Otherwise the rows are inserted with migrations.
@db.event.listens_for(lock_table, 'after_create') # pylint: disable=no-member
def insert_lock_rows(target, connection, **kwargs): # pylint: disable=unused-argument
for name in Lock.ALL_LOCKS:
db.session.execute(db.insert(lock_table).values(name=name))
db.session.commit()
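# Example (illustrative sketch, not part of uffd): locks are declared once at
# import time and acquired inside the transaction that needs serialization; they
# are released implicitly when that transaction commits or rolls back. The lock
# name below is hypothetical (real lock rows are inserted by migrations).
_example_lock = Lock('example-lock')

def _example_serialized_operation():
    _example_lock.acquire()  # blocks concurrent workers until this transaction ends
    # ... perform the read-modify-write that must not race here ...
    db.session.commit()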
import datetime
import json
import secrets
import base64
from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey, Boolean
from sqlalchemy.orm import relationship
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.associationproxy import association_proxy
import jwt
from uffd.database import db, CommaSeparatedList
from uffd.tasks import cleanup_task
from uffd.password_hash import PasswordHashAttribute, HighEntropyPasswordHash
from uffd.utils import token_urlfriendly
from .session import DeviceLoginInitiation, DeviceLoginType
from .service import ServiceUser
# pyjwt v1.7.x compat (Buster/Bullseye)
if not hasattr(jwt, 'get_algorithm_by_name'):
jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]
class OAuth2Client(db.Model):
__tablename__ = 'oauth2client'
# Inconsistently named "db_id" instead of "id" because of the naming conflict
# with "client_id" in the OAuth2 standard
db_id = Column(Integer, primary_key=True, autoincrement=True)
service_id = Column(Integer, ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
service = relationship('Service', back_populates='oauth2_clients')
client_id = Column(String(40), unique=True, nullable=False)
_client_secret = Column('client_secret', Text(), nullable=False)
client_secret = PasswordHashAttribute('_client_secret', HighEntropyPasswordHash)
_redirect_uris = relationship('OAuth2RedirectURI', cascade='all, delete-orphan')
redirect_uris = association_proxy('_redirect_uris', 'uri')
logout_uris = relationship('OAuth2LogoutURI', cascade='all, delete-orphan')
@property
def default_redirect_uri(self):
return self.redirect_uris[0] if len(self.redirect_uris) == 1 else None
def access_allowed(self, user):
service_user = ServiceUser.query.get((self.service_id, user.id))
return service_user and service_user.has_access
@property
def logout_uris_json(self):
return json.dumps([[item.method, item.uri] for item in self.logout_uris])
class OAuth2RedirectURI(db.Model):
__tablename__ = 'oauth2redirect_uri'
id = Column(Integer, primary_key=True, autoincrement=True)
client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
uri = Column(String(255), nullable=False)
def __init__(self, uri):
self.uri = uri
class OAuth2LogoutURI(db.Model):
__tablename__ = 'oauth2logout_uri'
id = Column(Integer, primary_key=True, autoincrement=True)
client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
method = Column(String(40), nullable=False, default='GET')
uri = Column(String(255), nullable=False)
@cleanup_task.delete_by_attribute('expired')
class OAuth2Grant(db.Model):
__tablename__ = 'oauth2grant'
id = Column(Integer, primary_key=True, autoincrement=True)
EXPIRES_IN = 100
expires = Column(DateTime, nullable=False, default=lambda: datetime.datetime.utcnow() + datetime.timedelta(seconds=OAuth2Grant.EXPIRES_IN))
session_id = Column(Integer(), ForeignKey('session.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
session = relationship('Session')
client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
client = relationship('OAuth2Client')
_code = Column('code', String(255), nullable=False, default=token_urlfriendly)
code = property(lambda self: f'{self.id}-{self._code}')
redirect_uri = Column(String(255), nullable=True)
nonce = Column(Text(), nullable=True)
scopes = Column('_scopes', CommaSeparatedList(), nullable=False, default=tuple())
_claims = Column('claims', Text(), nullable=True)
@property
def claims(self):
return json.loads(self._claims) if self._claims is not None else None
@claims.setter
def claims(self, value):
self._claims = json.dumps(value) if value is not None else None
@property
def service_user(self):
return ServiceUser.query.get((self.client.service_id, self.session.user_id))
@hybrid_property
def expired(self):
if self.expires is None:
return False
return self.expires < datetime.datetime.utcnow()
@classmethod
def get_by_authorization_code(cls, code):
# pylint: disable=protected-access
if '-' not in code:
return None
grant_id, grant_code = code.split('-', 2)
grant = cls.query.filter_by(id=grant_id, expired=False).first()
if not grant or not secrets.compare_digest(grant._code, grant_code):
return None
if grant.session.expired or grant.session.user.is_deactivated:
return None
if not grant.service_user or not grant.service_user.has_access:
return None
return grant
def make_token(self, **kwargs):
return OAuth2Token(
session=self.session,
client=self.client,
scopes=self.scopes,
claims=self.claims,
**kwargs
)
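# Example (illustrative sketch, not part of uffd's view code): the authorization
# code handed to the client is "<grant id>-<random secret>"; lookup splits it
# again and compares the secret in constant time. The function name is
# hypothetical.
def _example_exchange_authorization_code(code):
    grant = OAuth2Grant.get_by_authorization_code(code)
    if grant is None:
        return None  # unknown id, wrong secret, expired grant/session or no access
    return grant.make_token()  # inherits session, client, scopes and claims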
# OAuth2Token objects are cleaned-up when the session expires and is
# auto-deleted (or the user manually revokes it).
class OAuth2Token(db.Model):
__tablename__ = 'oauth2token'
id = Column(Integer, primary_key=True, autoincrement=True)
EXPIRES_IN = 3600
expires = Column(DateTime, nullable=False, default=lambda: datetime.datetime.utcnow() + datetime.timedelta(seconds=OAuth2Token.EXPIRES_IN))
session_id = Column(Integer(), ForeignKey('session.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
session = relationship('Session')
client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
client = relationship('OAuth2Client')
# currently only bearer is supported
token_type = Column(String(40), nullable=False, default='bearer')
_access_token = Column('access_token', String(255), unique=True, nullable=False, default=token_urlfriendly)
access_token = property(lambda self: f'{self.id}-{self._access_token}')
_refresh_token = Column('refresh_token', String(255), unique=True, nullable=False, default=token_urlfriendly)
refresh_token = property(lambda self: f'{self.id}-{self._refresh_token}')
scopes = Column('_scopes', CommaSeparatedList(), nullable=False, default=tuple())
_claims = Column('claims', Text(), nullable=True)
@property
def claims(self):
return json.loads(self._claims) if self._claims is not None else None
@claims.setter
def claims(self, value):
self._claims = json.dumps(value) if value is not None else None
@property
def service_user(self):
return ServiceUser.query.get((self.client.service_id, self.session.user_id))
@hybrid_property
def expired(self):
return self.expires < datetime.datetime.utcnow()
@classmethod
def get_by_access_token(cls, access_token):
# pylint: disable=protected-access
if '-' not in access_token:
return None
token_id, token_secret = access_token.split('-', 2)
token = cls.query.filter_by(id=token_id, expired=False).first()
if not token or not secrets.compare_digest(token._access_token, token_secret):
return None
if token.session.expired or token.session.user.is_deactivated:
return None
if not token.service_user or not token.service_user.has_access:
return None
return token
class OAuth2DeviceLoginInitiation(DeviceLoginInitiation):
__mapper_args__ = {
'polymorphic_identity': DeviceLoginType.OAUTH2
}
client_db_id = Column('oauth2_client_db_id', Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'))
client = relationship('OAuth2Client')
@property
def description(self):
return self.client.service.name
class OAuth2Key(db.Model):
__tablename__ = 'oauth2_key'
id = Column(String(64), primary_key=True, default=token_urlfriendly)
created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
active = Column(Boolean(create_constraint=False), default=True, nullable=False)
algorithm = Column(String(32), nullable=False)
private_key_jwk = Column(Text(), nullable=False)
public_key_jwk = Column(Text(), nullable=False)
def __init__(self, **kwargs):
if kwargs.get('algorithm') and kwargs.get('private_key') \
and not kwargs.get('private_key_jwk') \
and not kwargs.get('public_key_jwk'):
algorithm = jwt.get_algorithm_by_name(kwargs['algorithm'])
private_key = kwargs.pop('private_key')
kwargs['private_key_jwk'] = algorithm.to_jwk(private_key)
kwargs['public_key_jwk'] = algorithm.to_jwk(private_key.public_key())
super().__init__(**kwargs)
@property
def private_key(self):
# pylint: disable=protected-access,import-outside-toplevel
# cryptography performs expensive checks when loading RSA private keys.
# Since we only load keys we generated ourselves with help of cryptography,
# these checks are unnecessary.
import cryptography.hazmat.backends.openssl
cryptography.hazmat.backends.openssl.backend._rsa_skip_check_key = True
res = jwt.get_algorithm_by_name(self.algorithm).from_jwk(self.private_key_jwk)
cryptography.hazmat.backends.openssl.backend._rsa_skip_check_key = False
return res
@property
def public_key(self):
return jwt.get_algorithm_by_name(self.algorithm).from_jwk(self.public_key_jwk)
@property
def public_key_jwks_dict(self):
res = json.loads(self.public_key_jwk)
res['kid'] = self.id
res['alg'] = self.algorithm
res['use'] = 'sig'
# RFC7517 4.3 "The "use" and "key_ops" JWK members SHOULD NOT be used together [...]"
res.pop('key_ops', None)
return res
def encode_jwt(self, payload):
if not self.active:
raise jwt.exceptions.InvalidKeyError(f'Key {self.id} not active')
res = jwt.encode(payload, key=self.private_key, algorithm=self.algorithm, headers={'kid': self.id})
# pyjwt pre-v2 compat (Buster/Bullseye)
if isinstance(res, bytes):
res = res.decode()
return res
# Hash algorithm for at_hash/c_hash from OpenID Connect Core 1.0 section 3.1.3.6
def oidc_hash(self, value):
# pylint: disable=import-outside-toplevel
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend # Only required for Buster
hash_alg = jwt.get_algorithm_by_name(self.algorithm).hash_alg
digest = hashes.Hash(hash_alg(), backend=default_backend())
digest.update(value)
return base64.urlsafe_b64encode(
digest.finalize()[:hash_alg.digest_size // 2]
).decode('ascii').rstrip('=')
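# Worked example (illustrative): for RS256 the JWS hash is SHA-256 (32 bytes),
# so oidc_hash(access_token) is the base64url encoding, without '=' padding,
# of the first 16 bytes of SHA-256 over the token's ASCII bytes; the result is
# what goes into the ID token's at_hash (or c_hash for authorization codes).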
@classmethod
def get_preferred_key(cls, algorithm='RS256'):
return cls.query.filter_by(active=True, algorithm=algorithm).order_by(OAuth2Key.created.desc()).first()
@classmethod
def get_available_algorithms(cls):
return ['RS256']
@classmethod
def decode_jwt(cls, data, algorithms=('RS256',), **kwargs):
headers = jwt.get_unverified_header(data)
if 'kid' not in headers:
raise jwt.exceptions.InvalidKeyError('JWT without kid')
kid = headers['kid']
key = cls.query.get(kid)
if not key:
raise jwt.exceptions.InvalidKeyError(f'Key {kid} not found')
if not key.active:
raise jwt.exceptions.InvalidKeyError(f'Key {kid} not active')
return jwt.decode(data, key=key.public_key, algorithms=algorithms, **kwargs)
@classmethod
def generate_rsa_key(cls, public_exponent=65537, key_size=3072):
# pylint: disable=import-outside-toplevel
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend # Only required for Buster
return cls(algorithm='RS256', private_key=rsa.generate_private_key(public_exponent=public_exponent, key_size=key_size, backend=default_backend()))
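# Example (illustrative sketch, not part of uffd): creating a signing key and
# round-tripping an ID-token-like payload through it. The claim values are
# made up.
def _example_sign_and_verify():
    key = OAuth2Key.generate_rsa_key()  # RS256 with a 3072-bit key by default
    db.session.add(key)
    db.session.commit()
    token = key.encode_jwt({'iss': 'https://sso.example.com', 'sub': '12345'})
    # decode_jwt selects the key via the "kid" header and rejects inactive keys
    return OAuth2Key.decode_jwt(token)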
@@ -3,17 +3,26 @@ import ipaddress
import math
from flask import request
from flask_babel import gettext as _
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.ext.hybrid import hybrid_property
from uffd.tasks import cleanup_task
from uffd.database import db
@cleanup_task.delete_by_attribute('expired')
class RatelimitEvent(db.Model):
__tablename__ = 'ratelimit_event'
id = Column(Integer(), primary_key=True, autoincrement=True)
timestamp = Column(DateTime(), default=datetime.datetime.now)
name = Column(String(128))
timestamp = Column(DateTime(), default=datetime.datetime.utcnow, nullable=False)
expires = Column(DateTime(), nullable=False)
name = Column(String(128), nullable=False)
key = Column(String(128))
@hybrid_property
def expired(self):
return self.expires < datetime.datetime.utcnow()
class Ratelimit:
def __init__(self, name, interval, limit):
self.name = name
@@ -21,25 +30,23 @@ class Ratelimit:
self.limit = limit
self.base = interval**(1/limit)
def cleanup(self):
limit = datetime.datetime.now() - datetime.timedelta(seconds=self.interval)
RatelimitEvent.query.filter(RatelimitEvent.name == self.name, RatelimitEvent.timestamp <= limit).delete()
db.session.commit()
def log(self, key=None):
db.session.add(RatelimitEvent(name=self.name, key=key))
db.session.add(RatelimitEvent(name=self.name, key=key, expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=self.interval)))
db.session.commit()
def get_delay(self, key=None):
self.cleanup()
events = RatelimitEvent.query.filter(RatelimitEvent.name == self.name, RatelimitEvent.key == key).all()
events = RatelimitEvent.query\
.filter(db.not_(RatelimitEvent.expired))\
.filter_by(name=self.name, key=key)\
.order_by(RatelimitEvent.timestamp)\
.all()
if not events:
return 0
delay = math.ceil(self.base**len(events))
if delay < 5:
delay = 0
delay = min(delay, 365*24*60*60) # prevent overflow of datetime objetcs
remaining = events[0].timestamp + datetime.timedelta(seconds=delay) - datetime.datetime.now()
delay = min(delay, 365*24*60*60) # prevent overflow of datetime objects
remaining = events[0].timestamp + datetime.timedelta(seconds=delay) - datetime.datetime.utcnow()
return max(0, math.ceil(remaining.total_seconds()))
def get_addrkey(addr=None):
@@ -66,16 +73,16 @@ class HostRatelimit(Ratelimit):
def format_delay(seconds):
if seconds <= 15:
return 'a few seconds'
return _('a few seconds')
if seconds <= 30:
return '30 seconds'
return _('30 seconds')
if seconds <= 60:
return 'one minute'
return _('one minute')
if seconds < 3000:
return '%d minutes'%(math.ceil(seconds/60)+1)
return _('%(minutes)d minutes', minutes=(math.ceil(seconds/60)+1))
if seconds <= 3600:
return 'one hour'
return '%d hours'%math.ceil(seconds/3600)
return _('one hour')
return _('%(hours)d hours', hours=math.ceil(seconds/3600))
# Global host-based ratelimit
host_ratelimit = HostRatelimit('host', 1*60*60, 25)
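# Worked example (illustrative): for host_ratelimit above, interval=3600 and
# limit=25, so base = 3600 ** (1 / 25) ≈ 1.388. With n unexpired events,
# get_delay() yields roughly ceil(1.388 ** n) seconds:
#   n=5  -> ~6 s     n=10 -> ~27 s     n=20 -> ~700 s     n=25 -> ~3600 s
# Delays under 5 seconds are reported as 0, the delay is capped at one year, and
# it is measured from the oldest unexpired event, so it decays as events expire.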
from sqlalchemy import Column, String, Integer, Text, ForeignKey, Boolean
from sqlalchemy.orm import relationship
from sqlalchemy.orm.collections import MappedCollection, collection
from uffd.database import db
from .user import User
class RoleGroup(db.Model):
__tablename__ = 'role_groups'
role_id = Column(Integer(), ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
role = relationship('Role', back_populates='groups')
group_id = Column(Integer(), ForeignKey('group.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
group = relationship('Group')
requires_mfa = Column(Boolean(create_constraint=True), default=False, nullable=False)
# pylint: disable=E1101
role_members = db.Table('role_members',
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True),
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
)
# pylint: disable=E1101
role_inclusion = db.Table('role-inclusion',
Column('role_id', Integer, ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True),
Column('included_role_id', Integer, ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
)
def flatten_recursive(objs, attr):
'''Return a set containing the given objects and every object reachable from them through `attr`, recursively, while avoiding loops'''
objs = set(objs)
new_objs = set(objs)
while new_objs:
for obj in getattr(new_objs.pop(), attr):
if obj not in objs:
objs.add(obj)
new_objs.add(obj)
return objs
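# Example (illustrative sketch, not part of uffd): resolving the transitive
# closure of role inclusion. The role names are hypothetical.
def _example_flatten_roles():
    staff = Role(name='staff')
    admins = Role(name='admins', included_roles=[staff])
    # Returns {admins, staff}: the input plus everything reachable via included_roles
    return flatten_recursive([admins], 'included_roles')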
def get_user_roles_effective(user):
base = set(user.roles)
if not user.is_service_user:
base.update(Role.query.filter_by(is_default=True))
return flatten_recursive(base, 'included_roles')
User.roles_effective = property(get_user_roles_effective)
def compute_user_groups(user, ignore_mfa=False):
groups = set()
for role in user.roles_effective:
for group in role.groups:
if ignore_mfa or not role.groups[group].requires_mfa or user.mfa_enabled:
groups.add(group)
return groups
User.compute_groups = compute_user_groups
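# Sketch of the intended behaviour (illustrative, not part of uffd): a role can
# map a group with requires_mfa=True; compute_groups() only grants such groups
# once the user has MFA enabled, unless ignore_mfa=True is passed, e.g.
#   role.groups[admin_group] = RoleGroup(requires_mfa=True)
#   user.roles.append(role)
#   user.compute_groups()                 # admin_group withheld without MFA
#   user.compute_groups(ignore_mfa=True)  # admin_group included regardless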
def update_user_groups(user):
current_groups = set(user.groups)
groups = user.compute_groups()
if groups == current_groups:
return set(), set()
groups_added = groups - current_groups
groups_removed = current_groups - groups
for group in groups_removed:
user.groups.remove(group)
for group in groups_added:
user.groups.append(group)
return groups_added, groups_removed
User.update_groups = update_user_groups
class RoleGroupMap(MappedCollection):
def __init__(self):
super().__init__(keyfunc=lambda rolegroup: rolegroup.group)
@collection.internally_instrumented
def __setitem__(self, key, value, _sa_initiator=None):
value.group = key
super().__setitem__(key, value, _sa_initiator)
class Role(db.Model):
__tablename__ = 'role'
id = Column(Integer(), primary_key=True, autoincrement=True)
name = Column(String(32), unique=True, nullable=False)
description = Column(Text(), default='', nullable=False)
included_roles = relationship('Role', secondary=role_inclusion,
primaryjoin=id == role_inclusion.c.role_id,
secondaryjoin=id == role_inclusion.c.included_role_id,
backref='including_roles')
including_roles = [] # overwritten by backref
moderator_group_id = Column(Integer(), ForeignKey('group.id', onupdate='CASCADE', ondelete='SET NULL'), nullable=True)
moderator_group = relationship('Group')
members = relationship('User', secondary='role_members', back_populates='roles')
groups = relationship('RoleGroup', collection_class=RoleGroupMap, cascade='all, delete-orphan', back_populates='role')
# Roles that are managed externally (e.g. by Ansible) can be locked to
# prevent accidental editing of name, moderator group, included roles
# and groups as well as deletion in the web interface.
locked = Column(Boolean(create_constraint=True), default=False, nullable=False)
is_default = Column(Boolean(create_constraint=True), default=False, nullable=False)
@property
def members_effective(self):
members = set()
for role in flatten_recursive([self], 'including_roles'):
members.update(role.members)
if role.is_default:
members.update([user for user in User.query.all() if not user.is_service_user])
return members
@property
def included_roles_recursive(self):
return flatten_recursive(self.included_roles, 'included_roles')
@property
def groups_effective(self):
groups = set(self.groups)
for role in self.included_roles_recursive:
groups.update(role.groups)
return groups
def update_member_groups(self):
for user in self.members_effective:
user.update_groups()
import datetime
from sqlalchemy import Column, String, DateTime, Integer, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.hybrid import hybrid_property
from uffd.database import db
from uffd.utils import token_urlfriendly
from uffd.tasks import cleanup_task
@cleanup_task.delete_by_attribute('expired')
class PasswordToken(db.Model):
__tablename__ = 'passwordToken'
id = Column(Integer(), primary_key=True, autoincrement=True)
token = Column(String(128), default=token_urlfriendly, nullable=False)
created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
user = relationship('User')
@hybrid_property
def expired(self):
if self.created is None:
return False
return self.created < datetime.datetime.utcnow() - datetime.timedelta(days=2)