Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
Loading items

Target

Select target project
  • uffd/uffd
  • rixx/uffd
  • thies/uffd
  • leona/uffd
  • strifel/uffd
  • thies/uffd-2
6 results
Select Git revision
Loading items
Show changes
Showing 1551 additions and 76 deletions
"""Multiple email addresses
Revision ID: b273d7fdaa25
Revises: 9f824f61d8ac
Create Date: 2022-08-19 22:52:48.730877
"""
from alembic import op
import sqlalchemy as sa
import datetime
# revision identifiers, used by Alembic.
revision = 'b273d7fdaa25'
down_revision = 'b8fbefca3675'
branch_labels = None
depends_on = None
def iter_rows_paged(table, pk='id', limit=1000):
	"""Yield every row of *table* ordered by its primary key.

	Rows are fetched in pages of *limit* rows using keyset pagination
	(WHERE pk > last seen value), so arbitrarily large tables can be
	iterated without loading them into memory at once.
	"""
	bind = op.get_bind()
	key_column = getattr(table.c, pk)
	cursor = None  # primary-key value of the last row already yielded
	while True:
		query = table.select().order_by(key_column).limit(limit)
		if cursor is not None:
			query = query.where(key_column > cursor)
		page = bind.execute(query)
		key_index = list(page.keys()).index(pk)
		batch = page.fetchall()
		if not batch:
			return
		yield from batch
		cursor = batch[-1][key_index]
def upgrade():
	"""Introduce the user_email table for multiple addresses per user.

	Steps: create user_email, copy every user's single mail column into it
	(marked verified), add primary/recovery email foreign keys to user,
	point primary_email_id at the copied address and make it mandatory,
	migrate pending mailToken verification entries into user_email, and
	finally drop user.mail and the mailToken table.
	"""
	user_email_table = op.create_table('user_email',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.Column('verified', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('verification_legacy_id', sa.Integer(), nullable=True),
		sa.Column('verification_secret', sa.Text(), nullable=True),
		sa.Column('verification_expires', sa.DateTime(), nullable=True),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_user_email_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user_email')),
		sa.UniqueConstraint('user_id', 'address', name='uq_user_email_user_id_address')
	)
	user_table = sa.table('user',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
	)
	# Seed user_email with each user's existing address, marked verified
	op.execute(user_email_table.insert().from_select(
		['user_id', 'address', 'verified'],
		sa.select([user_table.c.id, user_table.c.mail, sa.literal(True, sa.Boolean(create_constraint=True))])
	))
	with op.batch_alter_table('user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('primary_email_id', sa.Integer(), nullable=True))
		batch_op.add_column(sa.Column('recovery_email_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_user_primary_email_id_user_email'), 'user_email', ['primary_email_id'], ['id'], onupdate='CASCADE')
		batch_op.create_foreign_key(batch_op.f('fk_user_recovery_email_id_user_email'), 'user_email', ['recovery_email_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
	# Full table definition so batch_alter_table(copy_from=...) can
	# recreate the table (SQLite) without reflecting from the database
	meta = sa.MetaData(bind=op.get_bind())
	user_table = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=True),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	# Point primary_email_id at the user_email row created above
	op.execute(user_table.update().values(primary_email_id=sa.select([user_email_table.c.id]).where(user_email_table.c.user_id==user_table.c.id).limit(1).as_scalar()))
	with op.batch_alter_table('user', copy_from=user_table) as batch_op:
		batch_op.alter_column('primary_email_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('mail')
	mailToken_table = sa.table('mailToken',
		sa.column('id', sa.Integer()),
		sa.column('token', sa.Text()),
		sa.column('created', sa.DateTime()),
		sa.column('newmail', sa.Text()),
		sa.column('user_id', sa.Integer()),
	)
	for token_id, token, created, newmail, user_id in iter_rows_paged(mailToken_table):
		# BUG FIX: original read `user_email_table.insert().insert()` —
		# the Insert construct has no .insert() method, so the token
		# migration raised AttributeError on the first pending token.
		op.execute(user_email_table.insert().values(
			user_id=user_id,
			address=newmail,
			verified=False,
			verification_legacy_id=token_id,
			verification_secret='{PLAIN}'+token,
			# expiry computed in Python because 'created' is already
			# fetched per-row; avoids DB-specific date arithmetic
			verification_expires=(created + datetime.timedelta(days=2)),
		))
	op.drop_table('mailToken')
def downgrade():
	"""Revert to a single user.mail column.

	Copies each user's primary email address back into user.mail, drops
	the primary/recovery email foreign keys, recreates the (empty)
	mailToken table and drops user_email. Extra addresses and pending
	verification tokens are lost — this downgrade is not loss-free.
	"""
	with op.batch_alter_table('user', schema=None) as batch_op:
		# nullable=True for now; filled below, then made NOT NULL
		batch_op.add_column(sa.Column('mail', sa.VARCHAR(length=128), nullable=True))
	# Full table definition so batch_alter_table(copy_from=...) can
	# recreate the table (SQLite) without reflecting from the database
	meta = sa.MetaData(bind=op.get_bind())
	user_table = sa.Table('user', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('unix_uid', sa.Integer(), nullable=False),
		sa.Column('loginname', sa.String(length=32), nullable=False),
		sa.Column('displayname', sa.String(length=128), nullable=False),
		sa.Column('mail', sa.VARCHAR(length=128), nullable=False),
		sa.Column('primary_email_id', sa.Integer(), nullable=False),
		sa.Column('recovery_email_id', sa.Integer(), nullable=True),
		sa.Column('pwhash', sa.Text(), nullable=True),
		sa.Column('is_service_user', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['primary_email_id'], ['user_email.id'], name=op.f('fk_user_primary_email_id_user_email'), onupdate='CASCADE'),
		sa.ForeignKeyConstraint(['recovery_email_id'], ['user_email.id'], name=op.f('fk_user_recovery_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_user')),
		sa.UniqueConstraint('loginname', name=op.f('uq_user_loginname')),
		sa.UniqueConstraint('unix_uid', name=op.f('uq_user_unix_uid'))
	)
	user_email_table = sa.table('user_email',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('address', sa.String(length=128), nullable=False),
	)
	# Backfill user.mail from the primary email address (scalar subquery)
	op.execute(user_table.update().values(mail=sa.select([user_email_table.c.address]).where(user_email_table.c.id==user_table.c.primary_email_id).limit(1).as_scalar()))
	with op.batch_alter_table('user', copy_from=user_table) as batch_op:
		batch_op.alter_column('mail', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.drop_constraint(batch_op.f('fk_user_recovery_email_id_user_email'), type_='foreignkey')
		batch_op.drop_constraint(batch_op.f('fk_user_primary_email_id_user_email'), type_='foreignkey')
		batch_op.drop_column('recovery_email_id')
		batch_op.drop_column('primary_email_id')
	# Recreated with its original (pre-migration) column types; rows from
	# before the upgrade are not restored
	op.create_table('mailToken',
		sa.Column('id', sa.INTEGER(), nullable=False),
		sa.Column('token', sa.VARCHAR(length=128), nullable=False),
		sa.Column('created', sa.DATETIME(), nullable=True),
		sa.Column('newmail', sa.VARCHAR(length=255), nullable=True),
		sa.Column('user_id', sa.INTEGER(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id')
	)
	op.drop_table('user_email')
"""added api permission for metrics
Revision ID: b8fbefca3675
Revises: f2eb2c52a61f
Create Date: 2022-08-22 21:30:19.265531
"""
from alembic import op
import sqlalchemy as sa
revision = 'b8fbefca3675'
down_revision = 'f2eb2c52a61f'
branch_labels = None
depends_on = None
def upgrade():
	"""Add the api_client.perm_metrics permission flag (default false)."""
	metadata = sa.MetaData(bind=op.get_bind())
	# Existing permission flags share one column shape
	permission_flags = [
		sa.Column(flag, sa.Boolean(create_constraint=True), nullable=False)
		for flag in ('perm_users', 'perm_checkpassword', 'perm_mail_aliases', 'perm_remailer')
	]
	# Explicit table definition so batch_alter_table(copy_from=...) can
	# recreate the table (SQLite) without reflecting from the database
	api_client_table = sa.Table('api_client', metadata,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		*permission_flags,
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	with op.batch_alter_table('api_client', copy_from=api_client_table) as batch:
		# server_default makes the NOT NULL column addable to existing rows
		batch.add_column(sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
def downgrade():
	"""Drop the api_client.perm_metrics permission flag again."""
	metadata = sa.MetaData(bind=op.get_bind())
	permission_flags = [
		sa.Column(flag, sa.Boolean(create_constraint=True), nullable=False)
		for flag in ('perm_users', 'perm_checkpassword', 'perm_mail_aliases', 'perm_remailer')
	]
	# Explicit table definition (including the column being removed) so
	# batch_alter_table(copy_from=...) can recreate the table on SQLite
	api_client_table = sa.Table('api_client', metadata,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		*permission_flags,
		sa.Column('perm_metrics', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	with op.batch_alter_table('api_client', copy_from=api_client_table) as batch:
		batch.drop_column('perm_metrics')
"""Move API and OAuth2 clients to DB
Revision ID: b9d3f7dac9db
Revises: 09d2edcaf0cc
Create Date: 2022-02-17 21:14:00.440057
"""
import secrets
import hashlib
import base64
from alembic import op
import sqlalchemy as sa
from flask import current_app
revision = 'b9d3f7dac9db'
down_revision = '09d2edcaf0cc'
branch_labels = None
depends_on = None
def hash_sha512(password):
	"""Return *password* hashed with SHA-512 as '{sha512}<base64 digest>'.

	Matches the crypt-style format the application stores for API and
	OAuth2 client secrets.
	"""
	digest = hashlib.sha512(password.encode()).digest()
	encoded = base64.b64encode(digest).decode()
	return '{sha512}' + encoded
def upgrade():
	"""Move OAuth2 and API client definitions from the Flask config into
	the database.

	Creates the service, api_client, oauth2client, oauth2logout_uri and
	oauth2redirect_uri tables, populates them from the OAUTH2_CLIENTS and
	API_CLIENTS_2 config dicts (secrets stored SHA-512-hashed via
	hash_sha512), then rewires device_login_initiation, oauth2grant and
	oauth2token from string client ids to oauth2client.db_id foreign keys.

	NOTE(review): indentation of the name-deduplication branches below was
	reconstructed; confirm against the original repository.
	"""
	# -- Phase 1: collect client definitions from the application config --
	used_service_names = set()
	services = {} # name -> limit_access, access_group_name
	oauth2_clients = [] # service_name, client_id, client_secret, redirect_uris, logout_uris
	api_clients = [] # service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases
	# Reserve all explicitly configured service names first, so names
	# derived from client ids never collide with them
	for opts in current_app.config.get('OAUTH2_CLIENTS', {}).values():
		if 'service_name' in opts:
			used_service_names.add(opts['service_name'])
	for opts in current_app.config.get('API_CLIENTS_2', {}).values():
		if 'service_name' in opts:
			used_service_names.add(opts['service_name'])
	for client_id, opts in current_app.config.get('OAUTH2_CLIENTS', {}).items():
		if 'client_secret' not in opts:
			continue  # skip incomplete client definitions
		if 'service_name' in opts:
			service_name = opts['service_name']
		else:
			# Derive a unique service name from the client id: try an
			# 'oauth2_' prefix, then numeric suffixes until it is free
			service_name = client_id
			if service_name in used_service_names:
				service_name = 'oauth2_' + service_name
			if service_name in used_service_names:
				num = 1
				while (service_name + '_%d'%num) in used_service_names:
					num += 1
				service_name = service_name + '_%d'%num
		# required_group=None -> open access; a group name -> that group;
		# any other value cannot be mapped -> restrict with no group
		if opts.get('required_group') is None:
			limit_access = False
			access_group_name = None
		elif isinstance(opts.get('required_group'), str):
			limit_access = True
			access_group_name = opts['required_group']
		else:
			limit_access = True
			access_group_name = None
		client_secret = opts['client_secret']
		redirect_uris = opts.get('redirect_uris') or []
		logout_uris = []
		for item in opts.get('logout_urls') or []:
			# Plain strings mean GET requests; otherwise (method, uri) pairs
			if isinstance(item, str):
				logout_uris.append(('GET', item))
			else:
				logout_uris.append(item)
		used_service_names.add(service_name)
		# Merge access settings when several clients share one service:
		# equal settings are kept; conflicting ones degrade to
		# limit_access=True without a group (most restrictive mappable)
		if service_name not in services or services[service_name] == (False, None):
			services[service_name] = (limit_access, access_group_name)
		elif services[service_name] == (limit_access, access_group_name):
			pass
		else:
			services[service_name] = (True, None)
		oauth2_clients.append((service_name, client_id, client_secret, redirect_uris, logout_uris))
	for client_id, opts in current_app.config.get('API_CLIENTS_2', {}).items():
		if 'client_secret' not in opts:
			continue
		if 'service_name' in opts:
			service_name = opts['service_name']
		else:
			# Derive 'api_<client id>' and deduplicate with numeric suffixes
			service_name = 'api_' + client_id
			if service_name in used_service_names:
				num = 1
				while (service_name + '_%d'%num) in used_service_names:
					num += 1
				service_name = service_name + '_%d'%num
		auth_username = client_id
		auth_password = opts['client_secret']
		# Map legacy API scope names onto the new permission flags
		perm_users = 'getusers' in opts.get('scopes', [])
		perm_checkpassword = 'checkpassword' in opts.get('scopes', [])
		perm_mail_aliases = 'getmails' in opts.get('scopes', [])
		if service_name not in services:
			services[service_name] = (False, None)
		api_clients.append((service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases))
	# -- Phase 2: create the new tables and insert the collected data --
	meta = sa.MetaData(bind=op.get_bind())
	service_table = op.create_table('service',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	group_table = sa.table('group',
		sa.column('id'),
		sa.column('name'),
	)
	for service_name, args in services.items():
		limit_access, access_group_name = args
		# Group name resolved to its id via a scalar subquery (NULL if absent)
		op.execute(service_table.insert().values(name=service_name, limit_access=limit_access, access_group_id=sa.select([group_table.c.id]).where(group_table.c.name==access_group_name).as_scalar()))
	api_client_table = op.create_table('api_client',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('auth_username', sa.String(length=40), nullable=False),
		sa.Column('auth_password', sa.Text(), nullable=False),
		sa.Column('perm_users', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_checkpassword', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('perm_mail_aliases', sa.Boolean(create_constraint=True), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_api_client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_api_client')),
		sa.UniqueConstraint('auth_username', name=op.f('uq_api_client_auth_username'))
	)
	for service_name, auth_username, auth_password, perm_users, perm_checkpassword, perm_mail_aliases in api_clients:
		# Secrets are stored hashed; the plaintext only existed in the config
		op.execute(api_client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), auth_username=auth_username, auth_password=hash_sha512(auth_password), perm_users=perm_users, perm_checkpassword=perm_checkpassword, perm_mail_aliases=perm_mail_aliases))
	oauth2client_table = op.create_table('oauth2client',
		sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_secret', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
		sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
	)
	oauth2logout_uri_table = op.create_table('oauth2logout_uri',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('method', sa.String(length=40), nullable=False),
		sa.Column('uri', sa.String(length=255), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2logout_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2logout_uri'))
	)
	oauth2redirect_uri_table = op.create_table('oauth2redirect_uri',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('uri', sa.String(length=255), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2redirect_uri_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2redirect_uri'))
	)
	for service_name, client_id, client_secret, redirect_uris, logout_uris in oauth2_clients:
		op.execute(oauth2client_table.insert().values(service_id=sa.select([service_table.c.id]).where(service_table.c.name==service_name).as_scalar(), client_id=client_id, client_secret=hash_sha512(client_secret)))
		for method, uri, in logout_uris:
			op.execute(oauth2logout_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), method=method, uri=uri))
		for uri in redirect_uris:
			op.execute(oauth2redirect_uri_table.insert().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2client_table.c.client_id==client_id).as_scalar(), uri=uri))
	# -- Phase 3: replace string client ids on dependent tables with
	# foreign keys to oauth2client.db_id; rows referencing clients that no
	# longer exist are deleted --
	with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), 'oauth2client', ['oauth2_client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	device_login_initiation_table = sa.Table('device_login_initiation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.Column('secret', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
		sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
		sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
		sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
	)
	op.execute(device_login_initiation_table.update().values(oauth2_client_db_id=sa.select([oauth2client_table.c.db_id]).where(device_login_initiation_table.c.oauth2_client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_db_id==None))
	with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
		batch_op.drop_column('oauth2_client_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	oauth2grant_table = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=True),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2grant_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_db_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
		batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('client_id')
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_db_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), 'oauth2client', ['client_db_id'], ['db_id'], onupdate='CASCADE', ondelete='CASCADE')
	oauth2token_table = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=True),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	op.execute(oauth2token_table.update().values(client_db_id=sa.select([oauth2client_table.c.db_id]).where(oauth2token_table.c.client_id==oauth2client_table.c.client_id).as_scalar()))
	op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_db_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
		batch_op.alter_column('client_db_id', existing_type=sa.Integer(), nullable=False)
		batch_op.drop_column('client_id')
def downgrade():
	"""Undo the move of client data into the database.

	Restores the string client_id columns on oauth2token, oauth2grant and
	device_login_initiation from oauth2client, deletes rows whose client
	cannot be resolved, then drops the client/service tables. The clients
	themselves are expected to still exist in the Flask config.
	"""
	meta = sa.MetaData(bind=op.get_bind())
	oauth2client_table = sa.Table('oauth2client', meta,
		sa.Column('db_id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=False),
		sa.Column('client_secret', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_oauth2client_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('db_id', name=op.f('pk_oauth2client')),
		sa.UniqueConstraint('client_id', name=op.f('uq_oauth2client_client_id'))
	)
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		# nullable=True for now; filled below, then made NOT NULL
		batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
	oauth2token_table = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	# Resolve db id back to the string client id; unresolvable rows are dropped
	op.execute(oauth2token_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2token_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(oauth2token_table.delete().where(oauth2token_table.c.client_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token_table) as batch_op:
		batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
		batch_op.drop_constraint(batch_op.f('fk_oauth2token_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('client_db_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('client_id', sa.VARCHAR(length=40), nullable=True))
	oauth2grant_table = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_id', sa.String(length=40), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant')),
		sa.Index('ix_oauth2grant_code', 'code')
	)
	op.execute(oauth2grant_table.update().values(client_id=sa.select([oauth2client_table.c.client_id]).where(oauth2grant_table.c.client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(oauth2grant_table.delete().where(oauth2grant_table.c.client_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant_table) as batch_op:
		batch_op.alter_column('client_id', existing_type=sa.VARCHAR(length=40), nullable=False)
		batch_op.drop_constraint(batch_op.f('fk_oauth2grant_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('client_db_id')
	with op.batch_alter_table('device_login_initiation', schema=None) as batch_op:
		batch_op.add_column(sa.Column('oauth2_client_id', sa.VARCHAR(length=40), nullable=True))
	device_login_initiation_table = sa.Table('device_login_initiation', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('type', sa.Enum('OAUTH2', create_constraint=True, name='devicelogintype'), nullable=False),
		sa.Column('code0', sa.String(length=32), nullable=False),
		sa.Column('code1', sa.String(length=32), nullable=False),
		sa.Column('secret', sa.String(length=128), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('oauth2_client_id', sa.String(length=40), nullable=True),
		sa.Column('oauth2_client_db_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['oauth2_client_db_id'], ['oauth2client.db_id'], name=op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_device_login_initiation')),
		sa.UniqueConstraint('code0', name=op.f('uq_device_login_initiation_code0')),
		sa.UniqueConstraint('code1', name=op.f('uq_device_login_initiation_code1'))
	)
	op.execute(device_login_initiation_table.update().values(oauth2_client_id=sa.select([oauth2client_table.c.client_id]).where(device_login_initiation_table.c.oauth2_client_db_id==oauth2client_table.c.db_id).as_scalar()))
	op.execute(device_login_initiation_table.delete().where(device_login_initiation_table.c.oauth2_client_id==None))
	with op.batch_alter_table('device_login_initiation', copy_from=device_login_initiation_table) as batch_op:
		batch_op.drop_constraint(batch_op.f('fk_device_login_initiation_oauth2_client_db_id_oauth2client'), type_='foreignkey')
		batch_op.drop_column('oauth2_client_db_id')
	# Drop in dependency order: URI tables first, then clients, then services
	op.drop_table('oauth2redirect_uri')
	op.drop_table('oauth2logout_uri')
	op.drop_table('oauth2client')
	op.drop_table('api_client')
	op.drop_table('service')
......@@ -31,7 +31,7 @@ def upgrade():
batch_op.drop_column('id')
batch_op.alter_column('dn', new_column_name='group_dn', nullable=False, existing_type=sa.String(128))
batch_op.alter_column('role_id', nullable=False, existing_type=sa.Integer())
batch_op.add_column(sa.Column('requires_mfa', sa.Boolean(name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False))
batch_op.add_column(sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False))
batch_op.create_primary_key(batch_op.f('pk_role-group'), ['role_id', 'group_dn'])
def downgrade():
......@@ -39,7 +39,7 @@ def downgrade():
table = sa.Table('role-group', meta,
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('group_dn', sa.String(128), nullable=False),
sa.Column('requires_mfa', sa.Boolean(name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False),
sa.Column('requires_mfa', sa.Boolean(create_constraint=True, name=op.f('ck_role-group_requires_mfa')), nullable=False, default=False),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_role-group_role_id_role')),
sa.PrimaryKeyConstraint('role_id', 'group_dn', name=op.f('pk_role-group'))
)
......
......@@ -80,10 +80,10 @@ def upgrade():
sa.Column('token', sa.String(length=128), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('valid_until', sa.DateTime(), nullable=False),
sa.Column('single_use', sa.Boolean(name=op.f('ck_invite_single_use')), nullable=False),
sa.Column('allow_signup', sa.Boolean(name=op.f('ck_invite_allow_signup')), nullable=False),
sa.Column('used', sa.Boolean(name=op.f('ck_invite_used')), nullable=False),
sa.Column('disabled', sa.Boolean(name=op.f('ck_invite_disabled')), nullable=False),
sa.Column('single_use', sa.Boolean(create_constraint=True, name=op.f('ck_invite_single_use')), nullable=False),
sa.Column('allow_signup', sa.Boolean(create_constraint=True, name=op.f('ck_invite_allow_signup')), nullable=False),
sa.Column('used', sa.Boolean(create_constraint=True, name=op.f('ck_invite_used')), nullable=False),
sa.Column('disabled', sa.Boolean(create_constraint=True, name=op.f('ck_invite_disabled')), nullable=False),
sa.PrimaryKeyConstraint('token', name=op.f('pk_invite'))
)
with op.batch_alter_table(table.name, copy_from=table, recreate='always') as batch_op:
......@@ -99,7 +99,7 @@ def upgrade():
pass
table = sa.Table('mfa_method', meta,
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', name='mfatype'), nullable=True),
sa.Column('type', sa.Enum('RECOVERY_CODE', 'TOTP', 'WEBAUTHN', create_constraint=True, name='mfatype'), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('name', sa.String(length=128), nullable=True),
sa.Column('dn', sa.String(length=128), nullable=True),
......
"""Per-service email preferences
Revision ID: e13b733ec856
Revises: b273d7fdaa25
Create Date: 2022-10-17 02:13:11.598210
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic to order migrations.
revision = 'e13b733ec856'
down_revision = 'b273d7fdaa25'
branch_labels = None
depends_on = None
def upgrade():
	'''Add per-service email preference support.

	Adds service.enable_email_preferences and the nullable
	service_user.service_email_id foreign key into user_email.'''
	with op.batch_alter_table('service', schema=None) as batch_op:
		# Temporary FALSE server default so existing rows satisfy NOT NULL;
		# the default is dropped again at the end of this migration.
		batch_op.add_column(sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()))
	with op.batch_alter_table('service_user', schema=None) as batch_op:
		batch_op.add_column(sa.Column('service_email_id', sa.Integer(), nullable=True))
		batch_op.create_foreign_key(batch_op.f('fk_service_user_service_email_id_user_email'), 'user_email', ['service_email_id'], ['id'], onupdate='CASCADE', ondelete='SET NULL')
	meta = sa.MetaData(bind=op.get_bind())
	# Full explicit table definition so batch_alter_table can recreate the
	# table with all constraints intact (copy_from recreates rather than
	# altering in place — needed e.g. on SQLite).
	service = sa.Table('service', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('name', sa.String(length=255), nullable=False),
		sa.Column('limit_access', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('access_group_id', sa.Integer(), nullable=True),
		sa.Column('use_remailer', sa.Boolean(create_constraint=True), nullable=False),
		sa.Column('enable_email_preferences', sa.Boolean(create_constraint=True), nullable=False, server_default=sa.false()),
		sa.ForeignKeyConstraint(['access_group_id'], ['group.id'], name=op.f('fk_service_access_group_id_group'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_service')),
		sa.UniqueConstraint('name', name=op.f('uq_service_name'))
	)
	with op.batch_alter_table('service', copy_from=service) as batch_op:
		# Remove the temporary server default; application code always sets the value
		batch_op.alter_column('enable_email_preferences', server_default=None)
def downgrade():
	'''Revert upgrade(): drop the email preference columns again.'''
	with op.batch_alter_table('service_user', schema=None) as batch_op:
		# Drop the FK first, then the column it covers
		batch_op.drop_constraint(batch_op.f('fk_service_user_service_email_id_user_email'), type_='foreignkey')
		batch_op.drop_column('service_email_id')
	with op.batch_alter_table('service', schema=None) as batch_op:
		batch_op.drop_column('enable_email_preferences')
"""Remailer mode overwrite
Revision ID: e249233e2a31
Revises: aeb07202a6c8
Create Date: 2022-11-05 03:42:38.036623
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e249233e2a31'
down_revision = 'aeb07202a6c8'
branch_labels = None
depends_on = None
def upgrade():
	'''Add service_user.remailer_overwrite_mode (nullable RemailerMode enum).'''
	meta = sa.MetaData(bind=op.get_bind())
	# Explicit table definition so batch_alter_table can recreate the table
	# with all existing constraints intact.
	service_user = sa.Table('service_user', meta,
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('service_email_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
	)
	with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
		# Nullable: NULL means "no per-user overwrite", the service setting applies
		batch_op.add_column(sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True))
def downgrade():
	'''Revert upgrade(): drop the remailer_overwrite_mode column.'''
	meta = sa.MetaData(bind=op.get_bind())
	# Table definition must include the column being dropped plus all
	# constraints, so the batch recreate reproduces the previous schema.
	service_user = sa.Table('service_user', meta,
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('remailer_overwrite_mode', sa.Enum('DISABLED', 'ENABLED_V1', 'ENABLED_V2', create_constraint=True, name='remailermode'), nullable=True),
		sa.Column('service_email_id', sa.Integer(), nullable=True),
		sa.ForeignKeyConstraint(['service_email_id'], ['user_email.id'], name=op.f('fk_service_user_service_email_id_user_email'), onupdate='CASCADE', ondelete='SET NULL'),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
	)
	with op.batch_alter_table('service_user', copy_from=service_user) as batch_op:
		batch_op.drop_column('remailer_overwrite_mode')
"""Migrate oauth2 state from user to session
Revision ID: e71e29cc605a
Revises: 99df71f0f4a0
Create Date: 2024-05-18 21:59:20.435912
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic to order migrations.
revision = 'e71e29cc605a'
down_revision = '99df71f0f4a0'
branch_labels = None
depends_on = None
def upgrade():
	'''Rekey oauth2grant/oauth2token on session instead of user.

	Existing grants and tokens are NOT migrated: both tables are dropped and
	recreated empty, now with a session_id foreign key into session.'''
	op.drop_table('oauth2grant')
	op.drop_table('oauth2token')
	op.create_table('oauth2grant',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	op.create_table('oauth2token',
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
def downgrade():
	'''Rekey oauth2grant/oauth2token back on user.

	For each table: add a nullable user_id column, backfill it from the
	owning session's user, delete rows whose session no longer exists, then
	make user_id NOT NULL and drop the session_id column.'''
	# We don't drop and recreate the table here to improve fuzzy migration test coverage
	meta = sa.MetaData(bind=op.get_bind())
	# Lightweight table stub — only the columns the backfill queries need
	session = sa.table('session',
		sa.column('id', sa.Integer),
		sa.column('user_id', sa.Integer()),
	)
	with op.batch_alter_table('oauth2token', schema=None) as batch_op:
		# Nullable at first; it is only populated by the UPDATE below
		batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2token_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	# Backfill user_id from the owning session, then drop orphaned rows
	op.execute(oauth2token.update().values(user_id=sa.select([session.c.user_id]).where(oauth2token.c.session_id==session.c.id).as_scalar()))
	op.execute(oauth2token.delete().where(oauth2token.c.user_id==None))
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key('fk_oauth2token_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_constraint(batch_op.f('fk_oauth2token_session_id_session'), type_='foreignkey')
		batch_op.drop_column('session_id')
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.add_column(sa.Column('user_id', sa.INTEGER(), nullable=True))
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('session_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=True),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['session_id'], ['session.id'], name=op.f('fk_oauth2grant_session_id_session'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	# Same backfill-and-prune dance for oauth2grant
	op.execute(oauth2grant.update().values(user_id=sa.select([session.c.user_id]).where(oauth2grant.c.session_id==session.c.id).as_scalar()))
	op.execute(oauth2grant.delete().where(oauth2grant.c.user_id==None))
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.alter_column('user_id', nullable=False, existing_type=sa.Integer())
		batch_op.create_foreign_key('fk_oauth2grant_user_id_user', 'user', ['user_id'], ['id'], onupdate='CASCADE', ondelete='CASCADE')
		batch_op.drop_constraint(batch_op.f('fk_oauth2grant_session_id_session'), type_='foreignkey')
		batch_op.drop_column('session_id')
"""Add ServiceUser
Revision ID: f2eb2c52a61f
Revises: 9f824f61d8ac
Create Date: 2022-08-21 00:42:37.896970
"""
from alembic import op
import sqlalchemy as sa
# Revision identifiers, used by Alembic to order migrations.
revision = 'f2eb2c52a61f'
down_revision = '9f824f61d8ac'
branch_labels = None
depends_on = None
def upgrade():
	'''Create service_user and seed it with one row per (service, user) pair.'''
	service_user = op.create_table('service_user',
		sa.Column('service_id', sa.Integer(), nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.ForeignKeyConstraint(['service_id'], ['service.id'], name=op.f('fk_service_user_service_id_service'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_service_user_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('service_id', 'user_id', name=op.f('pk_service_user'))
	)
	# Lightweight stubs of the source tables for the seed query
	service = sa.table('service', sa.column('id'))
	user = sa.table('user', sa.column('id'))
	# Cross product of all services and users (JOIN ... ON TRUE)
	op.execute(service_user.insert().from_select(
		['service_id', 'user_id'],
		sa.select([service.c.id, user.c.id]).select_from(sa.join(service, user, sa.true()))
	))
def downgrade():
	'''Revert upgrade(): drop the service_user table (data is lost).'''
	op.drop_table('service_user')
from .api import APIClient
from .invite import Invite, InviteGrant, InviteSignup
from .mail import Mail, MailReceiveAddress, MailDestinationAddress
from .mfa import MFAType, MFAMethod, RecoveryCodeMethod, TOTPMethod, WebauthnMethod
from .oauth2 import OAuth2Client, OAuth2RedirectURI, OAuth2LogoutURI, OAuth2Grant, OAuth2Token, OAuth2DeviceLoginInitiation, OAuth2Key
from .role import Role, RoleGroup, RoleGroupMap
from .selfservice import PasswordToken
from .service import RemailerMode, Service, ServiceUser, get_services
from .session import Session, DeviceLoginType, DeviceLoginInitiation, DeviceLoginConfirmation
from .signup import Signup
from .user import User, UserEmail, Group, IDAllocator, IDRangeExhaustedError, IDAlreadyAllocatedError
from .ratelimit import RatelimitEvent, Ratelimit, HostRatelimit, host_ratelimit, format_delay
from .misc import FeatureFlag, Lock
# Public API of the models package — keep in sync with the imports above.
# Fix: OAuth2Key and Session were imported but missing from __all__.
__all__ = [
	'APIClient',
	'Invite', 'InviteGrant', 'InviteSignup',
	'Mail', 'MailReceiveAddress', 'MailDestinationAddress',
	'MFAType', 'MFAMethod', 'RecoveryCodeMethod', 'TOTPMethod', 'WebauthnMethod',
	'OAuth2Client', 'OAuth2RedirectURI', 'OAuth2LogoutURI', 'OAuth2Grant', 'OAuth2Token', 'OAuth2DeviceLoginInitiation', 'OAuth2Key',
	'Role', 'RoleGroup', 'RoleGroupMap',
	'PasswordToken',
	'RemailerMode', 'Service', 'ServiceUser', 'get_services',
	'Session', 'DeviceLoginType', 'DeviceLoginInitiation', 'DeviceLoginConfirmation',
	'Signup',
	'User', 'UserEmail', 'Group', 'IDAllocator', 'IDRangeExhaustedError', 'IDAlreadyAllocatedError',
	'RatelimitEvent', 'Ratelimit', 'HostRatelimit', 'host_ratelimit', 'format_delay',
	'FeatureFlag', 'Lock',
]
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Text
from sqlalchemy.orm import relationship
from uffd.database import db
from uffd.password_hash import PasswordHashAttribute, HighEntropyPasswordHash
class APIClient(db.Model):
	'''HTTP Basic auth credentials for the API, bound to a service.

	Permissions are modeled as one boolean column per permission, each named
	"perm_<NAME>"; see permission_exists()/has_permission().'''
	__tablename__ = 'api_client'
	id = Column(Integer, primary_key=True, autoincrement=True)
	service_id = Column(Integer, ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	service = relationship('Service', back_populates='api_clients')
	# Credentials: the password is stored only as a hash (PasswordHashAttribute
	# transparently hashes on assignment and verifies on comparison)
	auth_username = Column(String(40), unique=True, nullable=False)
	_auth_password = Column('auth_password', Text(), nullable=False)
	auth_password = PasswordHashAttribute('_auth_password', HighEntropyPasswordHash)
	# Permissions are defined by adding an attribute named "perm_NAME"
	perm_users = Column(Boolean(create_constraint=True), default=False, nullable=False)
	perm_checkpassword = Column(Boolean(create_constraint=True), default=False, nullable=False)
	perm_mail_aliases = Column(Boolean(create_constraint=True), default=False, nullable=False)
	perm_remailer = Column(Boolean(create_constraint=True), default=False, nullable=False)
	perm_metrics = Column(Boolean(create_constraint=True), default=False, nullable=False)

	@classmethod
	def permission_exists(cls, name):
		# True if a "perm_<name>" attribute is defined on the class
		return hasattr(cls, 'perm_'+name)

	def has_permission(self, name):
		# Value of this client's "perm_<name>" column
		return getattr(self, 'perm_' + name)
import datetime
from flask_babel import gettext as _
from flask import current_app
from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Boolean
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship
from uffd.database import db
from uffd.signup.models import Signup
from uffd.utils import token_urlfriendly
from uffd.database import db
from .signup import Signup
invite_roles = db.Table('invite_roles',
Column('invite_id', Integer(), ForeignKey('invite.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True),
......@@ -17,23 +19,23 @@ class Invite(db.Model):
__tablename__ = 'invite'
id = Column(Integer(), primary_key=True, autoincrement=True)
token = Column(String(128), unique=True, nullable=False, default=token_urlfriendly)
created = Column(DateTime, default=datetime.datetime.now, nullable=False)
created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
creator_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE'), nullable=True)
creator = relationship('User')
valid_until = Column(DateTime, nullable=False)
single_use = Column(Boolean, default=True, nullable=False)
allow_signup = Column(Boolean, default=True, nullable=False)
used = Column(Boolean, default=False, nullable=False)
disabled = Column(Boolean, default=False, nullable=False)
single_use = Column(Boolean(create_constraint=True), default=True, nullable=False)
allow_signup = Column(Boolean(create_constraint=True), default=True, nullable=False)
used = Column(Boolean(create_constraint=True), default=False, nullable=False)
disabled = Column(Boolean(create_constraint=True), default=False, nullable=False)
roles = relationship('Role', secondary=invite_roles)
signups = relationship('InviteSignup', back_populates='invite', lazy=True, cascade='all, delete-orphan')
grants = relationship('InviteGrant', back_populates='invite', lazy=True, cascade='all, delete-orphan')
@property
@hybrid_property
def expired(self):
return datetime.datetime.now().replace(second=0, microsecond=0) > self.valid_until
return self.valid_until < datetime.datetime.utcnow().replace(second=0, microsecond=0)
@property
@hybrid_property
def voided(self):
return self.single_use and self.used
......@@ -41,6 +43,8 @@ class Invite(db.Model):
def permitted(self):
if self.creator is None:
return False # Creator does not exist (anymore)
if self.creator.is_deactivated:
return False
if self.creator.is_in_group(current_app.config['ACL_ADMIN_GROUP']):
return True
if self.allow_signup and not self.creator.is_in_group(current_app.config['ACL_SIGNUP_GROUP']):
......@@ -77,16 +81,16 @@ class InviteGrant(db.Model):
def apply(self):
if not self.invite.active:
return False, 'Invite link is invalid'
return False, _('Invite link is invalid')
if not self.invite.roles:
return False, 'Invite link does not grant any roles'
return False, _('Invite link does not grant any roles')
if set(self.invite.roles).issubset(self.user.roles):
return False, 'Invite link does not grant any new roles'
return False, _('Invite link does not grant any new roles')
for role in self.invite.roles:
self.user.roles.append(role)
self.user.update_groups()
self.invite.used = True
return True, 'Success'
return True, _('Success')
class InviteSignup(Signup):
__tablename__ = 'invite_signup'
......@@ -100,12 +104,12 @@ class InviteSignup(Signup):
def validate(self):
if not self.invite.active or not self.invite.allow_signup:
return False, 'Invite link is invalid'
return False, _('Invite link is invalid')
return super().validate()
def finish(self, password):
if not self.invite.active or not self.invite.allow_signup:
return None, 'Invite link is invalid'
return None, _('Invite link is invalid')
user, msg = super().finish(password)
if user is not None:
for role in self.invite.roles:
......
import re
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.associationproxy import association_proxy
......@@ -5,6 +7,18 @@ from sqlalchemy.ext.associationproxy import association_proxy
from uffd.database import db
class Mail(db.Model):
# Aliases are looked up by receiver addresses with api.getmails. To emulate
# the pre-v2/LDAP behaviour, the lookup needs to be case-insensitive. To not
# rely on database-specific behaviour, we ensure that all receiver addresses
# are stored lower-case and convert incoming addresses in api.getmails to
# lower-case. Note that full emulation of LDAP behaviour would also require
# whitespace normalization. Instead we disallow spaces in receiver addresses.
# Match ASCII code points 33 (!) to 64 (@) and 91 ([) to 126 (~), i.e. any
# number of lower-case ASCII letters, digits, symbols
RECEIVER_REGEX = '[!-@[-~]*'
RECEIVER_REGEX_COMPILED = re.compile(RECEIVER_REGEX)
__tablename__ = 'mail'
id = Column(Integer(), primary_key=True, autoincrement=True)
uid = Column(String(32), unique=True, nullable=False)
......@@ -13,6 +27,10 @@ class Mail(db.Model):
_destinations = relationship('MailDestinationAddress', cascade='all, delete-orphan')
destinations = association_proxy('_destinations', 'address')
@property
def invalid_receivers(self):
return [addr for addr in self.receivers if not re.fullmatch(self.RECEIVER_REGEX_COMPILED, addr)]
class MailReceiveAddress(db.Model):
__tablename__ = 'mail_receive_address'
id = Column(Integer(), primary_key=True, autoincrement=True)
......
......@@ -8,16 +8,18 @@ import hmac
import hashlib
import base64
import urllib.parse
# imports for recovery codes
import crypt
from flask import request, current_app
from sqlalchemy import Column, Integer, Enum, String, DateTime, Text, ForeignKey
from sqlalchemy.orm import relationship, backref
from uffd.utils import nopad_b32decode, nopad_b32encode
from uffd.password_hash import PasswordHashAttribute, CryptPasswordHash
from uffd.database import db
from uffd.user.models import User
from .user import User
User.mfa_recovery_codes = relationship('RecoveryCodeMethod', viewonly=True)
User.mfa_totp_methods = relationship('TOTPMethod', viewonly=True)
User.mfa_webauthn_methods = relationship('WebauthnMethod', viewonly=True)
User.mfa_enabled = property(lambda user: bool(user.mfa_totp_methods or user.mfa_webauthn_methods))
class MFAType(enum.Enum):
......@@ -28,8 +30,8 @@ class MFAType(enum.Enum):
class MFAMethod(db.Model):
__tablename__ = 'mfa_method'
id = Column(Integer(), primary_key=True, autoincrement=True)
type = Column(Enum(MFAType), nullable=False)
created = Column(DateTime(), nullable=False, default=datetime.datetime.now)
type = Column(Enum(MFAType, create_constraint=True), nullable=False)
created = Column(DateTime(), nullable=False, default=datetime.datetime.utcnow)
name = Column(String(128))
user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
user = relationship('User', backref=backref('mfa_methods', cascade='all, delete-orphan'))
......@@ -41,12 +43,11 @@ class MFAMethod(db.Model):
def __init__(self, user, name=None):
self.user = user
self.name = name
self.created = datetime.datetime.now()
self.created = datetime.datetime.utcnow()
class RecoveryCodeMethod(MFAMethod):
code_salt = Column('recovery_salt', String(64))
code_hash = Column('recovery_hash', String(256))
user = relationship('User', backref='mfa_recovery_codes')
_code = Column('recovery_hash', String(256))
code = PasswordHashAttribute('_code', CryptPasswordHash)
__mapper_args__ = {
'polymorphic_identity': MFAType.RECOVERY_CODE
......@@ -54,14 +55,11 @@ class RecoveryCodeMethod(MFAMethod):
def __init__(self, user):
super().__init__(user, None)
# The code attribute is only available in newly created objects as only
# it's hash is stored in the database
self.code = secrets.token_hex(8).replace(' ', '').lower()
self.code_hash = crypt.crypt(self.code)
# self.code_value is not stored and only available on freshly initiated objects
self.code = self.code_value = secrets.token_hex(8).replace(' ', '').lower()
def verify(self, code):
code = code.replace(' ', '').lower()
return secrets.compare_digest(crypt.crypt(code, self.code_hash), self.code_hash)
return self.code.verify(code.replace(' ', '').lower())
def _hotp(counter, key, digits=6):
'''Generates HMAC-based one-time password according to RFC4226
......@@ -79,7 +77,7 @@ def _hotp(counter, key, digits=6):
class TOTPMethod(MFAMethod):
key = Column('totp_key', String(64))
user = relationship('User', backref='mfa_totp_methods')
last_counter = Column('totp_last_counter', Integer())
__mapper_args__ = {
'polymorphic_identity': MFAType.TOTP
......@@ -88,13 +86,12 @@ class TOTPMethod(MFAMethod):
def __init__(self, user, name=None, key=None):
super().__init__(user, name)
if key is None:
key = base64.b32encode(secrets.token_bytes(16)).rstrip(b'=').decode()
key = nopad_b32encode(secrets.token_bytes(16)).decode()
self.key = key
@property
def raw_key(self):
tmp = self.key + '='*(8 - (len(self.key) % 8))
return base64.b32decode(tmp.encode())
return nopad_b32decode(self.key)
@property
def issuer(self):
......@@ -121,15 +118,17 @@ class TOTPMethod(MFAMethod):
:param code: String of digits (as entered by the user)
:returns: True if code is valid, False otherwise'''
counter = int(time.time()/30)
for valid_code in [_hotp(counter-1, self.raw_key), _hotp(counter, self.raw_key)]:
current_counter = int(time.time()/30)
for counter in (current_counter - 1, current_counter):
if counter > (self.last_counter or 0):
valid_code = _hotp(counter, self.raw_key)
if secrets.compare_digest(code, valid_code):
self.last_counter = counter
return True
return False
class WebauthnMethod(MFAMethod):
_cred = Column('webauthn_cred', Text())
user = relationship('User', backref='mfa_webauthn_methods')
__mapper_args__ = {
'polymorphic_identity': MFAType.WEBAUTHN
......@@ -141,7 +140,7 @@ class WebauthnMethod(MFAMethod):
@property
def cred(self):
from fido2.ctap2 import AttestedCredentialData #pylint: disable=import-outside-toplevel
from uffd.fido2_compat import AttestedCredentialData #pylint: disable=import-outside-toplevel
return AttestedCredentialData(base64.b64decode(self._cred))
@cred.setter
......
from uffd.database import db
# pylint completely fails to understand SQLAlchemy's query functions
# pylint: disable=no-member
# Single-column table: a feature flag is enabled iff a row with its name exists
feature_flag_table = db.Table('feature_flag',
	db.Column('name', db.String(32), primary_key=True),
)
class FeatureFlag:
	'''Named boolean flag persisted in feature_flag_table.

	Truthiness reflects whether a row with the flag's name currently exists.
	Callbacks registered with enable_hook()/disable_hook() are invoked right
	after the flag is turned on or off.'''

	def __init__(self, name):
		self.name = name
		self.enable_hooks = []
		self.disable_hooks = []

	@property
	def expr(self):
		'''SQL EXISTS expression testing whether the flag is set.'''
		return db.exists().where(feature_flag_table.c.name == self.name)

	def __bool__(self):
		stmt = db.select([self.expr])
		return db.session.execute(stmt).scalar()

	def enable_hook(self, func):
		'''Register func to run after enable(); usable as a decorator.'''
		self.enable_hooks.append(func)
		return func

	def enable(self):
		'''Insert the flag row, then run all enable hooks in order.'''
		db.session.execute(db.insert(feature_flag_table).values(name=self.name))
		for hook in self.enable_hooks:
			hook()

	def disable_hook(self, func):
		'''Register func to run after disable(); usable as a decorator.'''
		self.disable_hooks.append(func)
		return func

	def disable(self):
		'''Delete the flag row, then run all disable hooks in order.'''
		db.session.execute(db.delete(feature_flag_table).where(feature_flag_table.c.name == self.name))
		for hook in self.disable_hooks:
			hook()
# Global flag instance; presumably gates unique-email-address enforcement —
# see the call sites of FeatureFlag.unique_email_addresses to confirm.
FeatureFlag.unique_email_addresses = FeatureFlag('unique-email-addresses')

# One row per lock name; rows are normally inserted by migrations
lock_table = db.Table('lock',
	db.Column('name', db.String(32), primary_key=True),
)
class Lock:
	'''Named, transaction-scoped database lock.

	On MySQL/MariaDB the lock is a SELECT ... FOR UPDATE on the lock's row in
	lock_table; on SQLite any write operation locks the whole database, so a
	dummy UPDATE is issued instead. The lock is released with the transaction.'''
	# Registry of all lock names defined in the codebase
	ALL_LOCKS = set()

	def __init__(self, name):
		self.name = name
		# Names must be unique so ALL_LOCKS can seed lock_table (see below)
		assert name not in self.ALL_LOCKS
		self.ALL_LOCKS.add(name)

	def acquire(self):
		'''Acquire the lock until the end of the current transaction

		Calling acquire while the specific lock is already held has no effect.'''
		if db.engine.name == 'sqlite':
			# SQLite does not support with_for_update, but we can lock the whole DB
			# with any write operation. So we do a dummy update.
			db.session.execute(db.update(lock_table).where(False).values(name=None))
		elif db.engine.name in ('mysql', 'mariadb'):
			# Row lock: blocks other transactions trying to acquire the same name
			result = db.session.execute(db.select([lock_table.c.name]).where(lock_table.c.name == self.name).with_for_update()).scalar()
			if result is not None:
				return
			# We add all lock rows with migrations so we should never end up here
			raise Exception(f'Lock "{self.name}" is missing')
		else:
			raise NotImplementedError()
# Only executed when lock_table is created with db.create/db.create_all (e.g.
# during testing). Otherwise the rows are inserted with migrations.
@db.event.listens_for(lock_table, 'after_create') # pylint: disable=no-member
def insert_lock_rows(target, connection, **kwargs): # pylint: disable=unused-argument
	'''Seed lock_table with one row per registered Lock name.'''
	for name in Lock.ALL_LOCKS:
		db.session.execute(db.insert(lock_table).values(name=name))
	db.session.commit()
import datetime
import json
import secrets
import base64
from sqlalchemy import Column, Integer, String, DateTime, Text, ForeignKey, Boolean
from sqlalchemy.orm import relationship
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.ext.associationproxy import association_proxy
import jwt
from uffd.database import db, CommaSeparatedList
from uffd.tasks import cleanup_task
from uffd.password_hash import PasswordHashAttribute, HighEntropyPasswordHash
from uffd.utils import token_urlfriendly
from .session import DeviceLoginInitiation, DeviceLoginType
from .service import ServiceUser
# pyjwt v1.7.x compat (Buster/Bullseye)
# pyjwt 2.x added jwt.get_algorithm_by_name(); emulate it on 1.x via the
# default algorithm registry so the rest of the code can rely on one API.
if not hasattr(jwt, 'get_algorithm_by_name'):
	jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]
class OAuth2Client(db.Model):
	'''OAuth2/OIDC client credentials and URIs, bound to a service.'''
	__tablename__ = 'oauth2client'
	# Inconsistently named "db_id" instead of "id" because of the naming conflict
	# with "client_id" in the OAuth2 standard
	db_id = Column(Integer, primary_key=True, autoincrement=True)
	service_id = Column(Integer, ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	service = relationship('Service', back_populates='oauth2_clients')
	client_id = Column(String(40), unique=True, nullable=False)
	# Client secret is stored only as a hash
	_client_secret = Column('client_secret', Text(), nullable=False)
	client_secret = PasswordHashAttribute('_client_secret', HighEntropyPasswordHash)
	# redirect_uris proxies to plain strings via OAuth2RedirectURI.uri
	_redirect_uris = relationship('OAuth2RedirectURI', cascade='all, delete-orphan')
	redirect_uris = association_proxy('_redirect_uris', 'uri')
	logout_uris = relationship('OAuth2LogoutURI', cascade='all, delete-orphan')

	@property
	def default_redirect_uri(self):
		# Only unambiguous when exactly one redirect URI is configured
		return self.redirect_uris[0] if len(self.redirect_uris) == 1 else None

	def access_allowed(self, user):
		'''Return truthy if user currently has access to this client's service.'''
		service_user = ServiceUser.query.get((self.service_id, user.id))
		return service_user and service_user.has_access

	@property
	def logout_uris_json(self):
		# JSON-serialized list of [method, uri] pairs
		return json.dumps([[item.method, item.uri] for item in self.logout_uris])
class OAuth2RedirectURI(db.Model):
	'''A redirect URI registered for an OAuth2 client.'''
	__tablename__ = 'oauth2redirect_uri'
	id = Column(Integer, primary_key=True, autoincrement=True)
	client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	uri = Column(String(255), nullable=False)

	def __init__(self, uri):
		# Single-argument constructor so OAuth2Client.redirect_uris
		# (association_proxy) can create instances from plain strings
		self.uri = uri
class OAuth2LogoutURI(db.Model):
	'''HTTP method and URI pair registered as a logout endpoint for a client
	(serialized by OAuth2Client.logout_uris_json).'''
	__tablename__ = 'oauth2logout_uri'
	id = Column(Integer, primary_key=True, autoincrement=True)
	client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	method = Column(String(40), nullable=False, default='GET')  # HTTP method
	uri = Column(String(255), nullable=False)
@cleanup_task.delete_by_attribute('expired')
class OAuth2Grant(db.Model):
	'''Short-lived OAuth2 authorization code issued during the authorization flow.'''
	__tablename__ = 'oauth2grant'
	id = Column(Integer, primary_key=True, autoincrement=True)

	EXPIRES_IN = 100  # seconds
	expires = Column(DateTime, nullable=False, default=lambda: datetime.datetime.utcnow() + datetime.timedelta(seconds=OAuth2Grant.EXPIRES_IN))

	session_id = Column(Integer(), ForeignKey('session.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	session = relationship('Session')
	client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
	client = relationship('OAuth2Client')

	# The authorization code handed to the client is "<id>-<secret>"; only the
	# secret part is stored.
	_code = Column('code', String(255), nullable=False, default=token_urlfriendly)
	code = property(lambda self: f'{self.id}-{self._code}')

	redirect_uri = Column(String(255), nullable=True)
	nonce = Column(Text(), nullable=True)
	scopes = Column('_scopes', CommaSeparatedList(), nullable=False, default=tuple())

	# OIDC claims, JSON-encoded; exposed as a Python object via the property below
	_claims = Column('claims', Text(), nullable=True)

	@property
	def claims(self):
		return json.loads(self._claims) if self._claims is not None else None

	@claims.setter
	def claims(self, value):
		self._claims = json.dumps(value) if value is not None else None

	@property
	def service_user(self):
		return ServiceUser.query.get((self.client.service_id, self.session.user_id))

	@hybrid_property
	def expired(self):
		if self.expires is None:
			return False
		return self.expires < datetime.datetime.utcnow()

	@classmethod
	def get_by_authorization_code(cls, code):
		'''Look up and validate a grant by its "<id>-<secret>" authorization code.

		Returns None for malformed, unknown or expired codes, for expired
		sessions, deactivated users, and users without access to the service.'''
		# pylint: disable=protected-access
		if '-' not in code:
			return None
		# Bug fix: use maxsplit=1 instead of 2 — the secret part may itself
		# contain "-" characters, which previously made the two-target
		# unpacking raise ValueError instead of rejecting/accepting the code.
		grant_id, grant_code = code.split('-', 1)
		grant = cls.query.filter_by(id=grant_id, expired=False).first()
		if not grant or not secrets.compare_digest(grant._code, grant_code):
			return None
		if grant.session.expired or grant.session.user.is_deactivated:
			return None
		if not grant.service_user or not grant.service_user.has_access:
			return None
		return grant

	def make_token(self, **kwargs):
		'''Create a new (unpersisted) OAuth2Token inheriting this grant's
		session, client, scopes and claims.'''
		return OAuth2Token(
			session=self.session,
			client=self.client,
			scopes=self.scopes,
			claims=self.claims,
			**kwargs
		)
# OAuth2Token objects are cleaned-up when the session expires and is
# auto-deleted (or the user manually revokes it).
class OAuth2Token(db.Model):
    """Access/refresh token pair issued to an OAuth2 client for a login session."""
    __tablename__ = 'oauth2token'
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Access token lifetime in seconds
    EXPIRES_IN = 3600
    expires = Column(DateTime, nullable=False, default=lambda: datetime.datetime.utcnow() + datetime.timedelta(seconds=OAuth2Token.EXPIRES_IN))
    session_id = Column(Integer(), ForeignKey('session.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    session = relationship('Session')
    client_db_id = Column(Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    client = relationship('OAuth2Client')
    # currently only bearer is supported
    token_type = Column(String(40), nullable=False, default='bearer')
    # Tokens handed to clients are "<id>-<secret>" so rows can be found by
    # primary key before the secret is compared in constant time
    _access_token = Column('access_token', String(255), unique=True, nullable=False, default=token_urlfriendly)
    access_token = property(lambda self: f'{self.id}-{self._access_token}')
    _refresh_token = Column('refresh_token', String(255), unique=True, nullable=False, default=token_urlfriendly)
    refresh_token = property(lambda self: f'{self.id}-{self._refresh_token}')
    scopes = Column('_scopes', CommaSeparatedList(), nullable=False, default=tuple())
    # JSON-serialized claims dict, NULL when no claims were stored
    _claims = Column('claims', Text(), nullable=True)

    @property
    def claims(self):
        """Deserialized claims dict (None if unset)."""
        return json.loads(self._claims) if self._claims is not None else None

    @claims.setter
    def claims(self, value):
        self._claims = json.dumps(value) if value is not None else None

    @property
    def service_user(self):
        """ServiceUser row for this token's client service and session user."""
        return ServiceUser.query.get((self.client.service_id, self.session.user_id))

    @hybrid_property
    def expired(self):
        return self.expires < datetime.datetime.utcnow()

    @classmethod
    def get_by_access_token(cls, access_token):
        """Look up a token by its composite access token ("<id>-<secret>").

        Returns None when the token is malformed, missing, expired, the
        secret does not match, the session is expired, the user is
        deactivated, or the user has no access to the service.
        """
        # pylint: disable=protected-access
        if '-' not in access_token:
            return None
        # maxsplit=1: only the first dash separates id and secret. The
        # previous maxsplit=2 made the two-name unpacking raise ValueError
        # whenever the secret itself contained a dash.
        token_id, token_secret = access_token.split('-', 1)
        token = cls.query.filter_by(id=token_id, expired=False).first()
        if not token or not secrets.compare_digest(token._access_token, token_secret):
            return None
        if token.session.expired or token.session.user.is_deactivated:
            return None
        if not token.service_user or not token.service_user.has_access:
            return None
        return token
class OAuth2DeviceLoginInitiation(DeviceLoginInitiation):
    """Device-login flow initiation tied to a specific OAuth2 client."""
    __mapper_args__ = {
        'polymorphic_identity': DeviceLoginType.OAUTH2
    }
    client_db_id = Column('oauth2_client_db_id', Integer, ForeignKey('oauth2client.db_id', onupdate='CASCADE', ondelete='CASCADE'))
    client = relationship('OAuth2Client')

    @property
    def description(self):
        # Human-readable name of what the device login targets: the
        # client's service name
        return self.client.service.name
class OAuth2Key(db.Model):
    """JWT signing key pair for OAuth2/OpenID Connect, stored JWK-serialized."""
    __tablename__ = 'oauth2_key'
    # Random id doubles as the JWK "kid" header value
    id = Column(String(64), primary_key=True, default=token_urlfriendly)
    created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
    # Inactive keys are kept so previously issued JWTs can still be decoded,
    # but encode_jwt/decode_jwt reject them for new signatures
    active = Column(Boolean(create_constraint=False), default=True, nullable=False)
    # JWS algorithm name as understood by PyJWT, e.g. "RS256"
    algorithm = Column(String(32), nullable=False)
    private_key_jwk = Column(Text(), nullable=False)
    public_key_jwk = Column(Text(), nullable=False)

    def __init__(self, **kwargs):
        """Accept either the two *_jwk columns directly, or `algorithm` plus a
        `private_key` object which is serialized into both JWK columns."""
        if kwargs.get('algorithm') and kwargs.get('private_key') \
                and not kwargs.get('private_key_jwk') \
                and not kwargs.get('public_key_jwk'):
            algorithm = jwt.get_algorithm_by_name(kwargs['algorithm'])
            private_key = kwargs.pop('private_key')
            kwargs['private_key_jwk'] = algorithm.to_jwk(private_key)
            kwargs['public_key_jwk'] = algorithm.to_jwk(private_key.public_key())
        super().__init__(**kwargs)

    @property
    def private_key(self):
        # pylint: disable=protected-access,import-outside-toplevel
        # cryptography performs expensive checks when loading RSA private keys.
        # Since we only load keys we generated ourselves with help of cryptography,
        # these checks are unnecessary.
        import cryptography.hazmat.backends.openssl
        cryptography.hazmat.backends.openssl.backend._rsa_skip_check_key = True
        try:
            return jwt.get_algorithm_by_name(self.algorithm).from_jwk(self.private_key_jwk)
        finally:
            # Restore the global flag even if from_jwk raises, so other users
            # of the backend get full key validation again (the previous code
            # left the flag set on exceptions)
            cryptography.hazmat.backends.openssl.backend._rsa_skip_check_key = False

    @property
    def public_key(self):
        return jwt.get_algorithm_by_name(self.algorithm).from_jwk(self.public_key_jwk)

    @property
    def public_key_jwks_dict(self):
        """Public key as a JWKS-ready dict with kid/alg/use metadata."""
        res = json.loads(self.public_key_jwk)
        res['kid'] = self.id
        res['alg'] = self.algorithm
        res['use'] = 'sig'
        # RFC7517 4.3 "The "use" and "key_ops" JWK members SHOULD NOT be used together [...]"
        res.pop('key_ops', None)
        return res

    def encode_jwt(self, payload):
        """Sign *payload* with this key; raises InvalidKeyError if the key is inactive."""
        if not self.active:
            raise jwt.exceptions.InvalidKeyError(f'Key {self.id} not active')
        res = jwt.encode(payload, key=self.private_key, algorithm=self.algorithm, headers={'kid': self.id})
        # pyjwt pre-v2 compat (Buster/Bullseye)
        if isinstance(res, bytes):
            res = res.decode()
        return res

    # Hash algorithm for at_hash/c_hash from OpenID Connect Core 1.0 section 3.1.3.6
    def oidc_hash(self, value):
        # pylint: disable=import-outside-toplevel
        from cryptography.hazmat.primitives import hashes
        from cryptography.hazmat.backends import default_backend # Only required for Buster
        hash_alg = jwt.get_algorithm_by_name(self.algorithm).hash_alg
        digest = hashes.Hash(hash_alg(), backend=default_backend())
        digest.update(value)
        # Left-most half of the digest, base64url-encoded without padding
        return base64.urlsafe_b64encode(
            digest.finalize()[:hash_alg.digest_size // 2]
        ).decode('ascii').rstrip('=')

    @classmethod
    def get_preferred_key(cls, algorithm='RS256'):
        """Return the newest active key for *algorithm*, or None."""
        # cls.created (not OAuth2Key.created) keeps the classmethod
        # subclass-safe and consistent with the other classmethods
        return cls.query.filter_by(active=True, algorithm=algorithm).order_by(cls.created.desc()).first()

    @classmethod
    def get_available_algorithms(cls):
        return ['RS256']

    @classmethod
    def decode_jwt(cls, data, algorithms=('RS256',), **kwargs):
        """Verify and decode a JWT using the key referenced by its kid header.

        Raises jwt.exceptions.InvalidKeyError for missing/unknown/inactive kid.
        """
        headers = jwt.get_unverified_header(data)
        if 'kid' not in headers:
            raise jwt.exceptions.InvalidKeyError('JWT without kid')
        kid = headers['kid']
        key = cls.query.get(kid)
        if not key:
            raise jwt.exceptions.InvalidKeyError(f'Key {kid} not found')
        if not key.active:
            raise jwt.exceptions.InvalidKeyError(f'Key {kid} not active')
        return jwt.decode(data, key=key.public_key, algorithms=algorithms, **kwargs)

    @classmethod
    def generate_rsa_key(cls, public_exponent=65537, key_size=3072):
        """Generate a fresh RS256 key pair and return it as an (unsaved) OAuth2Key."""
        # pylint: disable=import-outside-toplevel
        from cryptography.hazmat.primitives.asymmetric import rsa
        from cryptography.hazmat.backends import default_backend # Only required for Buster
        return cls(algorithm='RS256', private_key=rsa.generate_private_key(public_exponent=public_exponent, key_size=key_size, backend=default_backend()))
......@@ -5,16 +5,24 @@ import math
from flask import request
from flask_babel import gettext as _
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.ext.hybrid import hybrid_property
from uffd.tasks import cleanup_task
from uffd.database import db
@cleanup_task.delete_by_attribute('expired')
class RatelimitEvent(db.Model):
    """One logged event of a named ratelimiter; purged by the cleanup task once expired."""
    __tablename__ = 'ratelimit_event'
    id = Column(Integer(), primary_key=True, autoincrement=True)
    # Removed stale duplicate definitions of timestamp/name (merge residue):
    # the earlier assignments were dead, immediately overwritten by these
    timestamp = Column(DateTime(), default=datetime.datetime.utcnow, nullable=False)
    expires = Column(DateTime(), nullable=False)
    name = Column(String(128), nullable=False)
    # Optional discriminator (e.g. per-user or per-address), NULL for global limits
    key = Column(String(128))

    @hybrid_property
    def expired(self):
        return self.expires < datetime.datetime.utcnow()
class Ratelimit:
def __init__(self, name, interval, limit):
self.name = name
......@@ -22,25 +30,23 @@ class Ratelimit:
self.limit = limit
self.base = interval**(1/limit)
def cleanup(self):
    """Delete all events of this ratelimiter older than its interval."""
    cutoff = datetime.datetime.now() - datetime.timedelta(seconds=self.interval)
    stale = RatelimitEvent.query.filter(
        RatelimitEvent.name == self.name,
        RatelimitEvent.timestamp <= cutoff,
    )
    stale.delete()
    db.session.commit()
def log(self, key=None):
    """Record one event for this ratelimiter (optionally keyed, e.g. per user).

    The event expires `interval` seconds in the future.
    """
    # Removed the stale duplicate session.add() without an expires value
    # (merge residue): expires is NOT NULL on RatelimitEvent, so flushing
    # that row would fail
    event = RatelimitEvent(
        name=self.name,
        key=key,
        expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=self.interval),
    )
    db.session.add(event)
    db.session.commit()
def get_delay(self, key=None):
    """Return the number of seconds the caller must still wait (0 = allowed).

    The delay grows exponentially with the number of non-expired events
    (base**count), anchored at the oldest event's timestamp.
    """
    self.cleanup()
    # Removed dead duplicate query/`remaining` computations (merge residue);
    # the stale versions used naive local time (datetime.now), inconsistent
    # with the utcnow-based timestamps stored on RatelimitEvent
    events = RatelimitEvent.query\
        .filter(db.not_(RatelimitEvent.expired))\
        .filter_by(name=self.name, key=key)\
        .order_by(RatelimitEvent.timestamp)\
        .all()
    if not events:
        return 0
    delay = math.ceil(self.base**len(events))
    if delay < 5:
        # Delays below five seconds are not worth enforcing
        delay = 0
    delay = min(delay, 365*24*60*60) # prevent overflow of datetime objects
    remaining = events[0].timestamp + datetime.timedelta(seconds=delay) - datetime.datetime.utcnow()
    return max(0, math.ceil(remaining.total_seconds()))
def get_addrkey(addr=None):
......
......@@ -3,15 +3,15 @@ from sqlalchemy.orm import relationship
from sqlalchemy.orm.collections import MappedCollection, collection
from uffd.database import db
from uffd.user.models import User
from .user import User
class RoleGroup(db.Model):
    """Association of a Role with a Group it grants, with per-pair MFA requirement."""
    __tablename__ = 'role_groups'
    role_id = Column(Integer(), ForeignKey('role.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
    # Removed stale duplicate definitions of role/requires_mfa (merge
    # residue): the earlier assignments were dead, immediately overwritten
    role = relationship('Role', back_populates='groups')
    group_id = Column(Integer(), ForeignKey('group.id', onupdate='CASCADE', ondelete='CASCADE'), primary_key=True)
    group = relationship('Group')
    # When True, the group is only granted to role members with MFA enabled
    requires_mfa = Column(Boolean(create_constraint=True), default=False, nullable=False)
# pylint: disable=E1101
role_members = db.Table('role_members',
......@@ -94,14 +94,14 @@ class Role(db.Model):
members = relationship('User', secondary='role_members', back_populates='roles')
# Removed stale duplicate definitions of groups/locked/is_default (merge
# residue): the earlier assignments were dead, immediately overwritten
groups = relationship('RoleGroup', collection_class=RoleGroupMap, cascade='all, delete-orphan', back_populates='role')
# Roles that are managed externally (e.g. by Ansible) can be locked to
# prevent accidental editing of name, moderator group, included roles
# and groups as well as deletion in the web interface.
locked = Column(Boolean(create_constraint=True), default=False, nullable=False)
is_default = Column(Boolean(create_constraint=True), default=False, nullable=False)
@property
def members_effective(self):
......
......@@ -2,23 +2,23 @@ import datetime
from sqlalchemy import Column, String, DateTime, Integer, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.hybrid import hybrid_property
from uffd.database import db
from uffd.utils import token_urlfriendly
from uffd.tasks import cleanup_task
@cleanup_task.delete_by_attribute('expired')
class PasswordToken(db.Model):
    """Single-use token for password reset links mailed to users."""
    # NOTE(review): the cleanup decorator deletes rows via an `expired`
    # attribute that is not defined in this chunk — presumably defined
    # elsewhere for this model; confirm
    __tablename__ = 'passwordToken'
    id = Column(Integer(), primary_key=True, autoincrement=True)
    token = Column(String(128), default=token_urlfriendly, nullable=False)
    # Removed stale duplicate created definition using local datetime.now
    # (merge residue); UTC timestamps are used consistently
    created = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
    user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    user = relationship('User')
class MailToken(db.Model):
    """Token mailed to a user to verify a new e-mail address (valid two days)."""
    __tablename__ = 'mailToken'
    id = Column(Integer(), primary_key=True, autoincrement=True)
    token = Column(String(128), default=token_urlfriendly, nullable=False)
    # utcnow, not now: `expired` below compares against utcnow, so a local-time
    # default mis-computes expiry on non-UTC hosts; also matches PasswordToken
    created = Column(DateTime, default=datetime.datetime.utcnow)
    user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    user = relationship('User')
    # New address to be verified; applied once the token is redeemed
    newmail = Column(String(255))

    @hybrid_property
    def expired(self):
        if self.created is None:
            # Unflushed instances without the column default applied yet
            return False
        return self.created < datetime.datetime.utcnow() - datetime.timedelta(days=2)
import enum
from flask import current_app
from flask_babel import get_locale
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Enum
from sqlalchemy.orm import relationship, validates
from uffd.database import db
from uffd.remailer import remailer
from uffd.tasks import cleanup_task
from .user import User, UserEmail, user_groups
class RemailerMode(enum.Enum):
    # Controls how user e-mail addresses are exposed to a service (see
    # Service.remailer_mode / ServiceUser.effective_remailer_mode).
    # Values are persisted in Enum columns; do not renumber.
    DISABLED = 0
    ENABLED_V1 = 1  # addresses built with remailer.build_v1_address
    ENABLED_V2 = 2  # addresses built with remailer.build_v2_address
class Service(db.Model):
    """A registered service (OAuth2/API client owner) with access and e-mail settings."""
    __tablename__ = 'service'
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(255), unique=True, nullable=False)

    # If limit_access is False, all users have access and access_group is
    # ignored. This attribute exists for legacy API and OAuth2 clients that
    # were migrated from config definitions where a missing "required_group"
    # parameter meant no access restrictions. Representing this state by
    # setting access_group_id to NULL would lead to a bad/unintuitive ondelete
    # behaviour.
    limit_access = Column(Boolean(create_constraint=True), default=True, nullable=False)
    access_group_id = Column(Integer(), ForeignKey('group.id', onupdate='CASCADE', ondelete='SET NULL'), nullable=True)
    access_group = relationship('Group')

    oauth2_clients = relationship('OAuth2Client', back_populates='service', cascade='all, delete-orphan')
    api_clients = relationship('APIClient', back_populates='service', cascade='all, delete-orphan')

    # How user addresses are exposed to this service (see RemailerMode); may be
    # overridden per-user via ServiceUser.remailer_overwrite_mode
    remailer_mode = Column(Enum(RemailerMode, create_constraint=True), default=RemailerMode.DISABLED, nullable=False)
    # Whether users may pick a service-specific address (ServiceUser.service_email)
    enable_email_preferences = Column(Boolean(create_constraint=True), default=False, nullable=False)
    hide_deactivated_users = Column(Boolean(create_constraint=True), default=False, nullable=False)
class ServiceUser(db.Model):
    '''Service-related configuration and state for a user

    ServiceUser objects are auto-created whenever a new User or Service is
    created, so there is one for every (Service, User) pair.

    Service- or User-related code should always use ServiceUser in queries
    instead of User/Service.'''
    __tablename__ = 'service_user'
    __table_args__ = (
        db.PrimaryKeyConstraint('service_id', 'user_id'),
    )

    service_id = Column(Integer(), ForeignKey('service.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    service = relationship('Service', viewonly=True)
    user_id = Column(Integer(), ForeignKey('user.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False)
    user = relationship('User', viewonly=True)

    @property
    def has_access(self):
        # Access is unrestricted (limit_access=False) or granted through
        # membership in the service's access group
        return not self.service.limit_access or self.service.access_group in self.user.groups

    @property
    def has_email_preferences(self):
        return self.has_access and self.service.enable_email_preferences

    # Per-user override of the service's remailer mode; None = no override
    remailer_overwrite_mode = Column(Enum(RemailerMode, create_constraint=True), default=None, nullable=True)

    @property
    def effective_remailer_mode(self):
        # Precedence: remailer unconfigured > global user allowlist >
        # per-user override > service setting
        if not remailer.configured:
            return RemailerMode.DISABLED
        if current_app.config['REMAILER_LIMIT_TO_USERS'] is not None:
            if self.user.loginname not in current_app.config['REMAILER_LIMIT_TO_USERS']:
                return RemailerMode.DISABLED
        if self.remailer_overwrite_mode is not None:
            return self.remailer_overwrite_mode
        return self.service.remailer_mode

    service_email_id = Column(Integer(), ForeignKey('user_email.id', onupdate='CASCADE', ondelete='SET NULL'))
    service_email = relationship('UserEmail')

    @validates('service_email')
    def validate_service_email(self, key, value): # pylint: disable=unused-argument
        """Ensure an assigned service_email belongs to this user and is verified."""
        if value is not None:
            if not value.user:
                value.user = self.user
            if value.user != self.user:
                raise ValueError('UserEmail assigned to ServiceUser.service_email is not associated with user')
            if not value.verified:
                # Fixed message: previously said "serviceUser", inconsistent
                # with the class name used in the message above
                raise ValueError('UserEmail assigned to ServiceUser.service_email is not verified')
        return value

    # Actual e-mail address that mails from the service are sent to
    @property
    def real_email(self):
        if self.has_email_preferences and self.service_email:
            return self.service_email.address
        return self.user.primary_email.address

    @classmethod
    def get_by_remailer_email(cls, address):
        """Resolve a remailer address back to its ServiceUser, or None."""
        if not remailer.configured:
            return None
        result = remailer.parse_address(address)
        if result is None:
            return None
        # result is (service_id, user_id), i.e. our primary key
        return cls.query.get(result)

    # E-Mail address as seen by the service
    @property
    def email(self):
        if self.effective_remailer_mode == RemailerMode.ENABLED_V1:
            return remailer.build_v1_address(self.service_id, self.user_id)
        if self.effective_remailer_mode == RemailerMode.ENABLED_V2:
            return remailer.build_v2_address(self.service_id, self.user_id)
        return self.real_email

    # User.primary_email and ServiceUser.service_email can only be set to
    # verified addresses, so this should always return True
    @property
    def email_verified(self):
        if self.effective_remailer_mode != RemailerMode.DISABLED:
            return True
        if self.has_email_preferences and self.service_email:
            return self.service_email.verified
        return self.user.primary_email.verified

    @classmethod
    def filter_query_by_email(cls, query, email):
        '''Filter query of ServiceUser by ServiceUser.email'''
        # The SQL expressions below mirror the Python-level properties
        # (effective_remailer_mode, has_access, real_email) above
        # pylint completely fails to understand SQLAlchemy's query functions
        # pylint: disable=no-member,invalid-name,singleton-comparison
        service_user = cls.get_by_remailer_email(email)
        if service_user and service_user.email == email:
            return query.filter(cls.user_id == service_user.user_id, cls.service_id == service_user.service_id)
        AliasedUser = db.aliased(User)
        AliasedPrimaryEmail = db.aliased(UserEmail)
        AliasedServiceEmail = db.aliased(UserEmail)
        AliasedService = db.aliased(Service)
        aliased_user_groups = db.aliased(user_groups)
        query = query.join(cls.user.of_type(AliasedUser))
        query = query.join(AliasedUser.primary_email.of_type(AliasedPrimaryEmail))
        query = query.outerjoin(cls.service_email.of_type(AliasedServiceEmail))
        query = query.join(cls.service.of_type(AliasedService))
        remailer_enabled = db.case(
            whens=[
                (db.not_(remailer.configured), False),
                (
                    db.not_(AliasedUser.loginname.in_(current_app.config['REMAILER_LIMIT_TO_USERS']))
                    if current_app.config['REMAILER_LIMIT_TO_USERS'] is not None else db.and_(False),
                    False
                ),
                (cls.remailer_overwrite_mode != None, cls.remailer_overwrite_mode != RemailerMode.DISABLED)
            ],
            else_=(AliasedService.remailer_mode != RemailerMode.DISABLED)
        )
        has_access = db.or_(
            db.not_(AliasedService.limit_access),
            db.exists().where(db.and_(
                aliased_user_groups.c.user_id == AliasedUser.id,
                aliased_user_groups.c.group_id == AliasedService.access_group_id,
            ))
        )
        has_email_preferences = db.and_(
            has_access,
            AliasedService.enable_email_preferences,
        )
        real_email_matches = db.case(
            whens=[
                # pylint: disable=singleton-comparison
                (db.and_(has_email_preferences, cls.service_email != None), AliasedServiceEmail.address == email),
            ],
            else_=(AliasedPrimaryEmail.address == email)
        )
        return query.filter(db.and_(db.not_(remailer_enabled), real_email_matches))
@db.event.listens_for(db.Session, 'after_flush') # pylint: disable=no-member
def create_service_users(session, flush_context): # pylint: disable=unused-argument
    """Auto-create ServiceUser rows for every User/Service newly flushed in this session."""
    # pylint completely fails to understand SQLAlchemy's query functions
    # pylint: disable=no-member
    new_user_ids = [user.id for user in session.new if isinstance(user, User)]
    new_service_ids = [service.id for service in session.new if isinstance(service, Service)]
    if not new_user_ids and not new_service_ids:
        return
    # Cross join of all services with all users, restricted to pairs that
    # involve at least one newly created object
    db.session.execute(db.insert(ServiceUser).from_select(
        ['service_id', 'user_id'],
        db.select([Service.id, User.id]).select_from(db.join(Service, User, db.true())).where(db.or_(
            Service.id.in_(new_service_ids),
            User.id.in_(new_user_ids),
        ))
    ))
# On databases with write concurrency (i.e. everything but SQLite), the
# after_flush handler above is racy. So in rare cases ServiceUser objects
# might be missing.
@cleanup_task.handler
def create_missing_service_users():
    """Backfill ServiceUser rows for any (Service, User) pair that lacks one."""
    # pylint completely fails to understand SQLAlchemy's query functions
    # pylint: disable=no-member
    db.session.execute(db.insert(ServiceUser).from_select(
        ['service_id', 'user_id'],
        # Cross join of all services with all users, minus existing pairs
        db.select([Service.id, User.id]).select_from(db.join(Service, User, db.true())).where(db.not_(
            ServiceUser.query.filter(
                ServiceUser.service_id == Service.id,
                ServiceUser.user_id == User.id
            ).exists()
        ))
    ))
# The user-visible services show on the service overview page are read from
# the SERVICES config key. It is planned to gradually extend the Service model
# in order to finally replace the config-defined services.
def get_language_specific(data, field_name, default=''):
    """Look up field_name suffixed with the current locale's language code in
    *data*, falling back to the unsuffixed field, then to *default*."""
    localized_key = '%s_%s' % (field_name, get_locale().language)
    fallback = data.get(field_name, default)
    return data.get(localized_key, fallback)
# pylint: disable=too-many-branches
def get_services(user=None):
    """Build the list of user-visible services from the SERVICES config key.

    Each entry is filtered and annotated (access, permission level, groups,
    infos, links) according to the given user's group permissions. Without a
    user, output is only produced when SERVICES_PUBLIC is set.
    """
    if not user and not current_app.config['SERVICES_PUBLIC']:
        return []
    services = []
    for service_data in current_app.config['SERVICES']:
        service_title = get_language_specific(service_data, 'title')
        if not service_title:
            # Entries without a title are skipped entirely
            continue
        service_description = get_language_specific(service_data, 'description')
        service = {
            'title': service_title,
            'subtitle': service_data.get('subtitle', ''),
            'description': service_description,
            'url': service_data.get('url', ''),
            'logo_url': service_data.get('logo_url', ''),
            'has_access': True,
            'permission': '',
            'groups': [],
            'infos': [],
            'links': [],
        }
        # Base access: a missing required_group means everyone has access
        if service_data.get('required_group'):
            if not user or not user.has_permission(service_data['required_group']):
                service['has_access'] = False
        # A matching permission level re-grants access and sets the displayed
        # permission name; later matching levels overwrite earlier ones
        for permission_data in service_data.get('permission_levels', []):
            if permission_data.get('required_group'):
                if not user or not user.has_permission(permission_data['required_group']):
                    continue
            if not permission_data.get('name'):
                continue
            service['has_access'] = True
            service['permission'] = permission_data['name']
        # Confidential services are hidden entirely from users without access
        if service_data.get('confidential', False) and not service['has_access']:
            continue
        for group_data in service_data.get('groups', []):
            if group_data.get('required_group'):
                if not user or not user.has_permission(group_data['required_group']):
                    continue
            if not group_data.get('name'):
                continue
            service['groups'].append(group_data)
        for info_data in service_data.get('infos', []):
            if info_data.get('required_group'):
                if not user or not user.has_permission(info_data['required_group']):
                    continue
            info_title = get_language_specific(info_data, 'title')
            info_html = get_language_specific(info_data, 'html')
            if not info_title or not info_html:
                continue
            info_button_text = get_language_specific(info_data, 'button_text', info_title)
            info = {
                'title': info_title,
                'button_text': info_button_text,
                'html': info_html,
                # Unique id per info box for the frontend, derived from positions
                'id': '%d-%d'%(len(services), len(service['infos'])),
            }
            service['infos'].append(info)
        for link_data in service_data.get('links', []):
            if link_data.get('required_group'):
                if not user or not user.has_permission(link_data['required_group']):
                    continue
            if not link_data.get('url') or not link_data.get('title'):
                continue
            service['links'].append(link_data)
        services.append(service)
    return services