Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision

Target

Select target project
  • uffd/uffd
  • rixx/uffd
  • thies/uffd
  • leona/uffd
  • enbewe/uffd
  • strifel/uffd
  • thies/uffd-2
7 results
Select Git revision
Show changes
Showing
with 653 additions and 545 deletions
......@@ -4,9 +4,7 @@ from flask import Blueprint, request, session
bp = Blueprint("csrf", __name__)
# pylint: disable=invalid-name
csrfEndpoints = []
# pylint: enable=invalid-name
csrf_endpoints = []
def csrf_protect(blueprint=None, endpoint=None):
def wraper(func):
......@@ -15,7 +13,7 @@ def csrf_protect(blueprint=None, endpoint=None):
urlendpoint = "{}.{}".format(blueprint.name, func.__name__)
else:
urlendpoint = func.__name__
csrfEndpoints.append(urlendpoint)
csrf_endpoints.append(urlendpoint)
@wraps(func)
def decorator(*args, **kwargs):
if '_csrf_token' in request.values:
......@@ -32,6 +30,6 @@ def csrf_protect(blueprint=None, endpoint=None):
@bp.app_url_defaults
def csrf_inject(endpoint, values):
if endpoint not in csrfEndpoints or not session.get('_csrf_token'):
if endpoint not in csrf_endpoints or not session.get('_csrf_token'):
return
values['_csrf_token'] = session['_csrf_token']
from .csrf import bp as csrf_bp, csrf_protect
bp = [csrf_bp]
from collections import OrderedDict
from sqlalchemy import MetaData, event
from sqlalchemy.types import TypeDecorator, Text
from sqlalchemy.ext.mutable import MutableList
from flask_sqlalchemy import SQLAlchemy
from flask.json import JSONEncoder
# pylint: disable=C0103
db = SQLAlchemy()
# pylint: enable=C0103
class SQLAlchemyJSON(JSONEncoder):
	"""JSON encoder that can also serialize SQLAlchemy model instances.

	A model row is rendered as an ordered mapping of its column names to
	the corresponding attribute values; any other object falls back to
	the default Flask JSONEncoder behaviour.
	"""
	def default(self, o):
		if not isinstance(o, db.Model):
			return JSONEncoder.default(self, o)
		return OrderedDict((column, getattr(o, column)) for column in o.__mapper__.c.keys())
# Constraint/index naming convention so that all databases get deterministic
# names, which Alembic migrations can then reference reliably.
convention = {
	'ix': 'ix_%(column_0_label)s',
	'uq': 'uq_%(table_name)s_%(column_0_name)s',
	'ck': 'ck_%(table_name)s_%(column_0_name)s',
	'fk': 'fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s',
	'pk': 'pk_%(table_name)s'
}
metadata = MetaData(naming_convention=convention)
db = SQLAlchemy(metadata=metadata)
def enable_sqlite_foreign_key_support(dbapi_connection, connection_record):
	"""Connection-event handler that enables SQLite foreign key enforcement.

	SQLite disables foreign key checks per connection by default, so the
	PRAGMA must be issued on every newly created DBAPI connection.
	"""
	# pylint: disable=unused-argument
	cur = dbapi_connection.cursor()
	cur.execute('PRAGMA foreign_keys=ON')
	cur.close()
# We want to enable SQLite foreign key support for app and test code, but not
# for migrations.
# The common way to add the handler to the Engine class (so it applies to all
# instances) would also affect the migrations. With flask_sqlalchemy v2.4 and
# newer we could overwrite SQLAlchemy.create_engine and add our handler there.
# However Debian Buster and Bullseye ship v2.1, so we do this here and call
# this function in create_app.
def customize_db_engine(engine):
	"""Apply per-dialect connection tweaks to a freshly created engine.

	Called from create_app (see the comment above) so that migrations are
	not affected by these handlers.
	"""
	if engine.name == 'sqlite':
		event.listen(engine, 'connect', enable_sqlite_foreign_key_support)
	elif engine.name in ('mysql', 'mariadb'):
		@event.listens_for(engine, 'connect')
		def receive_connect(dbapi_connection, connection_record): # pylint: disable=unused-argument
			# Fail early on every new connection if the connection charset or
			# the database collation are not the expected utf8mb4 setup.
			cursor = dbapi_connection.cursor()
			cursor.execute('SHOW VARIABLES LIKE "character_set_connection"')
			character_set_connection = cursor.fetchone()[1]
			if character_set_connection != 'utf8mb4':
				raise Exception(f'Unsupported connection charset "{character_set_connection}". Make sure to add "?charset=utf8mb4" to SQLALCHEMY_DATABASE_URI!')
			cursor.execute('SHOW VARIABLES LIKE "collation_database"')
			collation_database = cursor.fetchone()[1]
			if collation_database != 'utf8mb4_nopad_bin':
				raise Exception(f'Unsupported database collation "{collation_database}". Create the database with "CHARACTER SET utf8mb4 COLLATE utf8mb4_nopad_bin"!')
			cursor.execute('SET NAMES utf8mb4 COLLATE utf8mb4_nopad_bin')
			cursor.close()
class CommaSeparatedList(TypeDecorator):
	"""Column type storing a list of strings as one comma-separated TEXT value.

	Items must not contain commas. Values read from the database are
	wrapped in MutableList so in-place modifications are tracked by
	SQLAlchemy.
	"""
	# For some reason TypeDecorator.process_literal_param and
	# TypeEngine.python_type are abstract but not actually required
	# pylint: disable=abstract-method
	impl = Text
	cache_ok = True

	def process_bind_param(self, value, dialect):
		"""Serialize the list to 'a,b,c' for storage; None becomes SQL NULL."""
		if value is None:
			return None
		for item in value:
			if ',' in item:
				raise ValueError('Items of comma-separated list must not contain commas')
		return ','.join(value)

	def process_result_value(self, value, dialect):
		"""Deserialize the stored string back into a MutableList; NULL becomes None."""
		if value is None:
			return None
		# Bug fix: an empty list is stored as '' by process_bind_param, but
		# ''.split(',') yields [''] instead of [] — handle it explicitly so
		# the empty list round-trips correctly.
		if value == '':
			return MutableList()
		return MutableList(value.split(','))
LDAP_BASE_USER="ou=users,dc=example,dc=com"
LDAP_BASE_GROUPS="ou=groups,dc=example,dc=com"
LDAP_BASE_MAIL="ou=postfix,dc=example,dc=com"
USER_GID=20001
LDAP_SERVICE_BIND_DN=""
LDAP_SERVICE_BIND_PASSWORD=""
LDAP_SERVICE_URL="ldapi:///"
# Service and non-service users must either have the same UID range or must not overlap
USER_MIN_UID=10000
USER_MAX_UID=18999
USER_SERVICE_MIN_UID=19000
USER_SERVICE_MAX_UID=19999
LDAP_USER_OBJECTCLASSES=["top", "inetOrgPerson", "organizationalPerson", "person", "posixAccount"]
LDAP_USER_GID=20001
LDAP_USER_MIN_UID=10000
LDAP_USER_MAX_UID=18999
GROUP_MIN_GID=20000
GROUP_MAX_GID=49999
# The period of time that a login lasts for.
SESSION_LIFETIME_SECONDS=3600
# The period of time that the session cookie lasts for. This is refreshed on each page load.
PERMANENT_SESSION_LIFETIME=2678400
# CSRF protection
SESSION_COOKIE_SECURE=True
SESSION_COOKIE_HTTPONLY=True
SESSION_COOKIE_SAMESITE='Strict'
LANGUAGES={
# Language identifier (see Accept-Language HTTP header) -> Display Name
"en": "EN",
"de": "DE",
}
ACL_ADMIN_GROUP="uffd_admin"
# Group required to access selfservice functions (view selfservice, change profile/password/roles)
ACL_SELFSERVICE_GROUP="uffd_access"
# Group required to login
ACL_ACCESS_GROUP="uffd_access"
# Members can create invite links for signup
ACL_SIGNUP_GROUP="uffd_signup"
MAIL_SERVER='' # e.g. example.com
MAIL_PORT=465
MAIL_USERNAME='yourId@example.com'
MAIL_USERNAME='yourId@example.com' # set to empty string to disable authentication
MAIL_PASSWORD='*****'
MAIL_USE_STARTTLS=True
MAIL_FROM_ADDRESS='foo@bar.com'
MAIL_LDAP_OBJECTCLASSES=["top", "postfixVirtual"]
ROLES_BASEROLES=['base']
# Set to a domain name (e.g. "remailer.example.com") to enable remailer.
# Requires special mail server configuration (see uffd-socketmapd). Can be
# enabled/disabled per-service in the service settings. If enabled, services
# no longer get real user mail addresses but instead special autogenerated
# addresses that are replaced with the real mail addresses by the mail server.
REMAILER_DOMAIN = ''
REMAILER_OLD_DOMAINS = []
# Secret used for construction and verification of remailer addresses.
# If None, the value of SECRET_KEY is used.
REMAILER_SECRET_KEY = None
# Set to list of user loginnames to limit remailer to a small list of users.
# Useful for debugging. If None remailer is active for all users (if
# configured and enabled for a service). This option is deprecated. Use the
# per-service setting in the web interface instead.
REMAILER_LIMIT_TO_USERS = None
# Do not enable this on a public service! There is no spam protection implemented at the moment.
SELF_SIGNUP=False
INVITE_MAX_VALID_DAYS=21
LOGINNAME_BLOCKLIST=['^admin$', '^root$']
#MFA_ICON_URL = 'https://example.com/logo.png'
#MFA_RP_ID = 'example.com' # If unset, hostname from current request is used
MFA_RP_NAME = 'Uffd Test Service' # Service name passed to U2F/FIDO2 authenticators
SQLALCHEMY_TRACK_MODIFICATIONS=False
FOOTER_LINKS=[{"url": "https://example.com", "title": "example"}]
# The default page after login or clicking the top left home button is the self-service
# page. If you would like it to be the services list instead, set this to True.
DEFAULT_PAGE_SERVICES=False
# Service overview page (disabled if empty)
SERVICES=[
# # Title is mandatory, all other fields are optional.
# # For permission_levels/groups/infos/links all fields are mandatory aside from required_group.
# {
# 'title': 'Service Title',
# 'subtitle': 'Service Subtitle',
# 'description': 'Short description of the service as plain text',
# 'url': 'https://example.com/',
# 'logo_url': 'https://example.com/logo.png',
# # Basic access group name, service is accessible to everyone if empty
# 'required_group': 'users',
# # Non-basic permission levels, the last matching entry is selected.
# # Users with a matching permission level are considered to have
# # access to the service (as if they have the basic access group).
# 'permission_levels': [
# {'name': 'Moderator', 'required_group': 'moderators'},
# {'name': 'Admin', 'required_group': 'uffd_admin'},
# ],
# # Per default all services are listed publicly (but grayed out for
# # guests/users without access). Confidential services are only visible
# # to users with access rights to the service.
# 'confidential': True,
# # In-service groups, all matching items are visible
# 'groups': [
# {'name': 'Group "crew_crew"', 'required_group': 'users'},
# {'name': 'Group "crew_logistik"', 'required_group': 'uffd_admin'},
# ],
# # Infos are small/medium amounts of information displayed in a modal
# # dialog. All matching items are visible.
# 'infos': [
# {
# 'title': 'uffd',
# 'button_text': 'Documentation', # Defaults to the title if not set
# 'html': '<p>Some information about the service as html</p>',
# 'required_group': 'users',
# },
# ],
# # Links to external sites, all matching items are visible
# 'links': [
# {'title': 'Link to an external site', 'url': '#', 'required_group': 'users'},
# ]
# },
]
# A banner text that will be displayed above the services list
SERVICES_BANNER=''
# If the banner should be shown to users who are not logged in
SERVICES_BANNER_PUBLIC=True
# Enable the service overview page for users who are not logged in
SERVICES_PUBLIC=True
# An optional banner that will be displayed above the login form
#LOGIN_BANNER='Always check the URL. Never enter your SSO password on any other site.'
BRANDING_LOGO_URL='/static/empty.png'
SITE_TITLE='uffd'
# Name and contact mail address are displayed to users in a few places (plain text only!)
ORGANISATION_NAME='Example Organisation'
ORGANISATION_CONTACT='contact@example.com'
# Optional text included in account registration mails (plain text only!)
WELCOME_TEXT='See https://docs.example.com/ for further information.'
# do NOT set in production
#TEMPLATES_AUTO_RELOAD=True
#SQLALCHEMY_ECHO=True
#FLASK_ENV=development
#LDAP_SERVICE_MOCK=True
# DO set in production
......
# pylint: skip-file
from flask_babel import gettext as _
from warnings import warn
from flask import request, current_app
import urllib.parse
# WebAuthn support is optional because fido2 has a pretty unstable
# interface and might be difficult to install with the correct version
try:
	import fido2 as __fido2
	if __fido2.__version__.startswith('0.5.'):
		from fido2.client import ClientData
		from fido2.server import Fido2Server, RelyingParty as __PublicKeyCredentialRpEntity
		from fido2.ctap2 import AttestationObject, AuthenticatorData, AttestedCredentialData
		from fido2 import cbor
		# Alias the old dumps/loads API to the encode/decode names used by
		# the rest of the code base.
		cbor.encode = cbor.dumps
		cbor.decode = lambda arg: cbor.loads(arg)[0]
		class PublicKeyCredentialRpEntity(__PublicKeyCredentialRpEntity):
			# Adapt the (name, id) constructor used by callers to the old
			# positional (id, name) order of fido2 0.5.x.
			def __init__(self, name, id):
				super().__init__(id, name)
	elif __fido2.__version__.startswith('0.9.'):
		from fido2.client import ClientData
		from fido2.webauthn import PublicKeyCredentialRpEntity
		from fido2.server import Fido2Server
		from fido2.ctap2 import AttestationObject, AuthenticatorData, AttestedCredentialData
		from fido2 import cbor
	elif __fido2.__version__.startswith('1.'):
		from fido2.webauthn import PublicKeyCredentialRpEntity, CollectedClientData as ClientData, AttestationObject, AuthenticatorData, AttestedCredentialData
		from fido2.server import Fido2Server
		from fido2 import cbor
	else:
		raise ImportError(f'Unsupported fido2 version: {__fido2.__version__}')

	def get_webauthn_server():
		"""Create a Fido2Server for the current request's host (or MFA_RP_ID if set)."""
		hostname = urllib.parse.urlsplit(request.url).hostname
		return Fido2Server(PublicKeyCredentialRpEntity(id=current_app.config.get('MFA_RP_ID', hostname),
			name=current_app.config['MFA_RP_NAME']))

	WEBAUTHN_SUPPORTED = True
except ImportError as err:
	warn(_('2FA WebAuthn support disabled because import of the fido2 module failed (%s)')%err)
	WEBAUTHN_SUPPORTED = False
from .ldap import bp as ldap_bp
from .ldap import get_conn, user_conn, escape_filter_chars, uid_to_dn
from .ldap import loginname_to_dn, mail_to_dn, get_next_uid, loginname_is_safe, mailname_is_safe
from .ldap import get_ldap_array_attribute_safe, get_ldap_attribute_safe
bp = [ldap_bp]
import string
from flask import Blueprint, current_app
from ldap3.utils.conv import escape_filter_chars
from ldap3.core.exceptions import LDAPBindError, LDAPCursorError
from ldap3 import Server, Connection, ALL, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MOCK_SYNC
bp = Blueprint("ldap", __name__)
def fix_connection(conn):
	"""Patch conn.search so every search also fetches all regular and operational attributes."""
	original_search = conn.search

	def search_with_all_attributes(*args, **kwargs):
		kwargs.update({'attributes': [ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES]})
		return original_search(*args, **kwargs)

	conn.search = search_with_all_attributes
	return conn
def get_mock_conn():
	"""Return a shared in-memory mock LDAP connection (debug mode only).

	Raises if the app is not running in debug mode, because the mock must
	never be used in production.
	"""
	if not current_app.debug:
		raise Exception('LDAP_SERVICE_MOCK cannot be enabled on production instances')
	# Entries are stored in-memory in the mocked `Connection` object. To make
	# changes persistent across requests we reuse the same `Connection` object
	# for all calls to `service_conn()` and `user_conn()`.
	if not hasattr(current_app, 'ldap_mock'):
		server = Server.from_definition('ldap_mock', 'ldap_server_info.json', 'ldap_server_schema.json')
		current_app.ldap_mock = fix_connection(Connection(server, client_strategy=MOCK_SYNC))
		current_app.ldap_mock.strategy.entries_from_json('ldap_server_entries.json')
		current_app.ldap_mock.bind()
	return current_app.ldap_mock
def service_conn():
	"""Return an LDAP connection bound with the service account (or the mock in debug mode)."""
	if current_app.config.get('LDAP_SERVICE_MOCK', False):
		return get_mock_conn()
	server = Server(current_app.config["LDAP_SERVICE_URL"], get_info=ALL)
	return fix_connection(Connection(server, current_app.config["LDAP_SERVICE_BIND_DN"], current_app.config["LDAP_SERVICE_BIND_PASSWORD"], auto_bind=True))
def user_conn(loginname, password):
	"""Bind to LDAP with a user's own credentials.

	Returns a connection on success, or False for unsafe login names and
	failed binds.
	"""
	if not loginname_is_safe(loginname):
		return False
	if current_app.config.get('LDAP_SERVICE_MOCK', False):
		conn = get_mock_conn()
		# Since we reuse the same conn for all calls to `user_conn()` we
		# simulate the password check by rebinding. Note that ldap3's mocking
		# implementation just compares the string in the objects's userPassword
		# field with the password, no support for hashing or OpenLDAP-style
		# password-prefixes ("{PLAIN}..." or "{ssha512}...").
		if not conn.rebind(loginname_to_dn(loginname), password):
			return False
		return get_mock_conn()
	server = Server(current_app.config["LDAP_SERVICE_URL"], get_info=ALL)
	try:
		return fix_connection(Connection(server, loginname_to_dn(loginname), password, auto_bind=True))
	except LDAPBindError:
		# wrong credentials (or bind otherwise rejected)
		return False
def get_conn():
	"""Return the default (service-account) LDAP connection."""
	return service_conn()
def uid_to_dn(uid):
	"""Resolve a numeric uid to the DN of the matching person entry.

	Returns None unless exactly one entry matches.
	"""
	conn = get_conn()
	search_filter = '(&(objectclass=person)(uidNumber={}))'.format(escape_filter_chars(uid))
	conn.search(current_app.config["LDAP_BASE_USER"], search_filter)
	if len(conn.entries) != 1:
		return None
	return conn.entries[0].entry_dn
def loginname_to_dn(loginname):
	"""Build the user DN for a login name; raises for unsafe names."""
	if not loginname_is_safe(loginname):
		raise Exception('unsafe login name')
	return 'uid={},{}'.format(loginname, current_app.config["LDAP_BASE_USER"])
def mail_to_dn(uid):
	"""Build the mail-object DN for a mail uid; raises for unsafe names."""
	if not mailname_is_safe(uid):
		raise Exception('unsafe mail name')
	return 'uid={},{}'.format(uid, current_app.config["LDAP_BASE_MAIL"])
def loginname_is_safe(value):
	"""Check that a login name is 1-32 chars of lowercase letters, digits or underscore.

	Names passing this check can be embedded in a DN without escaping.
	"""
	if not 1 <= len(value) <= 32:
		return False
	allowed = string.ascii_lowercase + string.digits + '_'
	return all(char in allowed for char in value)
def mailname_is_safe(value):
	"""Mail uids follow the same safety rules as login names."""
	return loginname_is_safe(value)
def get_next_uid():
	"""Return the next free uid number inside [LDAP_USER_MIN_UID, LDAP_USER_MAX_UID].

	Scans all person entries, takes the highest uidNumber inside the
	configured range and returns that value plus one.

	Raises:
		Exception: if the computed uid exceeds LDAP_USER_MAX_UID (range
		exhausted) or is already taken by an existing entry.
	"""
	conn = get_conn()
	conn.search(current_app.config["LDAP_BASE_USER"], '(objectclass=person)')
	max_uid = current_app.config["LDAP_USER_MIN_UID"]
	for entry in conn.entries:
		uid = entry['uidNumber'].value
		# skip out of range entries
		if uid > current_app.config["LDAP_USER_MAX_UID"]:
			continue
		if uid < current_app.config["LDAP_USER_MIN_UID"]:
			continue
		max_uid = max(uid, max_uid)
	next_uid = max_uid + 1
	# Bug fix: also fail when the range is exhausted, not only when the
	# candidate uid is already taken by an (out-of-range) entry.
	if next_uid > current_app.config["LDAP_USER_MAX_UID"] or uid_to_dn(next_uid):
		raise Exception('No free uid found')
	return next_uid
def get_ldap_attribute_safe(ldapobject, attribute):
	"""Return the .value of an LDAP attribute, or None if it is absent."""
	try:
		if attribute not in ldapobject:
			return None
		return ldapobject[attribute].value
	except LDAPCursorError:
		# ldap3 before 2.5 has a broken __contains__ that raises instead of
		# returning False (https://github.com/cannatag/ldap3/issues/493);
		# Debian Buster ships 2.4.1, so treat the error as "attribute missing".
		return None
def get_ldap_array_attribute_safe(ldapobject, attribute):
	"""Return an LDAP attribute normalized to a list.

	ldap3 yields nothing for an absent attribute and a bare string when
	only a single value is present; normalize both cases so callers
	always receive a list.
	"""
	value = get_ldap_attribute_safe(ldapobject, attribute)
	if not value:
		return []
	if isinstance(value, str):
		return [value]
	return value
from .views import bp as bp_ui
bp = [bp_ui]
from ldap3 import MODIFY_REPLACE
from flask import current_app
from uffd import ldap
class Mail():
	"""A postfixVirtual mail entry in LDAP.

	receivers maps to the 'mailacceptinggeneralid' attribute and
	destinations to 'maildrop'; both are lists of address strings.
	"""
	def __init__(self, uid=None, destinations=None, receivers=None, dn=None):
		self.uid = uid
		# normalize falsy values to fresh lists (avoids shared mutable defaults)
		self.receivers = receivers if receivers else []
		self.destinations = destinations if destinations else []
		self.dn = dn

	@classmethod
	def from_ldap(cls, ldapobject):
		"""Build a Mail object from an ldap3 entry."""
		return Mail(
			uid=ldapobject['uid'].value,
			receivers=ldap.get_ldap_array_attribute_safe(ldapobject, 'mailacceptinggeneralid'),
			destinations=ldap.get_ldap_array_attribute_safe(ldapobject, 'maildrop'),
			dn=ldapobject.entry_dn,
		)

	@classmethod
	def from_ldap_dn(cls, dn):
		"""Load a Mail object by DN; returns None unless exactly one entry matches."""
		conn = ldap.get_conn()
		conn.search(dn, '(objectClass=postfixVirtual)')
		if not len(conn.entries) == 1:
			return None
		return Mail.from_ldap(conn.entries[0])

	def to_ldap(self, new=False):
		"""Create (new=True) or update (new=False) the LDAP entry.

		Returns the ldap3 operation result. When creating, self.dn is
		derived from self.uid as a side effect.
		"""
		conn = ldap.get_conn()
		if new:
			attributes = {
				'uid': self.uid,
				# same as for update
				'mailacceptinggeneralid': self.receivers,
				'maildrop': self.destinations,
			}
			self.dn = ldap.mail_to_dn(self.uid)
			result = conn.add(self.dn, current_app.config['MAIL_LDAP_OBJECTCLASSES'], attributes)
		else:
			# replace both multi-valued attributes wholesale
			attributes = {
				'mailacceptinggeneralid': [(MODIFY_REPLACE, self.receivers)],
				'maildrop': [(MODIFY_REPLACE, self.destinations)],
			}
			result = conn.modify(self.dn, attributes)
		return result
from flask import Blueprint, render_template, request, url_for, redirect, flash, current_app
from uffd.navbar import register_navbar
from uffd.csrf import csrf_protect
from uffd.ldap import get_conn, escape_filter_chars
from uffd.session import login_required, is_valid_session, get_current_user
from uffd.mail.models import Mail
bp = Blueprint("mail", __name__, template_folder='templates', url_prefix='/mail/')
@bp.before_request
@login_required()
def mail_acl(): #pylint: disable=inconsistent-return-statements
	"""Before-request hook: redirect non-admin users away from all mail views."""
	if not mail_acl_check():
		flash('Access denied')
		return redirect(url_for('index'))
def mail_acl_check():
	"""True if there is a valid session and its user is in the admin group."""
	return is_valid_session() and get_current_user().is_in_group(current_app.config['ACL_ADMIN_GROUP'])
@bp.route("/")
@register_navbar('Mail', icon='envelope', blueprint=bp, visible=mail_acl_check)
def index():
	"""List all postfixVirtual mail mappings."""
	conn = get_conn()
	conn.search(current_app.config["LDAP_BASE_MAIL"], '(objectclass=postfixVirtual)')
	mails = [Mail.from_ldap(entry) for entry in conn.entries]
	return render_template('mail_list.html', mails=mails)
@bp.route("/<uid>")
@bp.route("/new")
def show(uid=None):
	"""Render the edit form for an existing mail mapping, or an empty form for a new one."""
	if not uid:
		mail = Mail()
	else:
		conn = get_conn()
		conn.search(current_app.config["LDAP_BASE_MAIL"], '(&(objectclass=postfixVirtual)(uid={}))'.format((escape_filter_chars(uid))))
		# NOTE(review): asserts are stripped under `python -O`; consider
		# returning 404 instead when no unique entry exists.
		assert len(conn.entries) == 1
		mail = Mail.from_ldap(conn.entries[0])
	return render_template('mail.html', mail=mail)
@bp.route("/<uid>/update", methods=['POST'])
@bp.route("/new", methods=['POST'])
@csrf_protect(blueprint=bp)
def update(uid=False):
	"""Create or update a mail mapping from the submitted form data."""
	conn = get_conn()
	is_newmail = bool(not uid)
	if is_newmail:
		mail = Mail()
	else:
		# NOTE(review): conn is fetched again here although it was already
		# fetched above; harmless but redundant.
		conn = get_conn()
		conn.search(current_app.config["LDAP_BASE_MAIL"], '(&(objectclass=postfixVirtual)(uid={}))'.format((escape_filter_chars(uid))))
		# NOTE(review): asserts are stripped under `python -O`
		assert len(conn.entries) == 1
		mail = Mail.from_ldap(conn.entries[0])
	if is_newmail:
		# the uid is only settable on creation
		mail.uid = request.form.get('mail-uid')
	# one address per line in the form fields
	mail.receivers = request.form.get('mail-receivers', '').splitlines()
	mail.destinations = request.form.get('mail-destinations', '').splitlines()
	if mail.to_ldap(new=is_newmail):
		flash('Mail mapping updated.')
	else:
		flash('Error updating mail mapping: {}'.format(conn.result['message']))
	if is_newmail:
		return redirect(url_for('mail.index'))
	return redirect(url_for('mail.show', uid=mail.uid))
@bp.route("/<uid>/del")
@csrf_protect(blueprint=bp)
def delete(uid):
	"""Delete the mail mapping with the given uid and redirect to the list."""
	conn = get_conn()
	conn.search(current_app.config["LDAP_BASE_MAIL"], '(&(objectclass=postfixVirtual)(uid={}))'.format((escape_filter_chars(uid))))
	# NOTE(review): asserts are stripped under `python -O`
	assert len(conn.entries) == 1
	mail = conn.entries[0]
	if conn.delete(mail.entry_dn):
		flash('Deleted mail mapping.')
	else:
		flash('Could not delete mail mapping: {}'.format(conn.result['message']))
	return redirect(url_for('mail.index'))
from .views import bp as _bp
bp = [_bp]
{% extends 'base.html' %}
{% block body %}
<p>When you proceed, all recovery codes, registered authenticator applications and devices will be invalidated.
You can later generate new recovery codes and setup your applications and devices again.</p>
<form class="form" action="{{ url_for('mfa.disable_confirm') }}" method="POST">
<button type="submit" class="btn btn-danger btn-block">Disable two-factor authentication</button>
</form>
{% endblock %}
from flask import Blueprint, render_template, session, request, redirect, url_for, flash, current_app, abort
import urllib.parse
from fido2.client import ClientData
from fido2.server import Fido2Server, RelyingParty
from fido2.ctap2 import AttestationObject, AuthenticatorData
from fido2 import cbor
from uffd.database import db
from uffd.mfa.models import MFAMethod, TOTPMethod, WebauthnMethod, RecoveryCodeMethod
from uffd.session.views import get_current_user, login_required, is_valid_session
from uffd.ldap import uid_to_dn
from uffd.user.models import User
from uffd.csrf import csrf_protect
bp = Blueprint('mfa', __name__, template_folder='templates', url_prefix='/mfa/')
@bp.route('/', methods=['GET'])
@login_required()
def setup():
	"""MFA overview page listing the current user's recovery, TOTP and WebAuthn methods."""
	user = get_current_user()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=user.dn).all()
	webauthn_methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	return render_template('setup.html', totp_methods=totp_methods, webauthn_methods=webauthn_methods, recovery_methods=recovery_methods)
@bp.route('/setup/disable', methods=['GET'])
@login_required()
def disable():
	"""Render the confirmation page for disabling two-factor authentication."""
	return render_template('disable.html')
@bp.route('/setup/disable', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def disable_confirm():
	"""Delete all MFA methods of the current user (recovery, TOTP and WebAuthn)."""
	user = get_current_user()
	MFAMethod.query.filter_by(dn=user.dn).delete()
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route('/admin/<int:uid>/disable')
@login_required()
@csrf_protect(blueprint=bp)
def admin_disable(uid):
	"""Admin action: reset all MFA methods of the user with the given uid."""
	# Group cannot be checked with login_required kwarg, because the config
	# variable is not available when the decorator is processed
	if not get_current_user().is_in_group(current_app.config['ACL_ADMIN_GROUP']):
		flash('Access denied')
		return redirect(url_for('index'))
	user = User.from_ldap_dn(uid_to_dn(uid))
	MFAMethod.query.filter_by(dn=user.dn).delete()
	db.session.commit()
	flash('Two-factor authentication was reset')
	return redirect(url_for('user.show', uid=uid))
@bp.route('/setup/recovery', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_recovery():
	"""Replace the user's recovery codes with ten freshly generated ones and display them."""
	user = get_current_user()
	# invalidate all previously issued recovery codes
	for method in RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
		db.session.delete(method)
	methods = []
	for _ in range(10):
		method = RecoveryCodeMethod(user)
		methods.append(method)
		db.session.add(method)
	db.session.commit()
	return render_template('setup_recovery.html', methods=methods)
@bp.route('/setup/totp', methods=['GET'])
@login_required()
def setup_totp():
	"""Show the TOTP enrollment page; the pending secret is kept server-side in the session."""
	user = get_current_user()
	method = TOTPMethod(user)
	# stored until setup_totp_finish confirms the code
	session['mfa_totp_key'] = method.key
	return render_template('setup_totp.html', method=method, name=request.values['name'])
@bp.route('/setup/totp', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_totp_finish():
	"""Verify the submitted TOTP code and persist the method started in setup_totp."""
	user = get_current_user()
	# recovery codes are required before any other MFA method can be added
	if not RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
		flash('Generate recovery codes first!')
		return redirect(url_for('mfa.setup'))
	method = TOTPMethod(user, name=request.values['name'], key=session.pop('mfa_totp_key'))
	if method.verify(request.form['code']):
		db.session.add(method)
		db.session.commit()
		return redirect(url_for('mfa.setup'))
	flash('Code is invalid')
	return redirect(url_for('mfa.setup_totp'))
@bp.route('/setup/totp/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_totp(id):
	"""Remove one of the current user's TOTP methods (404 if id does not belong to them)."""
	user = get_current_user()
	method = TOTPMethod.query.filter_by(dn=user.dn, id=id).first_or_404()
	db.session.delete(method)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
def get_webauthn_server():
	"""Create a Fido2Server whose relying party id is the current request's hostname."""
	return Fido2Server(RelyingParty(urllib.parse.urlsplit(request.url).hostname, "uffd"))
@bp.route('/setup/webauthn/begin', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_webauthn_begin():
	"""Start WebAuthn registration; returns CBOR-encoded registration options."""
	user = get_current_user()
	# recovery codes are required before any other MFA method can be added
	if not RecoveryCodeMethod.query.filter_by(dn=user.dn).all():
		abort(403)
	methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	creds = [method.cred for method in methods]
	server = get_webauthn_server()
	registration_data, state = server.register_begin(
		{
			"id": user.dn.encode(),
			"name": user.loginname,
			"displayName": user.displayname,
		},
		creds,
		user_verification='discouraged',
	)
	# state is needed again in setup_webauthn_complete
	session["webauthn-state"] = state
	return cbor.dumps(registration_data)
@bp.route('/setup/webauthn/complete', methods=['POST'])
@login_required()
@csrf_protect(blueprint=bp)
def setup_webauthn_complete():
	"""Finish WebAuthn registration and store the new credential."""
	user = get_current_user()
	server = get_webauthn_server()
	data = cbor.loads(request.get_data())[0]
	client_data = ClientData(data["clientDataJSON"])
	att_obj = AttestationObject(data["attestationObject"])
	auth_data = server.register_complete(session["webauthn-state"], client_data, att_obj)
	method = WebauthnMethod(user, auth_data.credential_data, name=data['name'])
	db.session.add(method)
	db.session.commit()
	return cbor.dumps({"status": "OK"})
@bp.route('/setup/webauthn/<int:id>/delete')
@login_required()
@csrf_protect(blueprint=bp)
def delete_webauthn(id):
	"""Remove one of the current user's WebAuthn methods (404 if id does not belong to them)."""
	user = get_current_user()
	method = WebauthnMethod.query.filter_by(dn=user.dn, id=id).first_or_404()
	db.session.delete(method)
	db.session.commit()
	return redirect(url_for('mfa.setup'))
@bp.route("/auth/webauthn/begin", methods=["POST"])
def auth_webauthn_begin():
	"""Start WebAuthn authentication; returns CBOR-encoded challenge data (404 if no credentials)."""
	user = get_current_user()
	server = get_webauthn_server()
	methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	creds = [method.cred for method in methods]
	if not creds:
		abort(404)
	auth_data, state = server.authenticate_begin(creds, user_verification='discouraged')
	# state is needed again in auth_webauthn_complete
	session["webauthn-state"] = state
	return cbor.dumps(auth_data)
@bp.route("/auth/webauthn/complete", methods=["POST"])
def auth_webauthn_complete():
	"""Verify the WebAuthn assertion and mark the session as MFA-authenticated."""
	user = get_current_user()
	server = get_webauthn_server()
	methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	creds = [method.cred for method in methods]
	if not creds:
		abort(404)
	data = cbor.loads(request.get_data())[0]
	credential_id = data["credentialId"]
	client_data = ClientData(data["clientDataJSON"])
	auth_data = AuthenticatorData(data["authenticatorData"])
	signature = data["signature"]
	# authenticate_complete() (as of python-fido2 v0.5.0, the version in Debian Buster)
	# does not check signCount, although the spec recommends it
	server.authenticate_complete(
		session.pop("webauthn-state"),
		creds,
		credential_id,
		client_data,
		auth_data,
		signature,
	)
	session['user_mfa'] = True
	return cbor.dumps({"status": "OK"})
@bp.route('/auth', methods=['GET'])
@login_required(skip_mfa=True)
def auth():
	"""Second-factor prompt after password login.

	Users without any TOTP/WebAuthn method skip MFA entirely; already
	MFA-authenticated sessions are redirected to their original target.
	"""
	user = get_current_user()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=user.dn).all()
	webauthn_methods = WebauthnMethod.query.filter_by(dn=user.dn).all()
	if not totp_methods and not webauthn_methods:
		session['user_mfa'] = True
	if session.get('user_mfa'):
		return redirect(request.values.get('ref', url_for('index')))
	return render_template('auth.html', ref=request.values.get('ref'), totp_methods=totp_methods,
		webauthn_methods=webauthn_methods, recovery_methods=recovery_methods)
@bp.route('/auth', methods=['POST'])
@login_required(skip_mfa=True)
def auth_finish():
	"""Check a submitted code against TOTP methods first, then recovery codes.

	A matching recovery code is consumed (deleted); when few codes remain
	the user is redirected to the setup page with a warning.
	"""
	user = get_current_user()
	recovery_methods = RecoveryCodeMethod.query.filter_by(dn=user.dn).all()
	totp_methods = TOTPMethod.query.filter_by(dn=user.dn).all()
	for method in totp_methods:
		if method.verify(request.form['code']):
			session['user_mfa'] = True
			return redirect(request.values.get('ref', url_for('index')))
	for method in recovery_methods:
		if method.verify(request.form['code']):
			# recovery codes are single-use
			db.session.delete(method)
			db.session.commit()
			session['user_mfa'] = True
			if len(recovery_methods) <= 1:
				flash('You have exhausted your recovery codes. Please generate new ones now!')
				return redirect(url_for('mfa.setup'))
			elif len(recovery_methods) <= 5:
				flash('You only have a few recovery codes remaining. Make sure to generate new ones before they run out.')
				return redirect(url_for('mfa.setup'))
			return redirect(request.values.get('ref', url_for('index')))
	flash('Two-factor authentication failed')
	return redirect(url_for('mfa.auth', ref=request.values.get('ref')))
Database Migrations
===================
While we use Alembic in a single-database configuration, the migration scripts
are compatible with both SQLite and MySQL/MariaDB.
Compatibility with SQLite almost always requires `batch_alter_table` operations
to modify existing tables. These recreate the tables, copy the data and finally
replace the old with the newly created ones. Alembic is configured to
auto-generate those operations, but in most cases the generated code fails to
fully reflect all details of the original schema. This way some constraints
(e.g. `CHECK` constraints on Enums) are lost. Define the full table and pass it
with `copy_from` to `batch_alter_table` to prevent this.
Compatibility with MySQL requires special care when changing primary keys and
when dealing with foreign keys. It often helps to temporarily remove foreign
key constraints concerning the table that is subject to change. When adding an
autoincrement id column as the new primary key of a table, recreate the table
with `batch_alter_table`.
The `check_migrations.py` script verifies that upgrading and downgrading works
with both databases. While it is far from perfect, it catches many common
errors. It runs automatically as part of the CI pipeline. Make sure to update
the script when adding new tables and when making significant changes to
existing tables.
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
import logging
import click
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from flask import current_app
# The database URL is taken from the Flask app config (via Flask-Migrate),
# not from alembic.ini, so a single config source drives app and migrations.
config.set_main_option('sqlalchemy.url',
	current_app.config.get('SQLALCHEMY_DATABASE_URI'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
	"""Run migrations in 'offline' mode.

	Configures the Alembic context with just a database URL instead of a
	full Engine, so no DBAPI needs to be available. Calls to
	context.execute() emit the given SQL to the script output.
	"""
	context.configure(url=config.get_main_option("sqlalchemy.url"))
	with context.begin_transaction():
		context.run_migrations()
def run_migrations_online():
	"""Run migrations in 'online' mode.

	In this scenario we need to create an Engine
	and associate a connection with the context.
	"""
	# this callback is used to prevent an auto-migration from being generated
	# when there are no changes to the schema
	# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
	def process_revision_directives(context, revision, directives):
		if getattr(config.cmd_opts, 'autogenerate', False):
			script = directives[0]
			if script.upgrade_ops.is_empty():
				directives[:] = []
				logger.info('No changes in schema detected.')
	engine = engine_from_config(config.get_section(config.config_ini_section),
								prefix='sqlalchemy.',
								poolclass=pool.NullPool)
	connection = engine.connect()
	context.configure(connection=connection,
						target_metadata=target_metadata,
						process_revision_directives=process_revision_directives,
						**current_app.extensions['migrate'].configure_args)
	# On MySQL/MariaDB, enforce the utf8mb4 connection charset and the
	# utf8mb4_nopad_bin database collation before running any migration.
	# Failing early with a clear message beats subtle corruption later.
	if engine.name in ('mysql', 'mariadb'):
		character_set_connection = connection.execute('SHOW VARIABLES LIKE "character_set_connection"').fetchone()[1]
		if character_set_connection != 'utf8mb4':
			raise click.ClickException(f'Unsupported connection charset "{character_set_connection}". Make sure to add "?charset=utf8mb4" to SQLALCHEMY_DATABASE_URI!')
		collation_database = connection.execute('SHOW VARIABLES LIKE "collation_database"').fetchone()[1]
		if collation_database != 'utf8mb4_nopad_bin':
			raise click.ClickException(f'Unsupported database collation "{collation_database}". Create the database with "CHARACTER SET utf8mb4 COLLATE utf8mb4_nopad_bin"!')
		connection.execute('SET NAMES utf8mb4 COLLATE utf8mb4_nopad_bin')
	# Always close the connection, even if a migration fails
	try:
		with context.begin_transaction():
			context.run_migrations()
	finally:
		connection.close()
# Entry point: Alembic decides offline vs. online mode from the CLI invocation
if context.is_offline_mode():
	run_migrations_offline()
else:
	run_migrations_online()
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
"""OpenID Connect Support
Revision ID: 01fdd7820f29
Revises: a9b449776953
Create Date: 2023-11-09 16:52:20.860871
"""
from alembic import op
import sqlalchemy as sa
import datetime
import secrets
import math
import logging
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend # Only required for Buster
import jwt
# pyjwt v1.7.x compat (Buster/Bullseye)
# Older pyjwt lacks get_algorithm_by_name(); emulate it via the algorithm
# registry so the rest of this migration can use one code path.
if not hasattr(jwt, 'get_algorithm_by_name'):
	jwt.get_algorithm_by_name = lambda name: jwt.algorithms.get_default_algorithms()[name]
# revision identifiers, used by Alembic.
revision = '01fdd7820f29'
down_revision = 'a9b449776953'
branch_labels = None
depends_on = None
logger = logging.getLogger('alembic.runtime.migration.01fdd7820f29')
def token_with_alphabet(alphabet, nbytes=None):
	"""Return a random text token built from characters in `alphabet`.

	The token length is chosen so that it carries at least `nbytes` bytes
	of entropy (default: `secrets.DEFAULT_ENTROPY`, but no less than 32).
	"""
	entropy_bytes = max(secrets.DEFAULT_ENTROPY, 32) if nbytes is None else nbytes
	# Each character encodes log_256(len(alphabet)) bytes of entropy
	bytes_per_char = math.log(len(alphabet), 256)
	length = math.ceil(entropy_bytes / bytes_per_char)
	chars = []
	for _ in range(length):
		chars.append(secrets.choice(alphabet))
	return ''.join(chars)
def token_urlfriendly(nbytes=None):
	"""Return a random token that is safe to embed in URLs.

	Restricted to digits and ASCII letters to work around common parsing
	bugs in consuming software.
	"""
	safe_chars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
	return token_with_alphabet(safe_chars, nbytes=nbytes)
def upgrade():
	# ID tokens are signed with an asymmetric key so clients can verify them;
	# generate the deployment's initial RSA signing key as part of the upgrade.
	logger.info('Generating 3072 bit RSA key pair (RS256) for OpenID Connect support ...')
	private_key = rsa.generate_private_key(public_exponent=65537, key_size=3072, backend=default_backend())
	meta = sa.MetaData(bind=op.get_bind())
	# New table storing signing keys, serialized as JWK strings
	oauth2_key = op.create_table('oauth2_key',
		sa.Column('id', sa.String(length=64), nullable=False),
		sa.Column('created', sa.DateTime(), nullable=False),
		sa.Column('active', sa.Boolean(create_constraint=False), nullable=False),
		sa.Column('algorithm', sa.String(length=32), nullable=False),
		sa.Column('private_key_jwk', sa.Text(), nullable=False),
		sa.Column('public_key_jwk', sa.Text(), nullable=False),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2_key'))
	)
	algorithm = jwt.get_algorithm_by_name('RS256')
	# Insert the freshly generated key pair as the initially active RS256 key
	op.bulk_insert(oauth2_key, [{
		'id': token_urlfriendly(),
		'created': datetime.datetime.utcnow(),
		'active': True,
		'algorithm': 'RS256',
		'private_key_jwk': algorithm.to_jwk(private_key),
		'public_key_jwk': algorithm.to_jwk(private_key.public_key()),
	}])
	# Drop the index on `code` before the table is recreated below
	with op.batch_alter_table('oauth2grant', schema=None) as batch_op:
		batch_op.drop_index(batch_op.f('ix_oauth2grant_code'))
	# Full pre-upgrade definition of oauth2grant, passed as copy_from so
	# batch_alter_table recreates the table without losing constraints
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		batch_op.add_column(sa.Column('nonce', sa.Text(), nullable=True))
		batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
		# redirect_uri may now be NULL
		batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=True)
	# Full pre-upgrade definition of oauth2token, again passed as copy_from
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.add_column(sa.Column('claims', sa.Text(), nullable=True))
def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	# Full post-upgrade definition of oauth2token (includes `claims`), passed
	# as copy_from so batch_alter_table recreates it without losing constraints
	oauth2token = sa.Table('oauth2token', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('token_type', sa.String(length=40), nullable=False),
		sa.Column('access_token', sa.String(length=255), nullable=False),
		sa.Column('refresh_token', sa.String(length=255), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2token_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2token_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2token')),
		sa.UniqueConstraint('access_token', name=op.f('uq_oauth2token_access_token')),
		sa.UniqueConstraint('refresh_token', name=op.f('uq_oauth2token_refresh_token'))
	)
	with op.batch_alter_table('oauth2token', copy_from=oauth2token) as batch_op:
		batch_op.drop_column('claims')
	# Full post-upgrade definition of oauth2grant (includes `nonce`/`claims`,
	# nullable redirect_uri), again passed as copy_from
	oauth2grant = sa.Table('oauth2grant', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('user_id', sa.Integer(), nullable=False),
		sa.Column('client_db_id', sa.Integer(), nullable=False),
		sa.Column('code', sa.String(length=255), nullable=False),
		sa.Column('redirect_uri', sa.String(length=255), nullable=True),
		sa.Column('nonce', sa.Text(), nullable=True),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('_scopes', sa.Text(), nullable=False),
		sa.Column('claims', sa.Text(), nullable=True),
		sa.ForeignKeyConstraint(['client_db_id'], ['oauth2client.db_id'], name=op.f('fk_oauth2grant_client_db_id_oauth2client'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_oauth2grant_user_id_user'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_oauth2grant'))
	)
	with op.batch_alter_table('oauth2grant', copy_from=oauth2grant) as batch_op:
		# NOTE(review): this fails if any row has a NULL redirect_uri — confirm
		# that is acceptable for downgrades
		batch_op.alter_column('redirect_uri', existing_type=sa.VARCHAR(length=255), nullable=False)
		batch_op.drop_column('claims')
		batch_op.drop_column('nonce')
		# Restore the index on `code` that the upgrade dropped
		batch_op.create_index(batch_op.f('ix_oauth2grant_code'), ['code'], unique=False)
	op.drop_table('oauth2_key')
"""lower-case mail receive addresses
Revision ID: 042879d5e3ac
Revises: 878b25c4fae7
Create Date: 2022-02-01 20:37:32.103288
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '042879d5e3ac'
down_revision = '878b25c4fae7'
branch_labels = None
depends_on = None
def upgrade():
	meta = sa.MetaData(bind=op.get_bind())
	# Definition of the existing mail_receive_address table; only needed to
	# build the UPDATE statement below (no schema change in this migration)
	mail_receive_address_table = sa.Table('mail_receive_address', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('mail_id', sa.Integer(), nullable=False),
		sa.Column('address', sa.String(length=128), nullable=False),
		sa.ForeignKeyConstraint(['mail_id'], ['mail.id'], name=op.f('fk_mail_receive_address_mail_id_mail'), onupdate='CASCADE', ondelete='CASCADE'),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_mail_receive_address'))
	)
	# Normalize all existing receive addresses to lower case
	op.execute(mail_receive_address_table.update().values(address=sa.func.lower(mail_receive_address_table.c.address)))
def downgrade():
	# Lower-casing is lossy: the original casing cannot be restored, so the
	# downgrade leaves the data unchanged.
	pass
"""add expires attribute to ratelimit_event
Revision ID: 09d2edcaf0cc
Revises: af07cea65391
Create Date: 2022-02-15 14:16:19.318253
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '09d2edcaf0cc'
down_revision = 'af07cea65391'
branch_labels = None
depends_on = None
def upgrade():
	meta = sa.MetaData(bind=op.get_bind())
	# Full pre-upgrade definition of ratelimit_event, passed as copy_from so
	# batch_alter_table recreates the table without losing constraints
	ratelimit_event = sa.Table('ratelimit_event', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('timestamp', sa.DateTime(), nullable=True),
		sa.Column('name', sa.String(length=128), nullable=True),
		sa.Column('key', sa.String(length=128), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
	)
	# Existing events have no expiry value, so they are dropped rather than
	# migrated — presumably acceptable as this merely resets rate limits
	op.execute(ratelimit_event.delete())
	with op.batch_alter_table('ratelimit_event', copy_from=ratelimit_event) as batch_op:
		batch_op.add_column(sa.Column('expires', sa.DateTime(), nullable=False))
		batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=False)
		batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=False)
def downgrade():
	meta = sa.MetaData(bind=op.get_bind())
	# Full post-upgrade definition of ratelimit_event (including `expires`),
	# to be passed as copy_from below
	ratelimit_event = sa.Table('ratelimit_event', meta,
		sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
		sa.Column('timestamp', sa.DateTime(), nullable=False),
		sa.Column('expires', sa.DateTime(), nullable=False),
		sa.Column('name', sa.String(length=128), nullable=False),
		sa.Column('key', sa.String(length=128), nullable=True),
		sa.PrimaryKeyConstraint('id', name=op.f('pk_ratelimit_event'))
	)
	# Events with expiry semantics cannot be represented in the old schema;
	# drop them (this merely resets rate limits)
	op.execute(ratelimit_event.delete())
	# Pass copy_from (previously `schema=None`, leaving the table definition
	# above unused) so batch_alter_table recreates the table from this
	# definition instead of reflecting it, which would lose constraints —
	# see the project's migration guidelines and upgrade() above.
	with op.batch_alter_table('ratelimit_event', copy_from=ratelimit_event) as batch_op:
		batch_op.alter_column('timestamp', existing_type=sa.DATETIME(), nullable=True)
		batch_op.alter_column('name', existing_type=sa.VARCHAR(length=128), nullable=True)
		batch_op.drop_column('expires')