From 0a36b07ba852f54b14cff5b5e306e81a494ae931 Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Fri, 21 Feb 2020 10:42:55 -0600 Subject: [PATCH 001/288] Specify pool name when using REUSABLE client strategy Add code to choose a random name to be used as the pool name when using the REUSABLE client strategy. Without this enhancement two instances of the microservice connecting to distinct servers will share a single connection pool. --- .../micro_services/ldap_attribute_store.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 333254648..624357081 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -7,6 +7,8 @@ import copy import logging +import random +import string import urllib import ldap3 @@ -303,6 +305,8 @@ def _ldap_connection_factory(self, config): pool_size = config["pool_size"] pool_keepalive = config["pool_keepalive"] + pool_name = ''.join(random.sample(string.ascii_lowercase, 6)) + if client_strategy == ldap3.REUSABLE: msg = "Using pool size {}".format(pool_size) satosa_logging(logger, logging.DEBUG, msg, None) @@ -318,6 +322,7 @@ def _ldap_connection_factory(self, config): client_strategy=client_strategy, read_only=read_only, version=version, + pool_name=pool_name, pool_size=pool_size, pool_keepalive=pool_keepalive, ) @@ -358,7 +363,8 @@ def _populate_attributes(self, config, record): for attr, values in ldap_attributes.items(): internal_attr = ldap_to_internal_map.get(attr, None) if not internal_attr and ";" in attr: - internal_attr = ldap_to_internal_map.get(attr.split(";")[0], None) + internal_attr = ldap_to_internal_map.get(attr.split(";")[0], + None) if internal_attr and values: attributes[internal_attr] = ( @@ -439,8 +445,14 @@ def process(self, context, data): results = None exp_msg = None + connection = config["connection"] + msg = { + 
"message": "LDAP server host", + "server host": connection.server.host, + } + satosa_logging(logger, logging.DEBUG, msg, context.state) + for filter_val in filter_values: - connection = config["connection"] ldap_ident_attr = config["ldap_identifier_attribute"] search_filter = "({0}={1})".format(ldap_ident_attr, filter_val) msg = { From 016d9d17b7f5a4af41216ebcfb28c001c9bf5305 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 27 Feb 2020 12:52:07 +0200 Subject: [PATCH 002/288] Make the cookie a session-cookie To have the the cookie removed immediately after use, the CONTEXT_STATE_DELETE configuration option should be set to `True`. Signed-off-by: Ivan Kanakarakis --- src/satosa/state.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/satosa/state.py b/src/satosa/state.py index d81a7773a..6aaa5154b 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -23,9 +23,6 @@ logger = logging.getLogger(__name__) -# TODO MOVE TO CONFIG -STATE_COOKIE_MAX_AGE = 1200 - _SESSION_ID_KEY = "SESSION_ID" @@ -47,14 +44,13 @@ def state_to_cookie(state, name, path, encryption_key): """ cookie_data = "" if state.delete else state.urlstate(encryption_key) - max_age = 0 if state.delete else STATE_COOKIE_MAX_AGE cookie = SimpleCookie() cookie[name] = cookie_data cookie[name]["samesite"] = "None" cookie[name]["secure"] = True cookie[name]["path"] = path - cookie[name]["max-age"] = max_age + cookie[name]["max-age"] = 0 if state.delete else "" msg = "Saved state in cookie {name} with properties {props}".format( name=name, props=list(cookie[name].items()) From a23d7a770a86e3365f20e5a8b9bb07a7d9c69211 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 1 Mar 2020 16:11:52 +0200 Subject: [PATCH 003/288] Add version module and expose version Signed-off-by: Ivan Kanakarakis --- src/satosa/__init__.py | 11 ++--------- src/satosa/proxy_server.py | 13 +++---------- src/satosa/version.py | 11 +++++++++++ 3 files changed, 16 insertions(+), 19 deletions(-) 
create mode 100644 src/satosa/version.py diff --git a/src/satosa/__init__.py b/src/satosa/__init__.py index 52adfa0d1..895e0166f 100644 --- a/src/satosa/__init__.py +++ b/src/satosa/__init__.py @@ -1,11 +1,4 @@ # -*- coding: utf-8 -*- -""" - satosa - ~~~~~~~~~~~~~~~~ +"""SATOSA: An any to any Single Sign On (SSO) proxy.""" - An any to any Single Sign On (SSO) proxy. - Has support for SAML2, OpenID Connect and some OAUth2 variants. - - :copyright: (c) 2016 by UmeĆ„ University. - :license: APACHE 2.0, see LICENSE for more details. -""" +from .version import version as __version__ diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index c1c12d2cc..66a9154c6 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -5,9 +5,9 @@ import sys from urllib.parse import parse_qsl -import pkg_resources - from cookies_samesite_compat import CookiesSameSiteCompatMiddleware + +import satosa from .base import SATOSABase from .context import Context from .response import ServiceError, NotFound @@ -144,14 +144,7 @@ def make_app(satosa_config): root_logger.addHandler(stderr_handler) root_logger.setLevel(logging.DEBUG) - try: - _ = pkg_resources.get_distribution(module.__name__) - logline = "Running SATOSA version {}".format( - pkg_resources.get.get_distribution("SATOSA").version - ) - logger.info(logline) - except (NameError, pkg_resources.DistributionNotFound): - pass + logger.info("Running SATOSA version {v}".format(v=satosa.__version__)) res1 = WsgiApplication(satosa_config) res2 = CookiesSameSiteCompatMiddleware(res1, satosa_config) diff --git a/src/satosa/version.py b/src/satosa/version.py new file mode 100644 index 000000000..8025c9e3c --- /dev/null +++ b/src/satosa/version.py @@ -0,0 +1,11 @@ +import pkg_resources as _pkg_resources + + +def _parse_version(): + data = _pkg_resources.get_distribution('satosa') + value = _pkg_resources.parse_version(data.version) + return value + + +version_info = _parse_version() +version = str(version_info) 
From 589a79a05024324e42c1fc50d28eda659b3a0c93 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 3 Mar 2020 00:54:24 +0200 Subject: [PATCH 004/288] Remove references to specific micro_services from core Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 14 -------------- tests/satosa/test_base.py | 12 ------------ 2 files changed, 26 deletions(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index ae041ab0e..a593af616 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -14,8 +14,6 @@ from .context import Context from .exception import SATOSAConfigurationError from .exception import SATOSAError, SATOSAAuthenticationError, SATOSAUnknownError -from .micro_services.account_linking import AccountLinking -from .micro_services.consent import Consent from .plugin_loader import load_backends, load_frontends from .plugin_loader import load_request_microservices, load_response_microservices from .routing import ModuleRouter, SATOSANoBoundEndpointError @@ -84,7 +82,6 @@ def __init__(self, config): self.config["MICRO_SERVICES"], self.config["INTERNAL_ATTRIBUTES"], self.config["BASE"])) - self._verify_response_micro_services(self.response_micro_services) self._link_micro_services(self.response_micro_services, self._auth_resp_finish) self.module_router = ModuleRouter(frontends, backends, @@ -99,17 +96,6 @@ def _link_micro_services(self, micro_services, finisher): micro_services[-1].next = finisher - def _verify_response_micro_services(self, response_micro_services): - account_linking_index = next((i for i in range(len(response_micro_services)) - if isinstance(response_micro_services[i], AccountLinking)), -1) - if account_linking_index > 0: - raise SATOSAConfigurationError("Account linking must be configured first in the list of micro services") - - consent_index = next((i for i in range(len(response_micro_services)) - if isinstance(response_micro_services[i], Consent)), -1) - if consent_index != -1 and consent_index < len(response_micro_services) - 1: 
- raise SATOSAConfigurationError("Consent must be configured last in the list of micro services") - def _auth_req_callback_func(self, context, internal_request): """ This function is called by a frontend module when an authorization request has been diff --git a/tests/satosa/test_base.py b/tests/satosa/test_base.py index 0cb365742..fe46d59fb 100644 --- a/tests/satosa/test_base.py +++ b/tests/satosa/test_base.py @@ -29,18 +29,6 @@ def test_full_initialisation(self, satosa_config): assert len(base.request_micro_services) == 1 assert len(base.response_micro_services) == 1 - def test_constuctor_should_raise_exception_if_account_linking_is_not_first_in_micro_service_list( - self, satosa_config, account_linking_module_config): - satosa_config["MICRO_SERVICES"].append(account_linking_module_config) - with pytest.raises(SATOSAConfigurationError): - SATOSABase(satosa_config) - - def test_constuctor_should_raise_exception_if_consent_is_not_last_in_micro_service_list( - self, satosa_config, consent_module_config): - satosa_config["MICRO_SERVICES"].insert(0, consent_module_config) - with pytest.raises(SATOSAConfigurationError): - SATOSABase(satosa_config) - def test_auth_resp_callback_func_user_id_from_attrs_is_used_to_override_user_id(self, context, satosa_config): satosa_config["INTERNAL_ATTRIBUTES"]["user_id_from_attrs"] = ["user_id", "domain"] base = SATOSABase(satosa_config) From 136a17aa552fc11143581cf80260b882854be9c9 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 3 Mar 2020 00:55:05 +0200 Subject: [PATCH 005/288] Move all state management of Consent micro-service into the consent module Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 13 +------------ src/satosa/micro_services/consent.py | 18 +++++++++++------- tests/satosa/micro_services/test_consent.py | 7 ++++--- tests/satosa/test_base.py | 19 ------------------- 4 files changed, 16 insertions(+), 41 deletions(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index a593af616..ff5865ec7 
100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -9,8 +9,6 @@ from saml2.s_utils import UnknownSystemEntity from satosa import util -from satosa.micro_services import consent - from .context import Context from .exception import SATOSAConfigurationError from .exception import SATOSAError, SATOSAAuthenticationError, SATOSAUnknownError @@ -112,16 +110,7 @@ def _auth_req_callback_func(self, context, internal_request): """ state = context.state state[STATE_KEY] = {"requester": internal_request.requester} - # TODO consent module should manage any state it needs by itself - try: - state_dict = context.state[consent.STATE_KEY] - except KeyError: - state_dict = context.state[consent.STATE_KEY] = {} - finally: - state_dict.update({ - "filter": internal_request.attributes or [], - "requester_name": internal_request.requester_name, - }) + msg = "Requesting provider: {}".format(internal_request.requester) logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg) logger.info(logline) diff --git a/src/satosa/micro_services/consent.py b/src/satosa/micro_services/consent.py index 968b28327..40e2d37a2 100644 --- a/src/satosa/micro_services/consent.py +++ b/src/satosa/micro_services/consent.py @@ -91,7 +91,7 @@ def _approve_new_consent(self, context, internal_response, id_hash): "attr": internal_response.attributes, "id": id_hash, "redirect_endpoint": "%s/consent%s" % (self.base_url, self.endpoint), - "requester_name": context.state[STATE_KEY]["requester_name"] + "requester_name": internal_response.requester_name, } if self.locked_attr: consent_args["locked_attrs"] = [self.locked_attr] @@ -122,11 +122,15 @@ def process(self, context, internal_response): :param internal_response: the response :return: response """ - consent_state = context.state[STATE_KEY] - - internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_state["filter"]) - id_hash = self._get_consent_id(internal_response.requester, internal_response.subject_id, - 
internal_response.attributes) + context.state[STATE_KEY] = context.state.get(STATE_KEY, {}) + consent_filter = internal_response.attributes or [] + internal_response.attributes = self._filter_attributes( + internal_response.attributes, consent_filter + ) + id_hash = self._get_consent_id( + internal_response.requester, internal_response.subject_id, + internal_response.attributes, + ) try: # Check if consent is already given @@ -225,7 +229,7 @@ def _end_consent(self, context, internal_response): :param internal_response: the response :return: response """ - del context.state[STATE_KEY] + context.state.pop(STATE_KEY, None) return super().process(context, internal_response) def register_endpoints(self): diff --git a/tests/satosa/micro_services/test_consent.py b/tests/satosa/micro_services/test_consent.py index 247b74868..6d9bf21b2 100644 --- a/tests/satosa/micro_services/test_consent.py +++ b/tests/satosa/micro_services/test_consent.py @@ -152,7 +152,7 @@ def test_consent_full_flow(self, context, consent_config, internal_response, int consent_verify_endpoint_regex, consent_registration_endpoint_regex): expected_ticket = "my_ticket" - requester_name = [{"lang": "en", "text": "test requester"}] + requester_name = internal_response.requester_name context.state[consent.STATE_KEY] = {"filter": internal_request.attributes, "requester_name": requester_name} @@ -189,7 +189,8 @@ def test_consent_not_given(self, context, consent_config, internal_response, int responses.add(responses.GET, consent_registration_endpoint_regex, status=200, body=expected_ticket) - context.state[consent.STATE_KEY] = {"filter": [], "requester_name": None} + requester_name = internal_response.requester_name + context.state[consent.STATE_KEY] = {} resp = self.consent_module.process(context, internal_response) @@ -198,7 +199,7 @@ def test_consent_not_given(self, context, consent_config, internal_response, int internal_response, consent_config["sign_key"], self.consent_module.base_url, - None) + 
requester_name) new_context = Context() new_context.state = context.state diff --git a/tests/satosa/test_base.py b/tests/satosa/test_base.py index fe46d59fb..713160aca 100644 --- a/tests/satosa/test_base.py +++ b/tests/satosa/test_base.py @@ -3,16 +3,11 @@ import pytest -from saml2.saml import NAMEID_FORMAT_TRANSIENT -from saml2.saml import NAMEID_FORMAT_PERSISTENT - import satosa from satosa import util from satosa.base import SATOSABase -from satosa.exception import SATOSAConfigurationError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData -from satosa.micro_services import consent from satosa.satosa_config import SATOSAConfig @@ -44,20 +39,6 @@ def test_auth_resp_callback_func_user_id_from_attrs_is_used_to_override_user_id( expected_user_id = "user@example.com" assert internal_resp.subject_id == expected_user_id - def test_auth_req_callback_stores_state_for_consent(self, context, satosa_config): - base = SATOSABase(satosa_config) - - context.target_backend = satosa_config["BACKEND_MODULES"][0]["name"] - requester_name = [{"lang": "en", "text": "Test EN"}, {"lang": "sv", "text": "Test SV"}] - internal_req = InternalData( - subject_type=NAMEID_FORMAT_TRANSIENT, requester_name=requester_name, - ) - internal_req.attributes = ["attr1", "attr2"] - base._auth_req_callback_func(context, internal_req) - - assert context.state[consent.STATE_KEY]["requester_name"] == internal_req.requester_name - assert context.state[consent.STATE_KEY]["filter"] == internal_req.attributes - def test_auth_resp_callback_func_hashes_all_specified_attributes(self, context, satosa_config): satosa_config["INTERNAL_ATTRIBUTES"]["hash"] = ["user_id", "mail"] base = SATOSABase(satosa_config) From 7c7f4ff8371f2f1ff5cf60dd02290e17fa41de0e Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 4 Mar 2020 16:35:17 +0200 Subject: [PATCH 006/288] Do not filter attributes before reaching the Consent Service Signed-off-by: Ivan Kanakarakis --- 
src/satosa/micro_services/consent.py | 7 ++----- tests/satosa/micro_services/test_consent.py | 21 --------------------- 2 files changed, 2 insertions(+), 26 deletions(-) diff --git a/src/satosa/micro_services/consent.py b/src/satosa/micro_services/consent.py index 40e2d37a2..afad940e2 100644 --- a/src/satosa/micro_services/consent.py +++ b/src/satosa/micro_services/consent.py @@ -123,12 +123,9 @@ def process(self, context, internal_response): :return: response """ context.state[STATE_KEY] = context.state.get(STATE_KEY, {}) - consent_filter = internal_response.attributes or [] - internal_response.attributes = self._filter_attributes( - internal_response.attributes, consent_filter - ) id_hash = self._get_consent_id( - internal_response.requester, internal_response.subject_id, + internal_response.requester, + internal_response.subject_id, internal_response.attributes, ) diff --git a/tests/satosa/micro_services/test_consent.py b/tests/satosa/micro_services/test_consent.py index 6d9bf21b2..514367300 100644 --- a/tests/satosa/micro_services/test_consent.py +++ b/tests/satosa/micro_services/test_consent.py @@ -217,27 +217,6 @@ def test_filter_attributes(self): filtered_attributes = self.consent_module._filter_attributes(ATTRIBUTES, FILTER) assert Counter(filtered_attributes.keys()) == Counter(FILTER) - @responses.activate - def test_manage_consent_filters_attributes_before_send_to_consent_service(self, context, internal_request, - internal_response, - consent_verify_endpoint_regex): - approved_attributes = ["foo", "bar"] - # fake previous consent - responses.add(responses.GET, consent_verify_endpoint_regex, status=200, - body=json.dumps(approved_attributes)) - - attributes = {"foo": "123", "bar": "456", "abc": "should be filtered"} - internal_response.attributes = attributes - - context.state[consent.STATE_KEY] = {"filter": approved_attributes} - self.consent_module.process(context, internal_response) - - consent_hash = 
urlparse(responses.calls[0].request.url).path.split("/")[2] - expected_hash = self.consent_module._get_consent_id(internal_response.requester, internal_response.subject_id, - {k: v for k, v in attributes.items() if - k in approved_attributes}) - assert consent_hash == expected_hash - @responses.activate def test_manage_consent_without_filter_passes_through_all_attributes(self, context, internal_response, consent_verify_endpoint_regex): From 70549c7a6f0d272f64f4b525f8b84478fab11076 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 6 Mar 2020 01:49:13 +0200 Subject: [PATCH 007/288] Update doc to reference subject instead of user Signed-off-by: Ivan Kanakarakis --- doc/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/README.md b/doc/README.md index 6722fa413..29a2a9182 100644 --- a/doc/README.md +++ b/doc/README.md @@ -106,18 +106,18 @@ attributes (in the proxy backend) <-> internal <-> returned attributes (from the ### user_id_from_attrs -The user identifier generated by the backend module can be overridden by +The subject identifier generated by the backend module can be overridden by specifying a list of internal attribute names under the `user_id_from_attrs` key. The attribute values of the attributes specified in this list will be -concatenated and hashed to be used as the user identifier. +concatenated and used as the subject identifier. ### user_id_to_attr -To store the user identifier in a specific internal attribute, the internal +To store the subject identifier in a specific internal attribute, the internal attribute name can be specified in `user_id_to_attr`. When the [ALService](https://github.com/its-dirg/ALservice) is used for account linking, the `user_id_to_attr` configuration parameter should be set, since that -service will overwrite the user identifier generated by the proxy. +service will overwrite the subject identifier generated by the proxy. 
### hash **DEPRECATED - use the hasher micro-service** From f5bef2d5951e9da2d4412155849a8593e30a676f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 16:22:27 +0200 Subject: [PATCH 008/288] Rearrange config settings - Remove CUSTOM_PLUGIN_MODULE_PATHS. This is a hack to lookup modules outside the PYTHON_PATH. It should not be used. - Group cookie-state settings - Group path settings to other/relative files; but at the same time, add new lines to make jumping between the settings eaiser. Signed-off-by: Ivan Kanakarakis --- example/proxy_conf.yaml.example | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/example/proxy_conf.yaml.example b/example/proxy_conf.yaml.example index 0289a60b5..058d8987c 100644 --- a/example/proxy_conf.yaml.example +++ b/example/proxy_conf.yaml.example @@ -1,24 +1,23 @@ -#--- SATOSA Config ---# BASE: https://example.com -INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + COOKIE_STATE_NAME: "SATOSA_STATE" CONTEXT_STATE_DELETE: yes STATE_ENCRYPTION_KEY: "asdASD123" -CUSTOM_PLUGIN_MODULE_PATHS: - - "plugins/backends" - - "plugins/frontends" - - "plugins/micro_services" + +cookies_samesite_compat: + - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"] + +INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + BACKEND_MODULES: - "plugins/backends/saml2_backend.yaml" + FRONTEND_MODULES: - "plugins/frontends/saml2_frontend.yaml" + MICRO_SERVICES: - "plugins/microservices/static_attributes.yaml" -cookies_samesite_compat: [ - ("SATOSA_STATE", "SATOSA_STATE_LEGACY"), -] - LOGGING: version: 1 formatters: From 78dc701f95b21d531ccbc00f698d17af63083477 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 16:26:07 +0200 Subject: [PATCH 009/288] Reset example logging configuration - Make the formatter useful - Add example handlers - Set all related packages' loggers to DEBUG - Set the root logger and handler Signed-off-by: Ivan Kanakarakis --- example/proxy_conf.yaml.example | 48 
++++++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/example/proxy_conf.yaml.example b/example/proxy_conf.yaml.example index 058d8987c..760714b00 100644 --- a/example/proxy_conf.yaml.example +++ b/example/proxy_conf.yaml.example @@ -22,26 +22,50 @@ LOGGING: version: 1 formatters: simple: - format: "[%(asctime)-19.19s] [%(levelname)-5.5s]: %(message)s" + format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" handlers: - console: + stdout: class: logging.StreamHandler + stream: "ext://sys.stdout" level: DEBUG formatter: simple - stream: ext://sys.stdout - info_file_handler: - class: logging.handlers.RotatingFileHandler - level: INFO + syslog: + class: logging.handlers.SysLogHandler + address: "/dev/log" + level: DEBUG + formatter: simple + debug_file: + class: logging.FileHandler + filename: satosa-debug.log + encoding: utf8 + level: DEBUG + formatter: simple + error_file: + class: logging.FileHandler + filename: satosa-error.log + encoding: utf8 + level: ERROR formatter: simple - filename: info.log + info_file: + class: logging.FileHandler + filename: satosa-info.log + encoding: utf8 maxBytes: 10485760 # 10MB backupCount: 20 - encoding: utf8 + level: INFO + formatter: simple loggers: satosa: level: DEBUG - handlers: [console] - propagate: no + saml2: + level: DEBUG + oidcendpoint: + level: DEBUG + pyop: + level: DEBUG + oic: + level: DEBUG root: - level: INFO - handlers: [info_file_handler] + level: DEBUG + handlers: + - stdout From a1be76265bee5f64117d5bb3c3009bc8126b31fc Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:03:02 +0200 Subject: [PATCH 010/288] Set default logger Signed-off-by: Ivan Kanakarakis --- src/satosa/proxy_server.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 66a9154c6..868ffd5b1 100644 --- a/src/satosa/proxy_server.py +++ 
b/src/satosa/proxy_server.py @@ -134,15 +134,25 @@ def __call__(self, environ, start_response, debug=False): def make_app(satosa_config): try: - if "LOGGING" in satosa_config: - logging.config.dictConfig(satosa_config["LOGGING"]) - else: - stderr_handler = logging.StreamHandler(sys.stderr) - stderr_handler.setLevel(logging.DEBUG) - - root_logger = logging.getLogger("") - root_logger.addHandler(stderr_handler) - root_logger.setLevel(logging.DEBUG) + default_logging_config = { + "version": 1, + "formatters": { + "simple": { + "format": "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "level": "DEBUG", + "formatter": "simple", + } + }, + "loggers": {"satosa": {"level": "DEBUG"}}, + "root": {"level": "DEBUG", "handlers": ["stdout"]}, + } + logging.config.dictConfig(satosa_config.get("LOGGING", default_logging_config)) logger.info("Running SATOSA version {v}".format(v=satosa.__version__)) From 7e213e88eadc811e5781e861ca3a3dd10bb500fd Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:53:32 +0200 Subject: [PATCH 011/288] Add flake8 preferences Signed-off-by: Ivan Kanakarakis --- setup.cfg | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 224a77957..88673863b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,25 @@ [metadata] -description-file = README.md \ No newline at end of file +description-file = README.md + + +[flake8] +max-line-length = 88 +author-attribute = forbidden +no-accept-encodings = True +assertive-snakecase = True +# assertive-test-pattern = +inline-quotes = " +multiline-quotes = """ +docstring-quotes = """ +application-import-names = satosa + +hang_closing = false +doctests = false +max-complexity = 10 +exclude = + .git + __pycache__ + doc/source/conf.py + docs/source/conf.py + build + dist From 61ce2534e130ebc14cbd35bec80b7cab3d04d659 
Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:56:59 +0200 Subject: [PATCH 012/288] Format code in wsgi module Signed-off-by: Ivan Kanakarakis --- src/satosa/wsgi.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/src/satosa/wsgi.py b/src/satosa/wsgi.py index e5e9e1948..f53751ed1 100644 --- a/src/satosa/wsgi.py +++ b/src/satosa/wsgi.py @@ -17,13 +17,14 @@ def main(): global app - parser = argparse.ArgumentParser(description='Process some integers.') - parser.add_argument('port', type=int) - parser.add_argument('--keyfile', type=str) - parser.add_argument('--certfile', type=str) - parser.add_argument('--host', type=str) - parser.add_argument('-d', action='store_true', dest="debug", - help="enable debug mode.") + parser = argparse.ArgumentParser(description="Process some integers.") + parser.add_argument("port", type=int) + parser.add_argument("--keyfile", type=str) + parser.add_argument("--certfile", type=str) + parser.add_argument("--host", type=str) + parser.add_argument( + "-d", action='store_true', dest="debug", help="enable debug mode." 
+ ) args = parser.parse_args() if (args.keyfile and not args.certfile) or (args.certfile and not args.keyfile): @@ -45,5 +46,5 @@ def main(): run_simple('localhost', args.port, app, ssl_context=ssl_context) -if __name__ == '__main__': +if __name__ == "__main__": main() From e29c6e9434ceaa53e121ab082a8dca0eccbb444d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:58:00 +0200 Subject: [PATCH 013/288] Remove debug middleware Signed-off-by: Ivan Kanakarakis --- src/satosa/wsgi.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/satosa/wsgi.py b/src/satosa/wsgi.py index f53751ed1..502048e9a 100644 --- a/src/satosa/wsgi.py +++ b/src/satosa/wsgi.py @@ -1,9 +1,7 @@ import argparse -import functools import os import sys -from werkzeug.debug import DebuggedApplication from werkzeug.serving import run_simple from satosa.proxy_server import make_app @@ -22,18 +20,12 @@ def main(): parser.add_argument("--keyfile", type=str) parser.add_argument("--certfile", type=str) parser.add_argument("--host", type=str) - parser.add_argument( - "-d", action='store_true', dest="debug", help="enable debug mode." 
- ) args = parser.parse_args() if (args.keyfile and not args.certfile) or (args.certfile and not args.keyfile): print("Both keyfile and certfile must be specified for HTTPS.") sys.exit() - if args.debug: - app.app = functools.partial(app.app, debug=True) - app = DebuggedApplication(app) if (args.keyfile and args.certfile): ssl_context = (args.certfile, args.keyfile) From 702e121d225bdea96ad5df00f55b6c4310775a5d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:59:03 +0200 Subject: [PATCH 014/288] Fail with status code 1 Signed-off-by: Ivan Kanakarakis --- src/satosa/wsgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/wsgi.py b/src/satosa/wsgi.py index 502048e9a..402fd626c 100644 --- a/src/satosa/wsgi.py +++ b/src/satosa/wsgi.py @@ -24,7 +24,7 @@ def main(): if (args.keyfile and not args.certfile) or (args.certfile and not args.keyfile): print("Both keyfile and certfile must be specified for HTTPS.") - sys.exit() + sys.exit(1) if (args.keyfile and args.certfile): From 57e2f1b0724ed7975528e8e8cf5c8582e1f27e9f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 7 Mar 2020 22:59:28 +0200 Subject: [PATCH 015/288] Follow a single execution path Signed-off-by: Ivan Kanakarakis --- src/satosa/wsgi.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/satosa/wsgi.py b/src/satosa/wsgi.py index 402fd626c..86220eb06 100644 --- a/src/satosa/wsgi.py +++ b/src/satosa/wsgi.py @@ -26,16 +26,13 @@ def main(): print("Both keyfile and certfile must be specified for HTTPS.") sys.exit(1) - - if (args.keyfile and args.certfile): - ssl_context = (args.certfile, args.keyfile) - else: - ssl_context = None - - if args.host: - run_simple(args.host, args.port, app, ssl_context=ssl_context) - else: - run_simple('localhost', args.port, app, ssl_context=ssl_context) + ssl_context = ( + (args.certfile, args.keyfile) + if args.keyfile and args.certfile + else None + ) + host = args.host or 
"localhost" + run_simple(host, args.port, app, ssl_context=ssl_context) if __name__ == "__main__": From 2e5b0f8441fd709c4df240a3c41792602e3db784 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 1 Mar 2020 16:41:03 +0200 Subject: [PATCH 016/288] Remove deprecated modules and options Signed-off-by: Ivan Kanakarakis --- README.md | 21 +- doc/README.md | 8 - doc/mod_wsgi.md | 2 - src/satosa/backends/saml2.py | 1 - src/satosa/base.py | 26 +-- src/satosa/deprecated.py | 272 ------------------------- src/satosa/frontends/openid_connect.py | 1 - src/satosa/frontends/saml2.py | 2 - src/satosa/internal_data.py | 14 -- tests/satosa/test_base.py | 20 -- 10 files changed, 11 insertions(+), 356 deletions(-) delete mode 100644 src/satosa/deprecated.py delete mode 100644 src/satosa/internal_data.py diff --git a/README.md b/README.md index cfa4ae5dd..daefcd7e3 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status](https://travis-ci.org/IdentityPython/SATOSA.svg?branch=travis)](https://travis-ci.org/IdentityPython/SATOSA) [![PyPI](https://img.shields.io/pypi/v/SATOSA.svg)](https://pypi.python.org/pypi/SATOSA) -A configurable proxy for translating between different authentication protocols such as SAML2, +A configurable proxy for translating between different authentication protocols such as SAML2, OpenID Connect and OAuth2. # Table of Contents @@ -19,7 +19,6 @@ OpenID Connect and OAuth2. - [attributes](doc/README.md#attributes) - [user_id_from_attrs](doc/README.md#user_id_from_attrs) - [user_id_to_attr](doc/README.md#user_id_to_attr) - - [hash](doc/README.md#hash) - [Plugins](doc/README.md#plugins) - [SAML2 plugins](doc/README.md#saml_plugin) - [Metadata](doc/README.md#metadata) @@ -36,26 +35,26 @@ OpenID Connect and OAuth2. # Use cases -In this section a set of use cases for the proxy is presented. +In this section a set of use cases for the proxy is presented. 
## SAML2<->SAML2 -There are SAML2 service providers for example Box which is not able to handle multiple identity -providers. For more information about how to set up, configure and run such a proxy instance +There are SAML2 service providers for example Box which is not able to handle multiple identity +providers. For more information about how to set up, configure and run such a proxy instance please visit [Single Service Provider<->Multiple Identity providers](doc/one-to-many.md) -If an identity provider can not communicate with service providers in for example a federation the +If an identity provider can not communicate with service providers in for example a federation the can convert request and make the communication possible. ## SAML2<->Social logins -This setup makes it possible to connect a SAML2 service provider to multiple social media identity -providers such as Google and Facebook. The proxy makes it possible to mirror a identity provider by -generating SAML2 metadata corresponding that provider and create dynamic endpoint which +This setup makes it possible to connect a SAML2 service provider to multiple social media identity +providers such as Google and Facebook. The proxy makes it possible to mirror a identity provider by +generating SAML2 metadata corresponding that provider and create dynamic endpoint which are connected to a single identity provider. 
-For more information about how to set up, configure and run such a proxy instance please visit +For more information about how to set up, configure and run such a proxy instance please visit [SAML2<->Social logins](doc/SAML2-to-Social_logins.md) ## SAML2<->OIDC -The proxy is able to act as a proxy between a SAML2 service provider and a OpenID connect provider +The proxy is able to act as a proxy between a SAML2 service provider and a OpenID connect provider [SAML2<->OIDC](doc/saml2-to-oidc.md) # Contact diff --git a/doc/README.md b/doc/README.md index 29a2a9182..11f12b9bd 100644 --- a/doc/README.md +++ b/doc/README.md @@ -44,7 +44,6 @@ in the [example directory](../example). | `BACKEND_MODULES` | string[] | `[openid_connect_backend.yaml, saml2_backend.yaml]` | list of plugin configuration file paths, describing enabled backends | | `FRONTEND_MODULES` | string[] | `[saml2_frontend.yaml, openid_connect_frontend.yaml]` | list of plugin configuration file paths, describing enabled frontends | | `MICRO_SERVICES` | string[] | `[statistics_service.yaml]` | list of plugin configuration file paths, describing enabled microservices | -| `USER_ID_HASH_SALT` | string | `61a89d2db0b9e1e2` | **DEPRECATED - use the hasher micro-service** salt used when creating the persistent user identifier, will be overridden by the environment variable `SATOSA_USER_ID_HASH_SALT` if it is set | | `LOGGING` | dict | see [Python logging.conf](https://docs.python.org/3/library/logging.config.html) | optional configuration of application logging | @@ -120,13 +119,6 @@ linking, the `user_id_to_attr` configuration parameter should be set, since that service will overwrite the subject identifier generated by the proxy. -### hash **DEPRECATED - use the hasher micro-service** -The proxy can hash any attribute value (e.g., for obfuscation) before passing -it on to the client. 
The `hash` key should contain a list of all attribute names -for which the corresponding attribute values should be hashed before being -returned to the client. - - ## Plugins The authentication protocol specific communication is handled by different plugins, divided into frontends (receiving requests from clients) and backends (sending requests diff --git a/doc/mod_wsgi.md b/doc/mod_wsgi.md index 8605c7abb..e739028dc 100644 --- a/doc/mod_wsgi.md +++ b/doc/mod_wsgi.md @@ -110,8 +110,6 @@ BASE: https://some.host.org STATE_ENCRYPTION_KEY: fazmC8yELv38f9PF0kbS -USER_ID_HASH_SALT: i7tmt34rzb2QRDgN1Ggy - INTERNAL_ATTRIBUTES: "/etc/satosa/internal_attributes.yaml" COOKIE_STATE_NAME: "SATOSA_STATE" diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 024e948d8..c2e39f17a 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -30,7 +30,6 @@ MetadataDescription, OrganizationDesc, ContactPersonDesc, UIInfoDesc ) from satosa.backends.base import BackendModule -from satosa.deprecated import SAMLInternalResponse logger = logging.getLogger(__name__) diff --git a/src/satosa/base.py b/src/satosa/base.py index ff5865ec7..d458293e1 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -4,7 +4,6 @@ import json import logging import uuid -import warnings as _warnings from saml2.s_utils import UnknownSystemEntity @@ -17,10 +16,9 @@ from .routing import ModuleRouter, SATOSANoBoundEndpointError from .state import cookie_to_state, SATOSAStateError, State, state_to_cookie -from satosa.deprecated import hash_attributes - import satosa.logging_util as lu + logger = logging.getLogger(__name__) STATE_KEY = "SATOSA_BASE" @@ -41,22 +39,6 @@ def __init__(self, config): """ self.config = config - for option in ["USER_ID_HASH_SALT"]: - if option in self.config: - msg = ( - "'{opt}' configuration option is deprecated." - " Use the hasher microservice instead." 
- ).format(opt=option) - _warnings.warn(msg, DeprecationWarning) - - for option in ["hash"]: - if option in self.config["INTERNAL_ATTRIBUTES"]: - msg = ( - "'{opt}' configuration option is deprecated." - " Use the hasher microservice instead." - ).format(opt=option) - _warnings.warn(msg, DeprecationWarning) - logger.info("Loading backend modules...") backends = load_backends(self.config, self._auth_resp_callback_func, self.config["INTERNAL_ATTRIBUTES"]) @@ -130,12 +112,6 @@ def _auth_resp_finish(self, context, internal_response): if user_id_to_attr: internal_response.attributes[user_id_to_attr] = [internal_response.subject_id] - hash_attributes( - self.config["INTERNAL_ATTRIBUTES"].get("hash", []), - internal_response.attributes, - self.config.get("USER_ID_HASH_SALT", ""), - ) - # remove all session state unless CONTEXT_STATE_DELETE is False context.state.delete = self.config.get("CONTEXT_STATE_DELETE", True) context.request = None diff --git a/src/satosa/deprecated.py b/src/satosa/deprecated.py deleted file mode 100644 index 2ab16c6ed..000000000 --- a/src/satosa/deprecated.py +++ /dev/null @@ -1,272 +0,0 @@ -import datetime -import warnings as _warnings -from enum import Enum - -from saml2.saml import NAMEID_FORMAT_TRANSIENT -from saml2.saml import NAMEID_FORMAT_PERSISTENT -from saml2.saml import NAMEID_FORMAT_EMAILADDRESS -from saml2.saml import NAMEID_FORMAT_UNSPECIFIED - -from satosa.internal import AuthenticationInformation as _AuthenticationInformation -from satosa.internal import InternalData as _InternalData -from satosa import util - - -class InternalRequest(_InternalData): - def __init__(self, user_id_hash_type, requester, requester_name=None): - msg = ( - "InternalRequest is deprecated." - " Use satosa.internal.InternalData class instead." 
- ) - _warnings.warn(msg, DeprecationWarning) - super().__init__( - user_id_hash_type=user_id_hash_type, - requester=requester, - requester_name=requester_name, - ) - - @classmethod - def from_dict(cls, data): - instance = cls( - user_id_hash_type=data.get("hash_type"), - requester=data.get("requester"), - requester_name=data.get("requester_name"), - ) - return instance - - -class InternalResponse(_InternalData): - def __init__(self, auth_info=None): - msg = ( - "InternalResponse is deprecated." - " Use satosa.internal.InternalData class instead." - ) - _warnings.warn(msg, DeprecationWarning) - auth_info = auth_info or _AuthenticationInformation() - super().__init__(auth_info=auth_info) - - @classmethod - def from_dict(cls, data): - """ - :type data: dict[str, dict[str, str] | str] - :rtype: satosa.internal_data.InternalResponse - :param data: A dict representation of an InternalResponse object - :return: An InternalResponse object - """ - auth_info = _AuthenticationInformation.from_dict(data.get("auth_info")) - instance = cls(auth_info=auth_info) - instance.user_id_hash_type = data.get("hash_type") - instance.attributes = data.get("attributes", {}) - instance.user_id = data.get("user_id") - instance.requester = data.get("requester") - return instance - - -class SAMLInternalResponse(InternalResponse): - """ - Like the parent InternalResponse, holds internal representation of - service related data, but includes additional details relevant to - SAML interoperability. - - :type name_id: instance of saml2.saml.NameID from pysaml2 - """ - - def __init__(self, auth_info=None): - msg = ( - "SAMLInternalResponse is deprecated." - " Use satosa.internal.InternalData class instead." 
- ) - _warnings.warn(msg, DeprecationWarning) - super().__init__(auth_info=auth_info) - - -class UserIdHashType(Enum): - """ - All different user id hash types - """ - - transient = 1 - persistent = 2 - pairwise = 3 - public = 4 - emailaddress = 5 - unspecified = 6 - - def __getattr__(self, name): - if name != "_value_": - msg = "UserIdHashType is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - return self.__getattribute__(name) - - @classmethod - def from_string(cls, str): - msg = "UserIdHashType is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - try: - return getattr(cls, str) - except AttributeError: - raise ValueError("Unknown hash type '{}'".format(str)) - - -class UserIdHasher(object): - """ - Class for creating different user id types - """ - - STATE_KEY = "IDHASHER" - - @staticmethod - def save_state(internal_request, state): - """ - Saves all necessary information needed by the UserIdHasher - - :type internal_request: satosa.internal_data.InternalRequest - - :param internal_request: The request - :param state: The current state - """ - state_data = {"hash_type": internal_request.user_id_hash_type} - state[UserIdHasher.STATE_KEY] = state_data - - @staticmethod - def hash_data(salt, value): - """ - Hashes a value together with a salt. - :type salt: str - :type value: str - :param salt: hash salt - :param value: value to hash together with the salt - :return: hash value (SHA512) - """ - msg = "UserIdHasher is deprecated; use satosa.util.hash_data instead." 
- _warnings.warn(msg, DeprecationWarning) - return util.hash_data(salt, value) - - @staticmethod - def hash_type(state): - state_data = state[UserIdHasher.STATE_KEY] - hash_type = state_data["hash_type"] - return hash_type - - @staticmethod - def hash_id(salt, user_id, requester, state): - """ - Sets a user id to the internal_response, - in the format specified by the internal response - - :type salt: str - :type user_id: str - :type requester: str - :type state: satosa.state.State - :rtype: str - - :param salt: A salt string for the ID hashing - :param user_id: the user id - :param user_id_hash_type: Hashing type - :param state: The current state - :return: the internal_response containing the hashed user ID - """ - hash_type_to_format = { - NAMEID_FORMAT_TRANSIENT: "{id}{req}{time}", - NAMEID_FORMAT_PERSISTENT: "{id}{req}", - "pairwise": "{id}{req}", - "public": "{id}", - NAMEID_FORMAT_EMAILADDRESS: "{id}", - NAMEID_FORMAT_UNSPECIFIED: "{id}", - } - - format_args = { - "id": user_id, - "req": requester, - "time": datetime.datetime.utcnow().timestamp(), - } - - hash_type = UserIdHasher.hash_type(state) - try: - fmt = hash_type_to_format[hash_type] - except KeyError as e: - raise ValueError("Unknown hash type: {}".format(hash_type)) from e - else: - user_id = fmt.format(**format_args) - - hasher = ( - (lambda salt, value: value) - if hash_type - in [NAMEID_FORMAT_EMAILADDRESS, NAMEID_FORMAT_UNSPECIFIED] - else util.hash_data - ) - return hasher(salt, user_id) - - -def saml_name_id_format_to_hash_type(name_format): - """ - Translate pySAML2 name format to satosa format - - :type name_format: str - :rtype: satosa.internal_data.UserIdHashType - :param name_format: SAML2 name format - :return: satosa format - """ - msg = "saml_name_id_format_to_hash_type is deprecated and will be removed." 
- _warnings.warn(msg, DeprecationWarning) - - name_id_format_to_hash_type = { - NAMEID_FORMAT_TRANSIENT: UserIdHashType.transient, - NAMEID_FORMAT_PERSISTENT: UserIdHashType.persistent, - NAMEID_FORMAT_EMAILADDRESS: UserIdHashType.emailaddress, - NAMEID_FORMAT_UNSPECIFIED: UserIdHashType.unspecified, - } - - return name_id_format_to_hash_type.get( - name_format, UserIdHashType.transient - ) - - -def hash_type_to_saml_name_id_format(hash_type): - """ - Translate satosa format to pySAML2 name format - - :type hash_type: satosa.internal_data.UserIdHashType - :rtype: str - :param hash_type: satosa format - :return: pySAML2 name format - """ - msg = "hash_type_to_saml_name_id_format is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - - hash_type_to_name_id_format = { - UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT, - UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT, - UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS, - UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED, - } - - return hash_type_to_name_id_format.get(hash_type, NAMEID_FORMAT_PERSISTENT) - - -def oidc_subject_type_to_hash_type(subject_type): - msg = "oidc_subject_type_to_hash_type is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - - if subject_type == "public": - return UserIdHashType.public - - return UserIdHashType.pairwise - - -def hash_attributes(hash_attributes, internal_attributes, salt): - msg = ( - "'USER_ID_HASH_SALT' configuration option is deprecated." - " 'hash' configuration option is deprecated." - " Use the hasher microservice instead." 
- ) - _warnings.warn(msg, DeprecationWarning) - - # Hash all attributes specified in INTERNAL_ATTRIBUTES["hash"] - for attribute in hash_attributes: - # hash all attribute values individually - if attribute in internal_attributes: - hashed_values = [ - util.hash_data(salt, v) for v in internal_attributes[attribute] - ] - internal_attributes[attribute] = hashed_values diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index e93cf4998..1e0d20793 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -29,7 +29,6 @@ import satosa.logging_util as lu from satosa.internal import InternalData -from satosa.deprecated import oidc_subject_type_to_hash_type logger = logging.getLogger(__name__) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 6ce12a476..259a1728a 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -38,8 +38,6 @@ import satosa.logging_util as lu from satosa.internal import InternalData -from satosa.deprecated import saml_name_id_format_to_hash_type -from satosa.deprecated import hash_type_to_saml_name_id_format logger = logging.getLogger(__name__) diff --git a/src/satosa/internal_data.py b/src/satosa/internal_data.py deleted file mode 100644 index 7e3a8e89e..000000000 --- a/src/satosa/internal_data.py +++ /dev/null @@ -1,14 +0,0 @@ -import warnings as _warnings - -from satosa.internal import InternalData -from satosa.internal import AuthenticationInformation -from satosa.deprecated import UserIdHashType -from satosa.deprecated import UserIdHasher -from satosa.deprecated import InternalRequest -from satosa.deprecated import InternalResponse - - -_warnings.warn( - "internal_data is deprecated; use satosa.internal instead.", - DeprecationWarning, -) diff --git a/tests/satosa/test_base.py b/tests/satosa/test_base.py index 713160aca..0f2a35f50 100644 --- a/tests/satosa/test_base.py +++ b/tests/satosa/test_base.py @@ 
-1,10 +1,8 @@ -import copy from unittest.mock import Mock import pytest import satosa -from satosa import util from satosa.base import SATOSABase from satosa.internal import AuthenticationInformation from satosa.internal import InternalData @@ -39,24 +37,6 @@ def test_auth_resp_callback_func_user_id_from_attrs_is_used_to_override_user_id( expected_user_id = "user@example.com" assert internal_resp.subject_id == expected_user_id - def test_auth_resp_callback_func_hashes_all_specified_attributes(self, context, satosa_config): - satosa_config["INTERNAL_ATTRIBUTES"]["hash"] = ["user_id", "mail"] - base = SATOSABase(satosa_config) - - attributes = {"user_id": ["user"], "mail": ["user@example.com", "user@otherdomain.com"]} - internal_resp = InternalData(auth_info=AuthenticationInformation("", "", "")) - internal_resp.attributes = copy.copy(attributes) - internal_resp.subject_id = "test_user" - context.state[satosa.base.STATE_KEY] = {"requester": "test_requester"} - context.state[satosa.routing.STATE_KEY] = satosa_config["FRONTEND_MODULES"][0]["name"] - - base._auth_resp_callback_func(context, internal_resp) - for attr in satosa_config["INTERNAL_ATTRIBUTES"]["hash"]: - assert internal_resp.attributes[attr] == [ - util.hash_data(satosa_config.get("USER_ID_HASH_SALT", ""), v) - for v in attributes[attr] - ] - def test_auth_resp_callback_func_respects_user_id_to_attr(self, context, satosa_config): satosa_config["INTERNAL_ATTRIBUTES"]["user_id_to_attr"] = "user_id" base = SATOSABase(satosa_config) From 0f198742c7271c48d4ff80683ae4d91006193709 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 10 Mar 2020 08:39:30 +0200 Subject: [PATCH 017/288] Fix info_file logging handler example Signed-off-by: Ivan Kanakarakis --- example/proxy_conf.yaml.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/proxy_conf.yaml.example b/example/proxy_conf.yaml.example index 760714b00..d6937f594 100644 --- a/example/proxy_conf.yaml.example +++ 
b/example/proxy_conf.yaml.example @@ -47,7 +47,7 @@ LOGGING: level: ERROR formatter: simple info_file: - class: logging.FileHandler + class: logging.handlers.RotatingFileHandler filename: satosa-info.log encoding: utf8 maxBytes: 10485760 # 10MB From e48d9bb6ae633bd3c7f10b8e4bb428183e006ecc Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 13 Mar 2020 21:48:50 +0200 Subject: [PATCH 018/288] Remove deprecated internal data properties Signed-off-by: Ivan Kanakarakis --- src/satosa/internal.py | 45 +++--------------------------------------- 1 file changed, 3 insertions(+), 42 deletions(-) diff --git a/src/satosa/internal.py b/src/satosa/internal.py index 2302a3da2..38b82acfb 100644 --- a/src/satosa/internal.py +++ b/src/satosa/internal.py @@ -109,13 +109,6 @@ class InternalData(_Datafy): A base class for the data carriers between frontends/backends """ - _DEPRECATED_TO_NEW_MEMBERS = { - "name_id": "subject_id", - "user_id": "subject_id", - "user_id_hash_type": "subject_type", - "approved_attributes": "attributes", - } - def __init__( self, auth_info=None, @@ -124,10 +117,6 @@ def __init__( subject_id=None, subject_type=None, attributes=None, - user_id=None, - user_id_hash_type=None, - name_id=None, - approved_attributes=None, *args, **kwargs, ): @@ -138,10 +127,6 @@ def __init__( :param subject_id: :param subject_type: :param attributes: - :param user_id: - :param user_id_hash_type: - :param name_id: - :param approved_attributes: :type auth_info: AuthenticationInformation :type requester: str @@ -149,10 +134,6 @@ def __init__( :type subject_id: str :type subject_type: str :type attributes: dict - :type user_id: str - :type user_id_hash_type: str - :type name_id: str - :type approved_attributes: dict """ super().__init__(self, *args, **kwargs) self.auth_info = ( @@ -166,26 +147,6 @@ def __init__( if requester_name is not None else [{"text": requester, "lang": "en"}] ) - self.subject_id = ( - subject_id - if subject_id is not None - else user_id - if user_id is 
not None - else name_id - if name_id is not None - else None - ) - self.subject_type = ( - subject_type - if subject_type is not None - else user_id_hash_type - if user_id_hash_type is not None - else None - ) - self.attributes = ( - attributes - if attributes is not None - else approved_attributes - if approved_attributes is not None - else {} - ) + self.subject_id = subject_id + self.subject_type = subject_type + self.attributes = attributes if attributes is not None else {} From fd50aa336865060f35a577096ea1ff5a3469e98d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 13 Mar 2020 15:37:53 +0200 Subject: [PATCH 019/288] Add KEY_METADATA_STORE to store the saml2 metadata store KEY_METADATA_STORE will hold the metadata store for saml2 backends and frontends. Accessing it that through the Context object will allow one to peek into the metadata of the corresponding backend or frontend. Previously, KEY_BACKEND_METADATA_STORE was used for this reason, but was available only on the backend. It is now a deprecated alias. 
Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 2 +- src/satosa/context.py | 14 ++++++++++---- src/satosa/frontends/saml2.py | 1 + 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index c2e39f17a..13349ee37 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -343,7 +343,7 @@ def authn_response(self, context, binding): logger.debug(logline) raise SATOSAAuthenticationError(context.state, "State did not match relay state") - context.decorate(Context.KEY_BACKEND_METADATA_STORE, self.sp.metadata) + context.decorate(Context.KEY_METADATA_STORE, self.sp.metadata) if self.config.get(SAMLBackend.KEY_MEMORIZE_IDP): issuer = authn_response.response.issuer.text.strip() context.state[Context.KEY_MEMORIZED_IDP] = issuer diff --git a/src/satosa/context.py b/src/satosa/context.py index 2413624d2..196cb6f4d 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -1,3 +1,5 @@ +from warnings import warn as _warn + from satosa.exception import SATOSAError @@ -12,7 +14,7 @@ class Context(object): """ Holds methods for sharing proxy data through the current request """ - KEY_BACKEND_METADATA_STORE = 'metadata_store' + KEY_METADATA_STORE = 'metadata_store' KEY_TARGET_ENTITYID = 'target_entity_id' KEY_FORCE_AUTHN = 'force_authn' KEY_MEMORIZED_IDP = 'memorized_idp' @@ -28,9 +30,13 @@ def __init__(self): self.cookie = None self.state = None - def __repr__(self): - from pprint import pformat - return pformat(vars(self)) + @property + def KEY_BACKEND_METADATA_STORE(self): + msg = "'{old_key}' is deprecated; use '{new_key}' instead.".format( + old_key="KEY_BACKEND_METADATA_STORE", new_key="KEY_METADATA_STORE" + ) + _warn(msg, DeprecationWarning) + return Context.KEY_METADATA_STORE @property def path(self): diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 259a1728a..168dddc66 100644 --- a/src/satosa/frontends/saml2.py +++ 
b/src/satosa/frontends/saml2.py @@ -247,6 +247,7 @@ def _handle_authn_request(self, context, binding_in, idp): idp, idp_policy, requester, context.state ) + context.decorate(Context.KEY_METADATA_STORE, self.idp.metadata) return self.auth_req_callback_func(context, internal_req) def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state): From b96f6fad5f57380689d547d274ded644bb6d6e86 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 17 Mar 2020 23:09:07 +0200 Subject: [PATCH 020/288] Remove hardcoded OS Signed-off-by: Ivan Kanakarakis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 77214ff2c..20fc9b290 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,3 @@ -dist: xenial addons: apt: packages: From cf81b9fadee841e4aa8eec7427e1937788705835 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 17 Mar 2020 23:22:27 +0200 Subject: [PATCH 021/288] Test on python 3.8 Signed-off-by: Ivan Kanakarakis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 20fc9b290..5552b3586 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ jobs: include: - python: 3.6 - python: 3.7 + - python: 3.8 - python: pypy3 - stage: Deploy latest version From f3f2ae9e30a84de4181813e31fc2e1728427f552 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 17 Mar 2020 23:22:37 +0200 Subject: [PATCH 022/288] Test on python 3.9-dev Signed-off-by: Ivan Kanakarakis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 5552b3586..bfc190c9a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,6 +20,7 @@ jobs: - python: 3.6 - python: 3.7 - python: 3.8 + - python: 3.9-dev - python: pypy3 - stage: Deploy latest version From c6bb74a22730bbda80f4123af9b3f6a7a74eac32 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 18 Mar 2020 16:23:26 +0200 Subject: [PATCH 023/288] Replace deprecated key skip_cleanup with cleanup 
jobs.include.deploy: deprecated key skip_cleanup (not supported in dpl v2, use cleanup) See, https://docs.travis-ci.com/user/deployment-v2#cleaning-up-the-git-working-directory > By default your Git working directory will not be cleaned up before the deploy step, so it might contain left over artifacts from previous steps. Signed-off-by: Ivan Kanakarakis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index bfc190c9a..47e745bb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,7 +38,6 @@ jobs: - provider: releases api_key: secure: EOM9qDlyGQrD6NXs8KKMNr2htFXU/H47tO051aA3RKWQrEk7paLXYTDSbQiEq3W9yLg+fifDb0qVqAcFhnV4OWf5ArP++khjaiKQCHYoTaoKIRrTch+12Unq22FEgNj0SYd3HX+CKkG2WpyMoBQAiChgaouDnYIPOvCoqfCxiJzj5e/l5Qomt31smUgZSYhqeDPvX0lN6LP47OLrzsEGDvVxz/fb+EMK3mkCppgPwsB2zy849dER7ofHD6uJiYhY3jP4oCHDBv6GdzqxgMIyDD4zJYh9qCfy1kAwOwc7CYInrELk8GK+YwLFRKMXdTMHu4nYUTgTAJeiXgX6n7oEUfvj4ip+UJ2MfsLdaX7MmgRb2sVStlYjqLWgVR1sZThKmDTH1SzztmZFcNjXBg5Yvs8zPKe+955AoL/EG+pu0ZapFTIrsW7Wq7dCSiXhUkdJ3E/3RZqawqDhTHmrQEiG2j4N2B90SeK7TcXncr7TxaQMwjRpUpkDHmNQPMW3TEHyjEVlTKjzeCmvJEzu/n2oDR12kD6FL5oh4lkMIzIIQqVtp09cB9IJXEO0ww3elIbjZPhMASOocwvoFWM/m9ZTH8i2NjulWuIsnPj9AMmQ8hryR+nqSmkK942D+/9W0/ZHX4rzZ4/6hpEwAi+2+BNnS9yPk1zP4LNMy5FA4NwCV14= - skip_cleanup: true on: repo: IdentityPython/SATOSA tags: true From 22bff16fba357ee4b870b083ea2f8eefc88a63b7 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 17 Mar 2020 23:21:54 +0200 Subject: [PATCH 024/288] Separate stages for GH, PyPI and DockerHub Signed-off-by: Ivan Kanakarakis --- .travis.yml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 47e745bb7..ab49d92fd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,8 +23,9 @@ jobs: - python: 3.9-dev - python: pypy3 - - stage: Deploy latest version + - stage: Deploy latest image on DockerHub script: skip + if: branch = master deploy: - provider: script script: 
scripts/travis_create_docker_image_branch.sh @@ -32,7 +33,8 @@ jobs: repo: IdentityPython/SATOSA branch: master - - stage: Deploy new release + - stage: Deploy new release on GitHub + if: tag IS present script: skip deploy: - provider: releases @@ -41,6 +43,11 @@ jobs: on: repo: IdentityPython/SATOSA tags: true + + - stage: Deploy new release on PyPI + if: tag IS present + script: skip + deploy: - provider: pypi distributions: sdist bdist_wheel user: Lundberg @@ -49,12 +56,16 @@ jobs: on: repo: IdentityPython/SATOSA tags: true + + - stage: Deploy new release on DockerHub + if: tag IS present + script: skip + deploy: - provider: script script: scripts/travis_create_docker_image_tag.sh on: repo: IdentityPython/SATOSA tags: true - if: tag IS present env: global: From 5fcbc753e54a0029ead24cd6041b490534ade042 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 18 Mar 2020 16:11:42 +0200 Subject: [PATCH 025/288] Update CI process for docker images and tags References: https://docs.travis-ci.com/user/build-stages/ https://docs.travis-ci.com/user/build-stages/share-docker-image/ https://docs.travis-ci.com/user/conditions-v1 https://docs.travis-ci.com/user/conditional-builds-stages-jobs/ Signed-off-by: Ivan Kanakarakis --- .travis.yml | 51 +++++++++++++------- scripts/travis_create_docker_image_branch.sh | 9 ---- scripts/travis_create_docker_image_tag.sh | 15 ------ 3 files changed, 33 insertions(+), 42 deletions(-) delete mode 100755 scripts/travis_create_docker_image_branch.sh delete mode 100755 scripts/travis_create_docker_image_tag.sh diff --git a/.travis.yml b/.travis.yml index ab49d92fd..c99756d06 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,15 +23,39 @@ jobs: - python: 3.9-dev - python: pypy3 - - stage: Deploy latest image on DockerHub - script: skip + - stage: Build docker image by commit and deploy on DockerHub + script: + - set -e + - docker build -f Dockerfile -t "${REPO}:${TRAVIS_COMMIT}" . 
+ - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - docker push "$REPO" + + - stage: Tag docker image with branch + if: branch IS present + script: + - set -e + - docker pull "${REPO}:${TRAVIS_COMMIT}" + - docker tag "${REPO}:${TRAVIS_COMMIT}" "${REPO}:latest" + - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - docker push "$REPO" + + - stage: Tag docker image as latest if: branch = master - deploy: - - provider: script - script: scripts/travis_create_docker_image_branch.sh - on: - repo: IdentityPython/SATOSA - branch: master + script: + - set -e + - docker pull "${REPO}:${TRAVIS_COMMIT}" + - docker tag "${REPO}:${TRAVIS_COMMIT}" "${REPO}:latest" + - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - docker push "$REPO" + + - stage: Tag docker image with git-tag + if: tag IS present + script: + - set -e + - docker pull "${REPO}:${TRAVIS_COMMIT}" + - docker tag "${REPO}:${TRAVIS_COMMIT}" "${REPO}:${TRAVIS_TAG}" + - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - docker push "$REPO" - stage: Deploy new release on GitHub if: tag IS present @@ -57,17 +81,8 @@ jobs: repo: IdentityPython/SATOSA tags: true - - stage: Deploy new release on DockerHub - if: tag IS present - script: skip - deploy: - - provider: script - script: scripts/travis_create_docker_image_tag.sh - on: - repo: IdentityPython/SATOSA - tags: true - env: global: - secure: 
ymdbaVVKJFq193xn+pr7sRyjkcOBtpm6eu+A9QsdtzO6vhaj+MhFfsjWAJbGiaSvX691bLd+9kFqH76HViN1LbmkCujBm2+4k0DBSakb70T/81UNNpYGy4iIYzCKaWPPSwvFGfRjNY14RILEpOI8NCkJuDMuO7CiGkwOtmlOdP/tYdV9T3p36Hgpwa/0U5kIahqwnbBKiwjGGhI8YA4Ik01P4biEv3Fr++jS4dhzMe+hYjWDXW+bksf9OikbtJkPzHlZxCDgFH4yNY1TH6P3X/B8NLTrvpNZOj2GgQoZBDrTEM+RLdaLQ8EYcrJaEaOZs65Jicpw5Ycz8DHUuBXwlSiG1g/VJlzxYchGxnLguVyEELEm7p7vhDFYNOROL3J4PpY8E1+L834xzmhCqbHM2kHB2WeiIob0j1Hq7U1802tFuM+tu8P4gdEyGxstQaIehiTI/VQEJm+sKB1W5xtDQokrnMyiQfJy4K7T4ZrONV/gVhb85ayS6eF/Xu1vr/5s/fWyQOxNKvoeEiO6VVoLTWNPEysTewLFc8o7HcE/Qnv/67IwuK/vx0ZlESbNCRgTfqyWpn5vybyWmgo9aUC51hDiVQtZfVeaoF/Xtg2yxVn/4C1aPybpA2Oacll8LjyYwyoCeH3naD0j9Msy4izny2PF7MTT4iNbtwhRoAAqXic= - secure: loJ+Bfind3tbEVrWqEalZT5bMqGFrMewo3jDwH9iJEw28tl+PasTCvCOJRsOomtdMp2QZh8e5wwnL1m7mkHWZaBDMxAg2mXlEv2W817SyAKkgFVnjXr8FJK4kjGAA5l2WXWKo7HKs2lOygZaDxj67i4htvg6cIxVf3dnI+MHpN5CONBfF6cXkFGMZoW+uc2diApyvIVCzte0JZkp6ZepWiyjelPl38pgWlD9elJEUaut0qKGZHtsRnLgTOzbBl49FV4lzCqt7wBnnwwQpTtvEyRW47O/VMYORAFFXpgUDPejE37+bf1wS6hlr0vSHFSUKILQWUH0l09+BPrxpoRj5SYkFD18xvqlWDNrNoANSMgRm/8cL1ucd7T5N03lKtNpaKT2ejHPj6Hu86mXFvcxcZnIcH7ppmXjZU2xfI2ytmmqxXysYeiCc6RgClmFBf3lnZz7iaHVrL8tU1x+eDzEQKvDbYHQnO9+4xXY37PH4ViJJEDoLq3NGhKxbDJ4oMgtz0mrjdWm8a1nWXIm8QTs2+oIhf+HrCpdqE8FfKnI7OyM8C+cwraApY77cZ9xfBqJGDQIgX3c+syB1ufVxY/DPDOXTysRUUHyWVgJeaL8EJEiMVnZMoGliY7QtnBznOglxynekIIaaZ5FMfh8hwA0pQ5idruqrtzVkBQoq8CdHfk= + - REPO: "satosa/satosa" diff --git a/scripts/travis_create_docker_image_branch.sh b/scripts/travis_create_docker_image_branch.sh deleted file mode 100755 index 13da052b4..000000000 --- a/scripts/travis_create_docker_image_branch.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -e - -docker login -u $DOCKER_USER -p $DOCKER_PASS -export REPO=satosa/satosa -export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi` -docker build -f Dockerfile -t $REPO:$TAG . 
-docker push $REPO diff --git a/scripts/travis_create_docker_image_tag.sh b/scripts/travis_create_docker_image_tag.sh deleted file mode 100755 index 504d2d41f..000000000 --- a/scripts/travis_create_docker_image_tag.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -e - -# Travis does not know which branch the repo is on when building a tag -# Make sure to only call this script when building tags - -docker login -u $DOCKER_USER -p $DOCKER_PASS -export REPO=satosa/satosa -export TAG=latest -docker build -f Dockerfile -t $REPO:$TAG . -if [ -n "$TRAVIS_TAG" ]; then - docker tag $REPO:$TAG $REPO:$TRAVIS_TAG -fi -docker push $REPO From e073d1ad2229ceb114055b04e8643217dff83535 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 20 Mar 2020 19:34:51 +0200 Subject: [PATCH 026/288] Remove unused start_proxy script Signed-off-by: Ivan Kanakarakis --- scripts/start_proxy.py | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 scripts/start_proxy.py diff --git a/scripts/start_proxy.py b/scripts/start_proxy.py deleted file mode 100644 index 1d9af1162..000000000 --- a/scripts/start_proxy.py +++ /dev/null @@ -1,12 +0,0 @@ -import re -import sys - -from gunicorn.app.wsgiapp import run - -print('\n'.join(sys.path)) -# use this entrypoint to start the proxy from the IDE - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(run()) - From 6fcba5220074a75ac076f4d3c4f44e711547f9d4 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 20 Mar 2020 19:27:41 +0200 Subject: [PATCH 027/288] Add stage to display env-var information References: https://docs.travis-ci.com/user/environment-variables/ https://config.travis-ci.com/ref/env Signed-off-by: Ivan Kanakarakis --- .travis.yml | 45 +++++++++++++++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index c99756d06..31685b2d7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,39 +23,60 @@ 
jobs: - python: 3.9-dev - python: pypy3 + - stage: Expose env-var information + script: | + cat < Date: Wed, 18 Mar 2020 19:48:51 +0200 Subject: [PATCH 028/288] Combine env-vars and use them References: https://docs.travis-ci.com/user/environment-variables/ https://config.travis-ci.com/ref/env Signed-off-by: Ivan Kanakarakis --- .travis.yml | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index 31685b2d7..0a8b8bcc4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,39 +42,43 @@ jobs: TRAVIS_PULL_REQUEST_SLUG: $TRAVIS_PULL_REQUEST_SLUG DOCKER_REPO: $DOCKER_REPO + DOCKER_TAG_COMMIT: $DOCKER_TAG_COMMIT + DOCKER_TAG_BRANCH: $DOCKER_TAG_BRANCH + DOCKER_TAG_GITTAG: $DOCKER_TAG_GITTAG + DOCKER_TAG_LATEST: $DOCKER_TAG_LATEST EOF - stage: Build docker image by commit and deploy on DockerHub script: - set -e - - docker build -f Dockerfile -t "${DOCKER_REPO}:${TRAVIS_COMMIT}" . + - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
- docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" - docker push "$DOCKER_REPO" - - stage: Tag docker image with branch + - stage: Tag docker image with branch name if: branch IS present script: - set -e - - docker pull "${DOCKER_REPO}:${TRAVIS_COMMIT}" - - docker tag "${DOCKER_REPO}:${TRAVIS_COMMIT}" "${DOCKER_REPO}:latest" + - docker pull "$DOCKER_TAG_COMMIT" + - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" - docker push "$DOCKER_REPO" - - stage: Tag docker image as latest - if: branch = master + - stage: Tag docker image with git-tag + if: tag IS present script: - set -e - - docker pull "${DOCKER_REPO}:${TRAVIS_COMMIT}" - - docker tag "${DOCKER_REPO}:${TRAVIS_COMMIT}" "${DOCKER_REPO}:latest" + - docker pull "$DOCKER_TAG_COMMIT" + - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" - docker push "$DOCKER_REPO" - - stage: Tag docker image with git-tag - if: tag IS present + - stage: Tag docker image as latest + if: branch = master script: - set -e - - docker pull "${DOCKER_REPO}:${TRAVIS_COMMIT}" - - docker tag "${DOCKER_REPO}:${TRAVIS_COMMIT}" "${DOCKER_REPO}:${TRAVIS_TAG}" + - docker pull "$DOCKER_TAG_COMMIT" + - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" - docker push "$DOCKER_REPO" @@ -107,3 +111,7 @@ env: - secure: 
ymdbaVVKJFq193xn+pr7sRyjkcOBtpm6eu+A9QsdtzO6vhaj+MhFfsjWAJbGiaSvX691bLd+9kFqH76HViN1LbmkCujBm2+4k0DBSakb70T/81UNNpYGy4iIYzCKaWPPSwvFGfRjNY14RILEpOI8NCkJuDMuO7CiGkwOtmlOdP/tYdV9T3p36Hgpwa/0U5kIahqwnbBKiwjGGhI8YA4Ik01P4biEv3Fr++jS4dhzMe+hYjWDXW+bksf9OikbtJkPzHlZxCDgFH4yNY1TH6P3X/B8NLTrvpNZOj2GgQoZBDrTEM+RLdaLQ8EYcrJaEaOZs65Jicpw5Ycz8DHUuBXwlSiG1g/VJlzxYchGxnLguVyEELEm7p7vhDFYNOROL3J4PpY8E1+L834xzmhCqbHM2kHB2WeiIob0j1Hq7U1802tFuM+tu8P4gdEyGxstQaIehiTI/VQEJm+sKB1W5xtDQokrnMyiQfJy4K7T4ZrONV/gVhb85ayS6eF/Xu1vr/5s/fWyQOxNKvoeEiO6VVoLTWNPEysTewLFc8o7HcE/Qnv/67IwuK/vx0ZlESbNCRgTfqyWpn5vybyWmgo9aUC51hDiVQtZfVeaoF/Xtg2yxVn/4C1aPybpA2Oacll8LjyYwyoCeH3naD0j9Msy4izny2PF7MTT4iNbtwhRoAAqXic= - secure: loJ+Bfind3tbEVrWqEalZT5bMqGFrMewo3jDwH9iJEw28tl+PasTCvCOJRsOomtdMp2QZh8e5wwnL1m7mkHWZaBDMxAg2mXlEv2W817SyAKkgFVnjXr8FJK4kjGAA5l2WXWKo7HKs2lOygZaDxj67i4htvg6cIxVf3dnI+MHpN5CONBfF6cXkFGMZoW+uc2diApyvIVCzte0JZkp6ZepWiyjelPl38pgWlD9elJEUaut0qKGZHtsRnLgTOzbBl49FV4lzCqt7wBnnwwQpTtvEyRW47O/VMYORAFFXpgUDPejE37+bf1wS6hlr0vSHFSUKILQWUH0l09+BPrxpoRj5SYkFD18xvqlWDNrNoANSMgRm/8cL1ucd7T5N03lKtNpaKT2ejHPj6Hu86mXFvcxcZnIcH7ppmXjZU2xfI2ytmmqxXysYeiCc6RgClmFBf3lnZz7iaHVrL8tU1x+eDzEQKvDbYHQnO9+4xXY37PH4ViJJEDoLq3NGhKxbDJ4oMgtz0mrjdWm8a1nWXIm8QTs2+oIhf+HrCpdqE8FfKnI7OyM8C+cwraApY77cZ9xfBqJGDQIgX3c+syB1ufVxY/DPDOXTysRUUHyWVgJeaL8EJEiMVnZMoGliY7QtnBznOglxynekIIaaZ5FMfh8hwA0pQ5idruqrtzVkBQoq8CdHfk= - DOCKER_REPO: "satosa/satosa" + - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" + - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" + - DOCKER_TAG_GITTAG: "${DOCKER_REPO}:${TRAVIS_TAG:-NO_TAG}" + - DOCKER_TAG_LATEST: "${DOCKER_REPO}:latest" From be62144cc2f9713c98ef3110af7e8e6a426c50c7 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 19 Mar 2020 14:21:17 +0200 Subject: [PATCH 029/288] Tag by pull request number using the PR prefix Signed-off-by: Ivan Kanakarakis --- .travis.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.travis.yml b/.travis.yml index 
0a8b8bcc4..898449c55 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,6 +44,7 @@ jobs: DOCKER_REPO: $DOCKER_REPO DOCKER_TAG_COMMIT: $DOCKER_TAG_COMMIT DOCKER_TAG_BRANCH: $DOCKER_TAG_BRANCH + DOCKER_TAG_PR_NUM: $DOCKER_TAG_PR_NUM DOCKER_TAG_GITTAG: $DOCKER_TAG_GITTAG DOCKER_TAG_LATEST: $DOCKER_TAG_LATEST EOF @@ -64,6 +65,15 @@ jobs: - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" - docker push "$DOCKER_REPO" + - stage: Tag docker image with pull request number + if: type = pull_request + script: + - set -e + - docker pull "$DOCKER_TAG_COMMIT" + - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" + - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - docker push "$DOCKER_REPO" + - stage: Tag docker image with git-tag if: tag IS present script: @@ -113,5 +123,6 @@ env: - DOCKER_REPO: "satosa/satosa" - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" + - DOCKER_TAG_PR_NUM: "${DOCKER_REPO}:PR${TRAVIS_PULL_REQUEST}" - DOCKER_TAG_GITTAG: "${DOCKER_REPO}:${TRAVIS_TAG:-NO_TAG}" - DOCKER_TAG_LATEST: "${DOCKER_REPO}:latest" From 43541de63e2cd96ff0fd01cf9d35f6b762a60270 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 20 Mar 2020 20:21:25 +0200 Subject: [PATCH 030/288] Change docker login command to read password from stdin Signed-off-by: Ivan Kanakarakis --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 898449c55..64135a283 100644 --- a/.travis.yml +++ b/.travis.yml @@ -53,7 +53,7 @@ jobs: script: - set -e - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
- - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_REPO" - stage: Tag docker image with branch name @@ -62,7 +62,7 @@ jobs: - set -e - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_REPO" - stage: Tag docker image with pull request number @@ -71,7 +71,7 @@ jobs: - set -e - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" - - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_REPO" - stage: Tag docker image with git-tag @@ -80,7 +80,7 @@ jobs: - set -e - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_REPO" - stage: Tag docker image as latest @@ -89,7 +89,7 @@ jobs: - set -e - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASS" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_REPO" - stage: Deploy new release on GitHub From a860cebd1fcb7c7389f0f808c6a05c96b2ba8c05 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 20 Mar 2020 21:04:12 +0200 Subject: [PATCH 031/288] Remove secure vars Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 64135a283..e45a8b2bd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -118,8 +118,6 @@ jobs: env: global: - - secure: 
ymdbaVVKJFq193xn+pr7sRyjkcOBtpm6eu+A9QsdtzO6vhaj+MhFfsjWAJbGiaSvX691bLd+9kFqH76HViN1LbmkCujBm2+4k0DBSakb70T/81UNNpYGy4iIYzCKaWPPSwvFGfRjNY14RILEpOI8NCkJuDMuO7CiGkwOtmlOdP/tYdV9T3p36Hgpwa/0U5kIahqwnbBKiwjGGhI8YA4Ik01P4biEv3Fr++jS4dhzMe+hYjWDXW+bksf9OikbtJkPzHlZxCDgFH4yNY1TH6P3X/B8NLTrvpNZOj2GgQoZBDrTEM+RLdaLQ8EYcrJaEaOZs65Jicpw5Ycz8DHUuBXwlSiG1g/VJlzxYchGxnLguVyEELEm7p7vhDFYNOROL3J4PpY8E1+L834xzmhCqbHM2kHB2WeiIob0j1Hq7U1802tFuM+tu8P4gdEyGxstQaIehiTI/VQEJm+sKB1W5xtDQokrnMyiQfJy4K7T4ZrONV/gVhb85ayS6eF/Xu1vr/5s/fWyQOxNKvoeEiO6VVoLTWNPEysTewLFc8o7HcE/Qnv/67IwuK/vx0ZlESbNCRgTfqyWpn5vybyWmgo9aUC51hDiVQtZfVeaoF/Xtg2yxVn/4C1aPybpA2Oacll8LjyYwyoCeH3naD0j9Msy4izny2PF7MTT4iNbtwhRoAAqXic= - - secure: loJ+Bfind3tbEVrWqEalZT5bMqGFrMewo3jDwH9iJEw28tl+PasTCvCOJRsOomtdMp2QZh8e5wwnL1m7mkHWZaBDMxAg2mXlEv2W817SyAKkgFVnjXr8FJK4kjGAA5l2WXWKo7HKs2lOygZaDxj67i4htvg6cIxVf3dnI+MHpN5CONBfF6cXkFGMZoW+uc2diApyvIVCzte0JZkp6ZepWiyjelPl38pgWlD9elJEUaut0qKGZHtsRnLgTOzbBl49FV4lzCqt7wBnnwwQpTtvEyRW47O/VMYORAFFXpgUDPejE37+bf1wS6hlr0vSHFSUKILQWUH0l09+BPrxpoRj5SYkFD18xvqlWDNrNoANSMgRm/8cL1ucd7T5N03lKtNpaKT2ejHPj6Hu86mXFvcxcZnIcH7ppmXjZU2xfI2ytmmqxXysYeiCc6RgClmFBf3lnZz7iaHVrL8tU1x+eDzEQKvDbYHQnO9+4xXY37PH4ViJJEDoLq3NGhKxbDJ4oMgtz0mrjdWm8a1nWXIm8QTs2+oIhf+HrCpdqE8FfKnI7OyM8C+cwraApY77cZ9xfBqJGDQIgX3c+syB1ufVxY/DPDOXTysRUUHyWVgJeaL8EJEiMVnZMoGliY7QtnBznOglxynekIIaaZ5FMfh8hwA0pQ5idruqrtzVkBQoq8CdHfk= - DOCKER_REPO: "satosa/satosa" - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" From 89a2e734a50a37bcd6f5e24aaaeb648b3614fb28 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 20 Mar 2020 21:49:04 +0200 Subject: [PATCH 032/288] Set docker repository to idpy/satosa Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e45a8b2bd..ce110b9a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -118,7 +118,7 @@ jobs: env: global: - - DOCKER_REPO: "satosa/satosa" 
+ - DOCKER_REPO: "idpy/satosa" - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" - DOCKER_TAG_PR_NUM: "${DOCKER_REPO}:PR${TRAVIS_PULL_REQUEST}" From 16895f84eb30b815c6d943bb01241d9f8c0cb3cb Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:06:23 +0200 Subject: [PATCH 033/288] Revert "Set docker repository to idpy/satosa" Push to satosa/satosa. idpy/satosa may be removed or turned into a collection of other repos like a mirror. This reverts commit 89a2e734a50a37bcd6f5e24aaaeb648b3614fb28. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ce110b9a0..e45a8b2bd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -118,7 +118,7 @@ jobs: env: global: - - DOCKER_REPO: "idpy/satosa" + - DOCKER_REPO: "satosa/satosa" - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" - DOCKER_TAG_PR_NUM: "${DOCKER_REPO}:PR${TRAVIS_PULL_REQUEST}" From dbb10c03e642ec5a87a120554530f67bd3ed182d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:07:57 +0200 Subject: [PATCH 034/288] Push docker images specifying their tag Signed-off-by: Ivan Kanakarakis --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index e45a8b2bd..25f1ba472 100644 --- a/.travis.yml +++ b/.travis.yml @@ -54,7 +54,7 @@ jobs: - set -e - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
- echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - - docker push "$DOCKER_REPO" + - docker push "$DOCKER_TAG_COMMIT" - stage: Tag docker image with branch name if: branch IS present @@ -63,7 +63,7 @@ jobs: - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - - docker push "$DOCKER_REPO" + - docker push "$DOCKER_TAG_BRANCH" - stage: Tag docker image with pull request number if: type = pull_request @@ -72,7 +72,7 @@ jobs: - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - - docker push "$DOCKER_REPO" + - docker push "$DOCKER_TAG_PR_NUM" - stage: Tag docker image with git-tag if: tag IS present @@ -81,7 +81,7 @@ jobs: - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - - docker push "$DOCKER_REPO" + - docker push "$DOCKER_TAG_GITTAG" - stage: Tag docker image as latest if: branch = master @@ -90,7 +90,7 @@ jobs: - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - - docker push "$DOCKER_REPO" + - docker push "$DOCKER_TAG_LATEST" - stage: Deploy new release on GitHub if: tag IS present From 05558d496b37e5168fa4c115a7a666f97bec40be Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:47:00 +0200 Subject: [PATCH 035/288] Add restrictions by setting conditions on docker builds Signed-off-by: Ivan Kanakarakis --- .travis.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 25f1ba472..304a9ae85 100644 --- a/.travis.yml +++ b/.travis.yml @@ -57,7 +57,7 @@ jobs: - docker push "$DOCKER_TAG_COMMIT" - stage: Tag docker 
image with branch name - if: branch IS present + if: type = push AND branch IS present script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -75,7 +75,7 @@ jobs: - docker push "$DOCKER_TAG_PR_NUM" - stage: Tag docker image with git-tag - if: tag IS present + if: type = push AND tag IS present script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -84,7 +84,7 @@ jobs: - docker push "$DOCKER_TAG_GITTAG" - stage: Tag docker image as latest - if: branch = master + if: type = push AND branch = master script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -93,7 +93,7 @@ jobs: - docker push "$DOCKER_TAG_LATEST" - stage: Deploy new release on GitHub - if: tag IS present + if: type = push AND branch = master AND tag IS present script: skip deploy: - provider: releases @@ -104,7 +104,7 @@ jobs: tags: true - stage: Deploy new release on PyPI - if: tag IS present + if: type = push AND branch = master AND tag IS present script: skip deploy: - provider: pypi From 6eb73036e9ea354e0b712048eb3273f37ef149d6 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:47:20 +0200 Subject: [PATCH 036/288] Tag docker images by pull-request branch name Signed-off-by: Ivan Kanakarakis --- .travis.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.travis.yml b/.travis.yml index 304a9ae85..c2467c038 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,6 +44,7 @@ jobs: DOCKER_REPO: $DOCKER_REPO DOCKER_TAG_COMMIT: $DOCKER_TAG_COMMIT DOCKER_TAG_BRANCH: $DOCKER_TAG_BRANCH + DOCKER_TAG_PR_BRANCH: $DOCKER_TAG_PR_BRANCH DOCKER_TAG_PR_NUM: $DOCKER_TAG_PR_NUM DOCKER_TAG_GITTAG: $DOCKER_TAG_GITTAG DOCKER_TAG_LATEST: $DOCKER_TAG_LATEST @@ -65,6 +66,15 @@ jobs: - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_TAG_BRANCH" + - stage: Tag docker image with pull-request branch name + if: type = pull_request AND head_branch IS present + script: + - set -e + - docker pull "$DOCKER_TAG_COMMIT" + - docker tag "$DOCKER_TAG_COMMIT" 
"$DOCKER_TAG_PR_BRANCH" + - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin + - docker push "$DOCKER_TAG_PR_BRANCH" + - stage: Tag docker image with pull request number if: type = pull_request script: @@ -121,6 +131,7 @@ env: - DOCKER_REPO: "satosa/satosa" - DOCKER_TAG_COMMIT: "${DOCKER_REPO}:${TRAVIS_COMMIT}" - DOCKER_TAG_BRANCH: "${DOCKER_REPO}:${TRAVIS_BRANCH}" + - DOCKER_TAG_PR_BRANCH: "${DOCKER_REPO}:PR${TRAVIS_PULL_REQUEST_BRANCH}" - DOCKER_TAG_PR_NUM: "${DOCKER_REPO}:PR${TRAVIS_PULL_REQUEST}" - DOCKER_TAG_GITTAG: "${DOCKER_REPO}:${TRAVIS_TAG:-NO_TAG}" - DOCKER_TAG_LATEST: "${DOCKER_REPO}:latest" From 0cef2eb28e62a26340725524b9698931fdb01cf4 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:51:01 +0200 Subject: [PATCH 037/288] Fix stage name Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c2467c038..1a9ddf3f2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -75,7 +75,7 @@ jobs: - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_TAG_PR_BRANCH" - - stage: Tag docker image with pull request number + - stage: Tag docker image with pull-request number if: type = pull_request script: - set -e From bd9f71b16dd23082e185b1927e7878eba62b94fc Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 18:14:11 +0200 Subject: [PATCH 038/288] Expose more env-vars Signed-off-by: Ivan Kanakarakis --- .travis.yml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 1a9ddf3f2..6e8d3cf3e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,15 +27,27 @@ jobs: script: | cat < Date: Mon, 23 Mar 2020 20:09:37 +0200 Subject: [PATCH 039/288] Tag pull-request docker-image only when secure env-vars are present Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml 
b/.travis.yml index 6e8d3cf3e..70c8f4f02 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,6 +82,7 @@ jobs: if: type = pull_request AND head_branch IS present script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -91,6 +92,7 @@ jobs: if: type = pull_request script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin From 753c59f113d5f4f45bcbd83f6488149f3f1b64dc Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 24 Mar 2020 18:18:38 +0200 Subject: [PATCH 040/288] Do not run the install step on build and deploy stages Signed-off-by: Ivan Kanakarakis --- .travis.yml | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 70c8f4f02..65a8d75e8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,13 @@ -addons: - apt: - packages: - - xmlsec1 services: - docker - mongodb language: python + +before_install: + - sudo apt-get install -y xmlsec1 + install: - pip install tox - pip install tox-travis @@ -63,6 +63,8 @@ jobs: EOF - stage: Build docker image by commit and deploy on DockerHub + before_install: skip + install: skip script: - set -e - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
@@ -71,6 +73,8 @@ jobs: - stage: Tag docker image with branch name if: type = push AND branch IS present + before_install: skip + install: skip script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -80,6 +84,8 @@ jobs: - stage: Tag docker image with pull-request branch name if: type = pull_request AND head_branch IS present + before_install: skip + install: skip script: - set -e - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 @@ -90,6 +96,8 @@ jobs: - stage: Tag docker image with pull-request number if: type = pull_request + before_install: skip + install: skip script: - set -e - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 @@ -100,6 +108,8 @@ jobs: - stage: Tag docker image with git-tag if: type = push AND tag IS present + before_install: skip + install: skip script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -109,6 +119,8 @@ jobs: - stage: Tag docker image as latest if: type = push AND branch = master + before_install: skip + install: skip script: - set -e - docker pull "$DOCKER_TAG_COMMIT" @@ -118,6 +130,8 @@ jobs: - stage: Deploy new release on GitHub if: type = push AND branch = master AND tag IS present + before_install: skip + install: skip script: skip deploy: - provider: releases @@ -129,6 +143,8 @@ jobs: - stage: Deploy new release on PyPI if: type = push AND branch = master AND tag IS present + before_install: skip + install: skip script: skip deploy: - provider: pypi From 94f5653c491ae4342339878f015eb8fdb869cf74 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 24 Mar 2020 18:51:24 +0200 Subject: [PATCH 041/288] Add assumed and unaliased keys Signed-off-by: Ivan Kanakarakis --- .travis.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 65a8d75e8..185d38196 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,11 @@ +os: linux +dist: xenial +language: python services: - docker - mongodb -language: python - before_install: - sudo apt-get install -y xmlsec1 @@ -135,7 
+136,7 @@ jobs: script: skip deploy: - provider: releases - api_key: + token: secure: EOM9qDlyGQrD6NXs8KKMNr2htFXU/H47tO051aA3RKWQrEk7paLXYTDSbQiEq3W9yLg+fifDb0qVqAcFhnV4OWf5ArP++khjaiKQCHYoTaoKIRrTch+12Unq22FEgNj0SYd3HX+CKkG2WpyMoBQAiChgaouDnYIPOvCoqfCxiJzj5e/l5Qomt31smUgZSYhqeDPvX0lN6LP47OLrzsEGDvVxz/fb+EMK3mkCppgPwsB2zy849dER7ofHD6uJiYhY3jP4oCHDBv6GdzqxgMIyDD4zJYh9qCfy1kAwOwc7CYInrELk8GK+YwLFRKMXdTMHu4nYUTgTAJeiXgX6n7oEUfvj4ip+UJ2MfsLdaX7MmgRb2sVStlYjqLWgVR1sZThKmDTH1SzztmZFcNjXBg5Yvs8zPKe+955AoL/EG+pu0ZapFTIrsW7Wq7dCSiXhUkdJ3E/3RZqawqDhTHmrQEiG2j4N2B90SeK7TcXncr7TxaQMwjRpUpkDHmNQPMW3TEHyjEVlTKjzeCmvJEzu/n2oDR12kD6FL5oh4lkMIzIIQqVtp09cB9IJXEO0ww3elIbjZPhMASOocwvoFWM/m9ZTH8i2NjulWuIsnPj9AMmQ8hryR+nqSmkK942D+/9W0/ZHX4rzZ4/6hpEwAi+2+BNnS9yPk1zP4LNMy5FA4NwCV14= on: repo: IdentityPython/SATOSA @@ -149,7 +150,7 @@ jobs: deploy: - provider: pypi distributions: sdist bdist_wheel - user: Lundberg + username: Lundberg password: secure: NwkpOakaeJjErjTF4Y5MWeHzMvkxYZqrBFdRkzfenVfkWsomuyy553A691d3lc1+oREsh1fJJLjpZQYxTLUFIHOUmt/9zr02rFfguzj7hEYfWF8wHBXG6YSWv6T3aCA4RTMXvvzv9cHf1zfxh0fS7kgc+NRMAnd01diVLfYpBciLgmQ31J4mlwShp8yBQUoRBIvzSdzrgjr0TzCQZXB9xM6R2t/oJgXLo6Zz8dTzqq3De9nOU/1P2ZHLxodDikuFdu2/0CjoDgFXB0KnGKGKmJ6G1WMCVvi7abY7smmGA3s4a4NVL7Cirx6VwIj79PsAcgupr2iBAQk/GsPffzdpLtIrBek9u//p84hxrj/IaJWgPOeKeD7+r2Kc2g0r2dQjaM+9MqBx9/lC57xJRX/JHLQWirXfCucB9YyPun5I13Sf3hArkssQy/Jvd2aLFZ885BTfow6TAwl1ud+UPeauvEj6myKO98sko/3Y521EGXRofLGaPokLyPjI/3I4N4jCvw8m86eZAjjIhPFL7JKHf8OVc5gQCYQy3kxiF5wyvbfOeMBp0sk9UvJOrWvBEXFrimAZPu8o8T5WtlQAV02q7rxUwhMd+fpnbGewsl7Ob6eE4rGVrfWQIb86wOHbbJk3lCwPytjEFEI2bdUfRUFcrWxhC040hRP0gzVKLa+nBHM= on: From cb638d8188ab3105be045b9987128ffdfd5bd021 Mon Sep 17 00:00:00 2001 From: sebulibah Date: Thu, 19 Dec 2019 12:05:17 +0000 Subject: [PATCH 042/288] Improve logging - satosa.micro_services.ldap_attribute_store Signed-off-by: Ivan Kanakarakis --- .../micro_services/ldap_attribute_store.py | 98 +++++++++++-------- 1 file changed, 56 insertions(+), 42 deletions(-) 
diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 624357081..f947ff451 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -15,11 +15,10 @@ from ldap3.core.exceptions import LDAPException from satosa.exception import SATOSAError -from satosa.logging_util import satosa_logging from satosa.micro_services.base import ResponseMicroService from satosa.response import Redirect - +import satosa.logging_util as lu logger = logging.getLogger(__name__) KEY_FOUND_LDAP_RECORD = "ldap_attribute_store_found_record" @@ -66,7 +65,7 @@ def __init__(self, config, *args, **kwargs): if "default" in config and "" in config: msg = """Use either 'default' or "" in config but not both""" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) if "" in config: @@ -74,7 +73,7 @@ def __init__(self, config, *args, **kwargs): if "default" not in config: msg = "No default configuration is present" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) self.config = {} @@ -88,7 +87,7 @@ def __init__(self, config, *args, **kwargs): for sp in sp_list: if not isinstance(config[sp], dict): msg = "Configuration value for {} must be a dictionary" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) # Initialize configuration using module defaults then update @@ -111,14 +110,14 @@ def __init__(self, config, *args, **kwargs): if connection_params in connections: sp_config["connection"] = connections[connection_params] msg = "Reusing LDAP connection for SP {}".format(sp) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) else: try: connection = self._ldap_connection_factory(sp_config) connections[connection_params] = connection sp_config["connection"] = connection msg = "Created new LDAP 
connection for SP {}".format(sp) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) except LdapAttributeStoreError: # It is acceptable to not have a default LDAP connection # but all SP overrides must have a connection, either @@ -126,13 +125,13 @@ def __init__(self, config, *args, **kwargs): if sp != "default": msg = "No LDAP connection can be initialized for SP {}" msg = msg.format(sp) - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) self.config[sp] = sp_config msg = "LDAP Attribute Store microservice initialized" - satosa_logging(logger, logging.INFO, msg, None) + logger.info(msg) def _construct_filter_value( self, candidate, name_id_value, name_id_format, issuer, attributes @@ -176,7 +175,7 @@ def _construct_filter_value( for attr_value in [attributes.get(identifier_name)] ] msg = "Found candidate values {}".format(values) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) # If one of the configured identifier names is name_id then if there is # also a configured name_id_format add the value for the NameID of that @@ -190,7 +189,7 @@ def _construct_filter_value( and candidate_name_id_format == name_id_format ): msg = "IdP asserted NameID {}".format(name_id_value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) candidate_nameid_value = name_id_value # Only add the NameID value asserted by the IdP if it is not @@ -201,18 +200,18 @@ def _construct_filter_value( if candidate_nameid_value not in values: msg = "Added NameID {} to candidate values" msg = msg.format(candidate_nameid_value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) values.append(candidate_nameid_value) else: msg = "NameID {} value also asserted as attribute value" msg = msg.format(candidate_nameid_value) - satosa_logging(logger, logging.WARN, msg, None) + logger.warning(msg) # If no value was asserted by the IdP for one of the configured list of # 
identifier names for this candidate then go onto the next candidate. if None in values: msg = "Candidate is missing value so skipping" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return None # All values for the configured list of attribute names are present @@ -225,14 +224,14 @@ def _construct_filter_value( else candidate["add_scope"] ) msg = "Added scope {} to values".format(scope) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) values.append(scope) # Concatenate all values to create the filter value. value = "".join(values) msg = "Constructed filter value {}".format(value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return value @@ -283,13 +282,13 @@ def _ldap_connection_factory(self, config): server = ldap3.Server(**args) msg = "Creating a new LDAP connection" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using LDAP URL {}".format(ldap_url) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using bind DN {}".format(bind_dn) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) auto_bind_string = config["auto_bind"] auto_bind_map = { @@ -309,9 +308,9 @@ def _ldap_connection_factory(self, config): if client_strategy == ldap3.REUSABLE: msg = "Using pool size {}".format(pool_size) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using pool keep alive {}".format(pool_keepalive) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) try: connection = ldap3.Connection( @@ -327,16 +326,16 @@ def _ldap_connection_factory(self, config): pool_keepalive=pool_keepalive, ) msg = "Successfully connected to LDAP server" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) except LDAPException as e: msg = "Caught exception when connecting to LDAP server: {}" msg = msg.format(e) - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise 
LdapAttributeStoreError(msg) msg = "Successfully connected to LDAP server" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return connection @@ -348,7 +347,7 @@ def _populate_attributes(self, config, record): ldap_attributes = record.get("attributes", None) if not ldap_attributes: msg = "No attributes returned with LDAP record" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return ldap_to_internal_map = ( @@ -374,7 +373,7 @@ def _populate_attributes(self, config, record): ) msg = "Recording internal attribute {} with values {}" msg = msg.format(internal_attr, attributes[internal_attr]) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return attributes @@ -408,12 +407,14 @@ def process(self, context, data): "issuer": issuer, "config": self._filter_config(config), } - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) # Ignore this SP entirely if so configured. if config["ignore"]: msg = "Ignoring SP {}".format(requester) - satosa_logging(logger, logging.INFO, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) return super().process(context, data) # The list of values for the LDAP search filters that will be tried in @@ -437,7 +438,8 @@ def process(self, context, data): if filter_value ] msg = {"message": "Search filters", "filter_values": filter_values} - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) # Initialize an empty LDAP record. The first LDAP record found using # the ordered # list of search filter values will be the record used. 
@@ -459,7 +461,8 @@ def process(self, context, data): "message": "LDAP query with constructed search filter", "search filter": search_filter, } - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) attributes = ( config["query_return_attributes"] @@ -480,13 +483,15 @@ def process(self, context, data): exp_msg = "Caught unhandled exception: {}".format(err) if exp_msg: - satosa_logging(logger, logging.ERROR, exp_msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=exp_msg) + logger.error(logline) return super().process(context, data) if not results: msg = "Querying LDAP server: No results for {}." msg = msg.format(filter_val) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) continue if isinstance(results, bool): @@ -495,9 +500,11 @@ def process(self, context, data): responses = connection.get_response(results)[0] msg = "Done querying LDAP server" - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) msg = "LDAP server returned {} records".format(len(responses)) - satosa_logging(logger, logging.INFO, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) # For now consider only the first record found (if any). 
if len(responses) > 0: @@ -505,7 +512,8 @@ def process(self, context, data): msg = "LDAP server returned {} records using search filter" msg = msg + " value {}" msg = msg.format(len(responses), filter_val) - satosa_logging(logger, logging.WARN, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.warning(logline) record = responses[0] break @@ -514,7 +522,8 @@ def process(self, context, data): if config["clear_input_attributes"]: msg = "Clearing values for these input attributes: {}" msg = msg.format(data.attributes) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) data.attributes = {} # This adapts records with different search and connection strategy @@ -538,7 +547,8 @@ def process(self, context, data): "DN": record["dn"], "attributes": record["attributes"], } - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) # Populate attributes as configured. new_attrs = self._populate_attributes(config, record) @@ -555,16 +565,18 @@ def process(self, context, data): if user_ids: data.subject_id = "".join(user_ids) msg = "NameID value is {}".format(data.subject_id) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) # Add the record to the context so that later microservices # may use it if required. 
context.decorate(KEY_FOUND_LDAP_RECORD, record) msg = "Added record {} to context".format(record) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) else: msg = "No record found in LDAP so no attributes will be added" - satosa_logging(logger, logging.WARN, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.warning(msg) on_ldap_search_result_empty = config["on_ldap_search_result_empty"] if on_ldap_search_result_empty: # Redirect to the configured URL with @@ -578,9 +590,11 @@ def process(self, context, data): encoded_idp_entity_id, ) msg = "Redirecting to {}".format(url) - satosa_logging(logger, logging.INFO, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(msg) return Redirect(url) msg = "Returning data.attributes {}".format(data.attributes) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(msg) return super().process(context, data) From 9f85123654414681d66f050a61f2c81c03008aa8 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 23 Mar 2020 17:20:54 +0200 Subject: [PATCH 043/288] Fixes to the logging params Signed-off-by: Ivan Kanakarakis --- .../micro_services/ldap_attribute_store.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index f947ff451..e401127e3 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -14,11 +14,12 @@ import ldap3 from ldap3.core.exceptions import LDAPException +import satosa.logging_util as lu from satosa.exception import SATOSAError from satosa.micro_services.base import ResponseMicroService 
from satosa.response import Redirect -import satosa.logging_util as lu + logger = logging.getLogger(__name__) KEY_FOUND_LDAP_RECORD = "ldap_attribute_store_found_record" @@ -372,8 +373,8 @@ def _populate_attributes(self, config, record): else [values] ) msg = "Recording internal attribute {} with values {}" - msg = msg.format(internal_attr, attributes[internal_attr]) - logger.debug(msg) + logline = msg.format(internal_attr, attributes[internal_attr]) + logger.debug(logline) return attributes @@ -452,7 +453,8 @@ def process(self, context, data): "message": "LDAP server host", "server host": connection.server.host, } - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) for filter_val in filter_values: ldap_ident_attr = config["ldap_identifier_attribute"] @@ -576,7 +578,7 @@ def process(self, context, data): else: msg = "No record found in LDAP so no attributes will be added" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.warning(msg) + logger.warning(logline) on_ldap_search_result_empty = config["on_ldap_search_result_empty"] if on_ldap_search_result_empty: # Redirect to the configured URL with @@ -591,10 +593,10 @@ def process(self, context, data): ) msg = "Redirecting to {}".format(url) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.info(msg) + logger.info(logline) return Redirect(url) msg = "Returning data.attributes {}".format(data.attributes) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(msg) + logger.debug(logline) return super().process(context, data) From 7dff564fd31ba0405dec08294cb971ed7b9c02d0 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 24 Mar 2020 19:30:42 +0200 Subject: [PATCH 044/288] Log if an endpoint cannot be matched to a function Signed-off-by: Ivan Kanakarakis --- src/satosa/proxy_server.py | 15 
++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 868ffd5b1..9ac6713b6 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -8,6 +8,7 @@ from cookies_samesite_compat import CookiesSameSiteCompatMiddleware import satosa +import satosa.logging_util as lu from .base import SATOSABase from .context import Context from .response import ServiceError, NotFound @@ -118,17 +119,21 @@ def __call__(self, environ, start_response, debug=False): if isinstance(resp, Exception): raise resp return resp(environ, start_response) - except SATOSANoBoundEndpointError: + except SATOSANoBoundEndpointError as e: + import ipdb; ipdb.set_trace() # noqa XXX + msg = str(e) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) resp = NotFound("The Service or Identity Provider you requested could not be found.") return resp(environ, start_response) - except Exception as err: - if type(err) != UnknownSystemEntity: - logline = "{}".format(err) + except Exception as e: + if type(e) != UnknownSystemEntity: + logline = "{}".format(e) logger.exception(logline) if debug: raise - resp = ServiceError("%s" % err) + resp = ServiceError("%s" % e) return resp(environ, start_response) From 1100dbb1dc4ecdccbc4edda094de540ffd014228 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 00:02:52 +0200 Subject: [PATCH 045/288] Remove debugger call Signed-off-by: Ivan Kanakarakis --- src/satosa/proxy_server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 9ac6713b6..a3c336145 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -120,7 +120,6 @@ def __call__(self, environ, start_response, debug=False): raise resp return resp(environ, start_response) except SATOSANoBoundEndpointError as e: - import ipdb; ipdb.set_trace() # noqa XXX msg = str(e) logline = 
lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From 6a88c1adcbdeea0ce07b45904b75ef301d1d5890 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 11:16:34 +0200 Subject: [PATCH 046/288] Allow py39 builds to fail Signed-off-by: Ivan Kanakarakis --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 185d38196..7ff2a2a70 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,11 +17,12 @@ script: - tox jobs: + allow_failures: + - python: 3.9-dev include: - python: 3.6 - python: 3.7 - python: 3.8 - - python: 3.9-dev - python: pypy3 - stage: Expose env-var information From 4a574b14d8af8e8f0d48b3f7a44034da54a9c33c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 11:17:40 +0200 Subject: [PATCH 047/288] Build and tag docker images only when the secure env-vars are present Signed-off-by: Ivan Kanakarakis --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index 7ff2a2a70..11cc6a49f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,6 +69,7 @@ jobs: install: skip script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
- echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_TAG_COMMIT" @@ -79,6 +80,7 @@ jobs: install: skip script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -114,6 +116,7 @@ jobs: install: skip script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -125,6 +128,7 @@ jobs: install: skip script: - set -e + - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin From 81d6d5c06dbb9827c7d108106b8fbdcee27d5f7f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 11:18:05 +0200 Subject: [PATCH 048/288] Use a token to deploy GitHub release Signed-off-by: Ivan Kanakarakis --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 11cc6a49f..4236748d8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -141,8 +141,7 @@ jobs: script: skip deploy: - provider: releases - token: - secure: 
EOM9qDlyGQrD6NXs8KKMNr2htFXU/H47tO051aA3RKWQrEk7paLXYTDSbQiEq3W9yLg+fifDb0qVqAcFhnV4OWf5ArP++khjaiKQCHYoTaoKIRrTch+12Unq22FEgNj0SYd3HX+CKkG2WpyMoBQAiChgaouDnYIPOvCoqfCxiJzj5e/l5Qomt31smUgZSYhqeDPvX0lN6LP47OLrzsEGDvVxz/fb+EMK3mkCppgPwsB2zy849dER7ofHD6uJiYhY3jP4oCHDBv6GdzqxgMIyDD4zJYh9qCfy1kAwOwc7CYInrELk8GK+YwLFRKMXdTMHu4nYUTgTAJeiXgX6n7oEUfvj4ip+UJ2MfsLdaX7MmgRb2sVStlYjqLWgVR1sZThKmDTH1SzztmZFcNjXBg5Yvs8zPKe+955AoL/EG+pu0ZapFTIrsW7Wq7dCSiXhUkdJ3E/3RZqawqDhTHmrQEiG2j4N2B90SeK7TcXncr7TxaQMwjRpUpkDHmNQPMW3TEHyjEVlTKjzeCmvJEzu/n2oDR12kD6FL5oh4lkMIzIIQqVtp09cB9IJXEO0ww3elIbjZPhMASOocwvoFWM/m9ZTH8i2NjulWuIsnPj9AMmQ8hryR+nqSmkK942D+/9W0/ZHX4rzZ4/6hpEwAi+2+BNnS9yPk1zP4LNMy5FA4NwCV14= + token: "$GITHUB_RELEASE_TOKEN" on: repo: IdentityPython/SATOSA tags: true From 3656940d5d529db49cde28dd61390f294fa2ecbd Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 11:18:18 +0200 Subject: [PATCH 049/288] Use a token to deploy PyPI releases Signed-off-by: Ivan Kanakarakis --- .travis.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4236748d8..a673733cf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -154,9 +154,8 @@ jobs: deploy: - provider: pypi distributions: sdist bdist_wheel - username: Lundberg - password: - secure: NwkpOakaeJjErjTF4Y5MWeHzMvkxYZqrBFdRkzfenVfkWsomuyy553A691d3lc1+oREsh1fJJLjpZQYxTLUFIHOUmt/9zr02rFfguzj7hEYfWF8wHBXG6YSWv6T3aCA4RTMXvvzv9cHf1zfxh0fS7kgc+NRMAnd01diVLfYpBciLgmQ31J4mlwShp8yBQUoRBIvzSdzrgjr0TzCQZXB9xM6R2t/oJgXLo6Zz8dTzqq3De9nOU/1P2ZHLxodDikuFdu2/0CjoDgFXB0KnGKGKmJ6G1WMCVvi7abY7smmGA3s4a4NVL7Cirx6VwIj79PsAcgupr2iBAQk/GsPffzdpLtIrBek9u//p84hxrj/IaJWgPOeKeD7+r2Kc2g0r2dQjaM+9MqBx9/lC57xJRX/JHLQWirXfCucB9YyPun5I13Sf3hArkssQy/Jvd2aLFZ885BTfow6TAwl1ud+UPeauvEj6myKO98sko/3Y521EGXRofLGaPokLyPjI/3I4N4jCvw8m86eZAjjIhPFL7JKHf8OVc5gQCYQy3kxiF5wyvbfOeMBp0sk9UvJOrWvBEXFrimAZPu8o8T5WtlQAV02q7rxUwhMd+fpnbGewsl7Ob6eE4rGVrfWQIb86wOHbbJk3lCwPytjEFEI2bdUfRUFcrWxhC040hRP0gzVKLa+nBHM= + user: 
"__token__" + password: "$PYPI_RELEASE_TOKEN" on: repo: IdentityPython/SATOSA tags: true From 15513b6d01ff067218d9e3e46fe167439c3d086f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 13:49:42 +0200 Subject: [PATCH 050/288] Run the install step when building the docker image Signed-off-by: Ivan Kanakarakis --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index a673733cf..9dc1f9556 100644 --- a/.travis.yml +++ b/.travis.yml @@ -66,7 +66,6 @@ jobs: - stage: Build docker image by commit and deploy on DockerHub before_install: skip - install: skip script: - set -e - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 From 193aed21c7f1dafd20824d8096fae84c4c620d90 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 13:50:58 +0200 Subject: [PATCH 051/288] Use the username key instead of user Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9dc1f9556..31de7b809 100644 --- a/.travis.yml +++ b/.travis.yml @@ -153,7 +153,7 @@ jobs: deploy: - provider: pypi distributions: sdist bdist_wheel - user: "__token__" + username: "__token__" password: "$PYPI_RELEASE_TOKEN" on: repo: IdentityPython/SATOSA From 6f34e290796c38da71e8c9b323cf0e4a5dc1b473 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 26 Mar 2020 14:16:16 +0200 Subject: [PATCH 052/288] Revert "Run the install step when building the docker image" This reverts commit 15513b6d01ff067218d9e3e46fe167439c3d086f. 
--- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 31de7b809..85e05bd31 100644 --- a/.travis.yml +++ b/.travis.yml @@ -66,6 +66,7 @@ jobs: - stage: Build docker image by commit and deploy on DockerHub before_install: skip + install: skip script: - set -e - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 From 375e4d9e1a04265acbc08b5282013f0444aad943 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Mar 2020 01:19:20 +0200 Subject: [PATCH 053/288] Do not use exit; instead rely on set -e to abort Signed-off-by: Ivan Kanakarakis --- .travis.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 85e05bd31..99ae0d68d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,7 +69,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_TAG_COMMIT" @@ -80,7 +80,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -92,7 +92,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -104,7 +104,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" - echo 
"$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -116,7 +116,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -128,7 +128,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "false" && exit 0 + - test "$TRAVIS_SECURE_ENV_VARS" = "true" - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin From 1c026924ea91d45486ea917faa2e739a9875c73c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Mar 2020 17:08:52 +0200 Subject: [PATCH 054/288] Abort docker build gracefully when secure env-vars are not provided Signed-off-by: Ivan Kanakarakis --- .travis.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 99ae0d68d..4e80014d7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -69,7 +69,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker build -f Dockerfile -t "$DOCKER_TAG_COMMIT" . 
- echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin - docker push "$DOCKER_TAG_COMMIT" @@ -80,7 +80,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -92,7 +92,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_BRANCH" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -104,7 +104,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_PR_NUM" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -116,7 +116,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_GITTAG" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin @@ -128,7 +128,7 @@ jobs: install: skip script: - set -e - - test "$TRAVIS_SECURE_ENV_VARS" = "true" + - test "$TRAVIS_SECURE_ENV_VARS" = "true" || exit 0 - docker pull "$DOCKER_TAG_COMMIT" - docker tag "$DOCKER_TAG_COMMIT" "$DOCKER_TAG_LATEST" - echo "$DOCKER_PASS" | docker login -u "$DOCKER_USERNAME" --password-stdin From d9971306573d710fd1c9bbc34583b4a7bcf3ef64 Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Thu, 13 Feb 2020 12:16:55 -0600 Subject: [PATCH 055/288] Pull YAML configuration values from environment Add logic so that a YAML tag of the form !ENV indicates that a value of the form 
${SOME_ENVIRONMENT_VARIABLE} should be replaced with the value of the process environment variable of the same name. --- doc/README.md | 14 ++++++++++ .../ldap_attribute_store.yaml.example | 3 +- src/satosa/satosa_config.py | 28 +++++++++++++++++++ tests/satosa/test_satosa_config.py | 9 ++++++ 4 files changed, 53 insertions(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index 11f12b9bd..cd7339b73 100644 --- a/doc/README.md +++ b/doc/README.md @@ -29,9 +29,23 @@ apt-get install libffi-dev libssl-dev xmlsec1 Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. # Configuration +SATOSA is configured using YAML. + All default configuration files, as well as an example WSGI application for the proxy, can be found in the [example directory](../example). +A configuration value that includes the tag !ENV will have a value of the form `${SOME_ENVIRONMENT_VARIABLE}` +replaced with the value from the process environment variable of the same name. For example if the file +`ldap_attribute_store.yaml' includes + +``` +bind_password: !ENV ${LDAP_BIND_PASSWORD} +``` + +and the SATOSA process environment includes the environment variable `LDAP_BIND_PASSWORD` with +value `my_password` then the configuration for `bind_password` will be `my_password`. 
+ + ## SATOSA proxy configuration: `proxy_conf.yaml.example` | Parameter name | Data type | Example value | Description | | -------------- | --------- | ------------- | ----------- | diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 43dd20e1f..62eab2f71 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -8,7 +8,8 @@ config: "": ldap_url: ldaps://ldap.example.org bind_dn: cn=admin,dc=example,dc=org - bind_password: xxxxxxxx + # Obtain bind password from environment variable LDAP_BIND_PASSWORD. + bind_password: !ENV ${LDAP_BIND_PASSWORD} search_base: ou=People,dc=example,dc=org read_only: true auto_bind: true diff --git a/src/satosa/satosa_config.py b/src/satosa/satosa_config.py index d3b414520..8c1c6f9a7 100644 --- a/src/satosa/satosa_config.py +++ b/src/satosa/satosa_config.py @@ -3,6 +3,7 @@ """ import logging import os +import re import yaml @@ -143,6 +144,33 @@ def _load_yaml(self, config_file): :param config_file: config to load. Can be file path or yaml string :return: Loaded config """ + # Tag to indicate environment variable: !ENV + tag = '!ENV' + + # Pattern for environment variable: ${word} + pattern = re.compile('.*?\${(\w+)}.*?') + + yaml.SafeLoader.add_implicit_resolver(tag, pattern, None) + + def constructor_env_variables(loader, node): + """ + Extracts the environment variable from the node's value. 
+ :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value of the environment variable + """ + value = loader.construct_scalar(node) + match = pattern.findall(value) + if match: + new_value = value + for m in match: + new_value = new_value.replace('${' + m + '}', + os.environ.get(m, m)) + return new_value + return value + + yaml.SafeLoader.add_constructor(tag, constructor_env_variables) + try: with open(os.path.abspath(config_file)) as f: return yaml.safe_load(f.read()) diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index 030ae6485..73e537045 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -1,4 +1,5 @@ import json +import os from unittest.mock import mock_open, patch import pytest @@ -7,6 +8,7 @@ from satosa.exception import SATOSAConfigurationError from satosa.satosa_config import SATOSAConfig +TEST_RESOURCE_BASE_PATH = os.path.join(os.path.dirname(__file__), "../test_resources") class TestSATOSAConfig: @pytest.fixture @@ -73,3 +75,10 @@ def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_k with pytest.raises(SATOSAConfigurationError): SATOSAConfig(satosa_config_dict) + + def test_can_substitute_from_environment_variable(self, monkeypatch): + monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME", "oatmeal_raisin") + config = SATOSAConfig(os.path.join(TEST_RESOURCE_BASE_PATH, + "proxy_conf_environment_test.yaml")) + + assert config["COOKIE_STATE_NAME"] == 'oatmeal_raisin' From 9991b1a70c93b3a901bb21a1226358cb24e7ba20 Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Thu, 13 Feb 2020 13:57:00 -0600 Subject: [PATCH 056/288] Resource file needed for test. Resource file needed for test. 
--- tests/test_resources/proxy_conf_environment_test.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 tests/test_resources/proxy_conf_environment_test.yaml diff --git a/tests/test_resources/proxy_conf_environment_test.yaml b/tests/test_resources/proxy_conf_environment_test.yaml new file mode 100644 index 000000000..a2c0d7968 --- /dev/null +++ b/tests/test_resources/proxy_conf_environment_test.yaml @@ -0,0 +1,10 @@ +BASE: https://example.com + +STATE_ENCRYPTION_KEY: state_encryption_key + +INTERNAL_ATTRIBUTES: {"attributes": {}} + +COOKIE_STATE_NAME: !ENV ${SATOSA_COOKIE_STATE_NAME} + +BACKEND_MODULES: [] +FRONTEND_MODULES: [] From 022f98935ee7b30629cb5897fb6049a64c1b6238 Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Fri, 21 Feb 2020 07:53:07 -0600 Subject: [PATCH 057/288] Pull YAML configuration values from file pointed to by environment Add logic so that a YAML tag of the form !ENVFILE indicates that a value of the form $(SOME_ENVIRONMENT_VARIABLE_FILE) should be replaced with the value obtained by reading the process environment variable of the same name to get a file path and then reading the file contents. 
--- .../ldap_attribute_store.yaml.example | 3 ++ src/satosa/satosa_config.py | 43 ++++++++++++++++--- tests/satosa/test_satosa_config.py | 9 ++++ tests/test_resources/cookie_state_name | 1 + .../proxy_conf_environment_file_test.yaml | 10 +++++ 5 files changed, 61 insertions(+), 5 deletions(-) create mode 100644 tests/test_resources/cookie_state_name create mode 100644 tests/test_resources/proxy_conf_environment_file_test.yaml diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 62eab2f71..8c14ba667 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -10,6 +10,9 @@ config: bind_dn: cn=admin,dc=example,dc=org # Obtain bind password from environment variable LDAP_BIND_PASSWORD. bind_password: !ENV ${LDAP_BIND_PASSWORD} + # Obtain bind password from file pointed to by + # environment variable LDAP_BIND_PASSWORD_FILE. 
+ # bind_password: !ENVFILE $(LDAP_BIND_PASSWORD) search_base: ou=People,dc=example,dc=org read_only: true auto_bind: true diff --git a/src/satosa/satosa_config.py b/src/satosa/satosa_config.py index 8c1c6f9a7..dee4a17b2 100644 --- a/src/satosa/satosa_config.py +++ b/src/satosa/satosa_config.py @@ -3,6 +3,7 @@ """ import logging import os +import os.path import re import yaml @@ -145,12 +146,12 @@ def _load_yaml(self, config_file): :return: Loaded config """ # Tag to indicate environment variable: !ENV - tag = '!ENV' + tag_env = '!ENV' # Pattern for environment variable: ${word} - pattern = re.compile('.*?\${(\w+)}.*?') + pattern_env = re.compile('.*?\${(\w+)}.*?') - yaml.SafeLoader.add_implicit_resolver(tag, pattern, None) + yaml.SafeLoader.add_implicit_resolver(tag_env, pattern_env, None) def constructor_env_variables(loader, node): """ @@ -160,7 +161,7 @@ def constructor_env_variables(loader, node): :return: value of the environment variable """ value = loader.construct_scalar(node) - match = pattern.findall(value) + match = pattern_env.findall(value) if match: new_value = value for m in match: @@ -169,7 +170,39 @@ def constructor_env_variables(loader, node): return new_value return value - yaml.SafeLoader.add_constructor(tag, constructor_env_variables) + yaml.SafeLoader.add_constructor(tag_env, constructor_env_variables) + + # Tag to indicate file pointed to by environment variable: !ENVFILE + tag_env_file = '!ENVFILE' + + # Pattern for environment variable: $(word) + pattern_env_file = re.compile('.*?\$\((\w+)\).*?') + + yaml.SafeLoader.add_implicit_resolver(tag_env_file, + pattern_env_file, None) + + def constructor_envfile_variables(loader, node): + """ + Extracts the environment variable from the node's value. 
+ :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value read from file pointed to by environment variable + """ + value = loader.construct_scalar(node) + match = pattern_env_file.findall(value) + if match: + new_value = value + for m in match: + path = os.environ.get(m, '') + if os.path.exists(path): + with open(path, 'r') as f: + new_value = new_value.replace('$(' + m + ')', + f.read().strip()) + return new_value + return value + + yaml.SafeLoader.add_constructor(tag_env_file, + constructor_envfile_variables) try: with open(os.path.abspath(config_file)) as f: diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index 73e537045..d5233f9ee 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -82,3 +82,12 @@ def test_can_substitute_from_environment_variable(self, monkeypatch): "proxy_conf_environment_test.yaml")) assert config["COOKIE_STATE_NAME"] == 'oatmeal_raisin' + + def test_can_substitute_from_environment_variable_file(self, monkeypatch): + cookie_file = os.path.join(TEST_RESOURCE_BASE_PATH, + 'cookie_state_name') + monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME_FILE", cookie_file) + config = SATOSAConfig(os.path.join(TEST_RESOURCE_BASE_PATH, + "proxy_conf_environment_file_test.yaml")) + + assert config["COOKIE_STATE_NAME"] == 'chocolate_chip' diff --git a/tests/test_resources/cookie_state_name b/tests/test_resources/cookie_state_name new file mode 100644 index 000000000..dd5b622b7 --- /dev/null +++ b/tests/test_resources/cookie_state_name @@ -0,0 +1 @@ +chocolate_chip diff --git a/tests/test_resources/proxy_conf_environment_file_test.yaml b/tests/test_resources/proxy_conf_environment_file_test.yaml new file mode 100644 index 000000000..f10a1999a --- /dev/null +++ b/tests/test_resources/proxy_conf_environment_file_test.yaml @@ -0,0 +1,10 @@ +BASE: https://example.com + +STATE_ENCRYPTION_KEY: state_encryption_key + +INTERNAL_ATTRIBUTES: 
{"attributes": {}} + +COOKIE_STATE_NAME: !ENVFILE $(SATOSA_COOKIE_STATE_NAME_FILE) + +BACKEND_MODULES: [] +FRONTEND_MODULES: [] From e78894cb9e25b245385fde125570a6aa56ff0875 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 29 Feb 2020 03:15:21 +0200 Subject: [PATCH 058/288] Declare yaml as a dependency with extensions At the same time, the syntax for the !ENV and !ENVFILE tags is changed to just the env-var name. No need for ${} or $(). Signed-off-by: Ivan Kanakarakis --- doc/README.md | 6 +- .../ldap_attribute_store.yaml.example | 4 +- src/satosa/satosa_config.py | 68 ++----------------- src/satosa/yaml.py | 58 ++++++++++++++++ tests/satosa/test_satosa_config.py | 15 ++-- tests/test_resources/cookie_state_name | 2 +- .../proxy_conf_environment_file_test.yaml | 2 +- .../proxy_conf_environment_test.yaml | 2 +- 8 files changed, 80 insertions(+), 77 deletions(-) create mode 100644 src/satosa/yaml.py diff --git a/doc/README.md b/doc/README.md index cd7339b73..431954cc0 100644 --- a/doc/README.md +++ b/doc/README.md @@ -34,16 +34,16 @@ SATOSA is configured using YAML. All default configuration files, as well as an example WSGI application for the proxy, can be found in the [example directory](../example). -A configuration value that includes the tag !ENV will have a value of the form `${SOME_ENVIRONMENT_VARIABLE}` +A configuration value that includes the tag !ENV will have a value of the form `SOME_ENVIRONMENT_VARIABLE` replaced with the value from the process environment variable of the same name. For example if the file `ldap_attribute_store.yaml' includes ``` -bind_password: !ENV ${LDAP_BIND_PASSWORD} +bind_password: !ENV LDAP_BIND_PASSWORD ``` and the SATOSA process environment includes the environment variable `LDAP_BIND_PASSWORD` with -value `my_password` then the configuration for `bind_password` will be `my_password`. +value `my_password` then the configuration value for `bind_password` will be `my_password`. 
## SATOSA proxy configuration: `proxy_conf.yaml.example` diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 8c14ba667..8f0e74c8f 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -9,10 +9,10 @@ config: ldap_url: ldaps://ldap.example.org bind_dn: cn=admin,dc=example,dc=org # Obtain bind password from environment variable LDAP_BIND_PASSWORD. - bind_password: !ENV ${LDAP_BIND_PASSWORD} + bind_password: !ENV LDAP_BIND_PASSWORD # Obtain bind password from file pointed to by # environment variable LDAP_BIND_PASSWORD_FILE. - # bind_password: !ENVFILE $(LDAP_BIND_PASSWORD) + # bind_password: !ENVFILE LDAP_BIND_PASSWORD search_base: ou=People,dc=example,dc=org read_only: true auto_bind: true diff --git a/src/satosa/satosa_config.py b/src/satosa/satosa_config.py index dee4a17b2..b107e5728 100644 --- a/src/satosa/satosa_config.py +++ b/src/satosa/satosa_config.py @@ -4,11 +4,11 @@ import logging import os import os.path -import re -import yaml +from satosa.exception import SATOSAConfigurationError +from satosa.yaml import load as yaml_load +from satosa.yaml import YAMLError -from .exception import SATOSAConfigurationError logger = logging.getLogger(__name__) @@ -145,69 +145,11 @@ def _load_yaml(self, config_file): :param config_file: config to load. Can be file path or yaml string :return: Loaded config """ - # Tag to indicate environment variable: !ENV - tag_env = '!ENV' - - # Pattern for environment variable: ${word} - pattern_env = re.compile('.*?\${(\w+)}.*?') - - yaml.SafeLoader.add_implicit_resolver(tag_env, pattern_env, None) - - def constructor_env_variables(loader, node): - """ - Extracts the environment variable from the node's value. 
- :param yaml.Loader loader: the yaml loader - :param node: the current node in the yaml - :return: value of the environment variable - """ - value = loader.construct_scalar(node) - match = pattern_env.findall(value) - if match: - new_value = value - for m in match: - new_value = new_value.replace('${' + m + '}', - os.environ.get(m, m)) - return new_value - return value - - yaml.SafeLoader.add_constructor(tag_env, constructor_env_variables) - - # Tag to indicate file pointed to by environment variable: !ENVFILE - tag_env_file = '!ENVFILE' - - # Pattern for environment variable: $(word) - pattern_env_file = re.compile('.*?\$\((\w+)\).*?') - - yaml.SafeLoader.add_implicit_resolver(tag_env_file, - pattern_env_file, None) - - def constructor_envfile_variables(loader, node): - """ - Extracts the environment variable from the node's value. - :param yaml.Loader loader: the yaml loader - :param node: the current node in the yaml - :return: value read from file pointed to by environment variable - """ - value = loader.construct_scalar(node) - match = pattern_env_file.findall(value) - if match: - new_value = value - for m in match: - path = os.environ.get(m, '') - if os.path.exists(path): - with open(path, 'r') as f: - new_value = new_value.replace('$(' + m + ')', - f.read().strip()) - return new_value - return value - - yaml.SafeLoader.add_constructor(tag_env_file, - constructor_envfile_variables) try: with open(os.path.abspath(config_file)) as f: - return yaml.safe_load(f.read()) - except yaml.YAMLError as exc: + return yaml_load(f.read()) + except YAMLError as exc: logger.error("Could not parse config as YAML: {}".format(exc)) if hasattr(exc, 'problem_mark'): mark = exc.problem_mark diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py new file mode 100644 index 000000000..5d300d30e --- /dev/null +++ b/src/satosa/yaml.py @@ -0,0 +1,58 @@ +import os +import re + +from yaml import SafeLoader as _safe_loader +from yaml import YAMLError +from yaml import safe_load as load + + 
+def _constructor_env_variables(loader, node): + """ + Extracts the environment variable from the node's value. + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value of the environment variable + """ + raw_value = loader.construct_scalar(node) + new_value = os.environ.get(raw_value) + if new_value is None: + msg = "Cannot construct value from {node}: {value}".format( + node=node, value=new_value + ) + raise YAMLError(msg) + return new_value + + +def _constructor_envfile_variables(loader, node): + """ + Extracts the environment variable from the node's value. + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value read from file pointed to by environment variable + """ + raw_value = loader.construct_scalar(node) + filepath = os.environ.get(raw_value) + if filepath is None: + msg = "Cannot construct value from {node}: {path}".format( + node=node, path=filepath + ) + raise YAMLError(msg) + + try: + with open(filepath, "r") as fd: + new_value = fd.read() + except (TypeError, IOError) as e: + msg = "Cannot construct value from {node}: {path}".format( + node=node, path=filepath + ) + raise YAMLError(msg) from e + else: + return new_value + + +TAG_ENV = "!ENV" +TAG_ENV_FILE = "!ENVFILE" + + +_safe_loader.add_constructor(TAG_ENV, _constructor_env_variables) +_safe_loader.add_constructor(TAG_ENV_FILE, _constructor_envfile_variables) diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index d5233f9ee..d291d9c87 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -78,16 +78,19 @@ def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_k def test_can_substitute_from_environment_variable(self, monkeypatch): monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME", "oatmeal_raisin") - config = SATOSAConfig(os.path.join(TEST_RESOURCE_BASE_PATH, - "proxy_conf_environment_test.yaml")) + config = 
SATOSAConfig( + os.path.join(TEST_RESOURCE_BASE_PATH, "proxy_conf_environment_test.yaml") + ) assert config["COOKIE_STATE_NAME"] == 'oatmeal_raisin' def test_can_substitute_from_environment_variable_file(self, monkeypatch): - cookie_file = os.path.join(TEST_RESOURCE_BASE_PATH, - 'cookie_state_name') + cookie_file = os.path.join(TEST_RESOURCE_BASE_PATH, 'cookie_state_name') monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME_FILE", cookie_file) - config = SATOSAConfig(os.path.join(TEST_RESOURCE_BASE_PATH, - "proxy_conf_environment_file_test.yaml")) + config = SATOSAConfig( + os.path.join( + TEST_RESOURCE_BASE_PATH, "proxy_conf_environment_file_test.yaml" + ) + ) assert config["COOKIE_STATE_NAME"] == 'chocolate_chip' diff --git a/tests/test_resources/cookie_state_name b/tests/test_resources/cookie_state_name index dd5b622b7..84bb814b8 100644 --- a/tests/test_resources/cookie_state_name +++ b/tests/test_resources/cookie_state_name @@ -1 +1 @@ -chocolate_chip +chocolate_chip \ No newline at end of file diff --git a/tests/test_resources/proxy_conf_environment_file_test.yaml b/tests/test_resources/proxy_conf_environment_file_test.yaml index f10a1999a..801c109e8 100644 --- a/tests/test_resources/proxy_conf_environment_file_test.yaml +++ b/tests/test_resources/proxy_conf_environment_file_test.yaml @@ -4,7 +4,7 @@ STATE_ENCRYPTION_KEY: state_encryption_key INTERNAL_ATTRIBUTES: {"attributes": {}} -COOKIE_STATE_NAME: !ENVFILE $(SATOSA_COOKIE_STATE_NAME_FILE) +COOKIE_STATE_NAME: !ENVFILE SATOSA_COOKIE_STATE_NAME_FILE BACKEND_MODULES: [] FRONTEND_MODULES: [] diff --git a/tests/test_resources/proxy_conf_environment_test.yaml b/tests/test_resources/proxy_conf_environment_test.yaml index a2c0d7968..ab8118f31 100644 --- a/tests/test_resources/proxy_conf_environment_test.yaml +++ b/tests/test_resources/proxy_conf_environment_test.yaml @@ -4,7 +4,7 @@ STATE_ENCRYPTION_KEY: state_encryption_key INTERNAL_ATTRIBUTES: {"attributes": {}} -COOKIE_STATE_NAME: !ENV ${SATOSA_COOKIE_STATE_NAME} 
+COOKIE_STATE_NAME: !ENV SATOSA_COOKIE_STATE_NAME BACKEND_MODULES: [] FRONTEND_MODULES: [] From db888cf08518e73f81df9f8a1ccb1761dd507876 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 29 Feb 2020 03:33:04 +0200 Subject: [PATCH 059/288] Remove unneeded check Signed-off-by: Ivan Kanakarakis --- src/satosa/yaml.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py index 5d300d30e..0d8747408 100644 --- a/src/satosa/yaml.py +++ b/src/satosa/yaml.py @@ -32,12 +32,6 @@ def _constructor_envfile_variables(loader, node): """ raw_value = loader.construct_scalar(node) filepath = os.environ.get(raw_value) - if filepath is None: - msg = "Cannot construct value from {node}: {path}".format( - node=node, path=filepath - ) - raise YAMLError(msg) - try: with open(filepath, "r") as fd: new_value = fd.read() From 22f5a8dfaf9f9374c79577db7f7727aae37f64d5 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 25 Mar 2020 20:48:03 +0200 Subject: [PATCH 060/288] Rename TAG_ENV_FILE to TAG_ENVFILE Signed-off-by: Ivan Kanakarakis --- src/satosa/yaml.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py index 0d8747408..9efa202c6 100644 --- a/src/satosa/yaml.py +++ b/src/satosa/yaml.py @@ -45,8 +45,8 @@ def _constructor_envfile_variables(loader, node): TAG_ENV = "!ENV" -TAG_ENV_FILE = "!ENVFILE" +TAG_ENVFILE = "!ENVFILE" _safe_loader.add_constructor(TAG_ENV, _constructor_env_variables) -_safe_loader.add_constructor(TAG_ENV_FILE, _constructor_envfile_variables) +_safe_loader.add_constructor(TAG_ENVFILE, _constructor_envfile_variables) From 67b96102d003e545dc64457c854778179644ac89 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 25 Mar 2020 21:01:03 +0200 Subject: [PATCH 061/288] Update the yaml dependency on satosa.plugin_loader module Signed-off-by: Ivan Kanakarakis --- src/satosa/plugin_loader.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/src/satosa/plugin_loader.py b/src/satosa/plugin_loader.py index 65c535de2..b7eb4cf46 100644 --- a/src/satosa/plugin_loader.py +++ b/src/satosa/plugin_loader.py @@ -7,8 +7,8 @@ from contextlib import contextmanager from pydoc import locate -import yaml -from yaml.error import YAMLError +from satosa.yaml import load as yaml_load +from satosa.yaml import YAMLError from .backends.base import BackendModule from .exception import SATOSAConfigurationError @@ -143,7 +143,7 @@ def _response_micro_service_filter(cls): def _load_plugin_config(config): try: - return yaml.safe_load(config) + return yaml_load(config) except YAMLError as exc: if hasattr(exc, 'problem_mark'): mark = exc.problem_mark From f9f1b5c58ce9af7d4adfcdf2e44cdd36d17eec67 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 25 Mar 2020 23:43:03 +0200 Subject: [PATCH 062/288] Update documentation for ENV and ENVFILE yaml tags Signed-off-by: Ivan Kanakarakis --- doc/README.md | 33 ++++++++++++++++++++++++++++----- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/doc/README.md b/doc/README.md index 431954cc0..fe87e1f97 100644 --- a/doc/README.md +++ b/doc/README.md @@ -34,16 +34,39 @@ SATOSA is configured using YAML. All default configuration files, as well as an example WSGI application for the proxy, can be found in the [example directory](../example). -A configuration value that includes the tag !ENV will have a value of the form `SOME_ENVIRONMENT_VARIABLE` -replaced with the value from the process environment variable of the same name. For example if the file -`ldap_attribute_store.yaml' includes +The default YAML syntax is extended to include the capability to resolve +environment variables. The following tags are used to achieve this: + +* The `!ENV` tag + +The `!ENV` tag is followed by a string that denotes the environment variable +name. It will be replaced by the value of the environment variable with the +same name. 
+ +In the example below `LDAP_BIND_PASSWORD` will, at runtime, be replaced with +the value from the process environment variable of the same name. If the +process environment has been set with `LDAP_BIND_PASSWORD=secret_password` then +the configuration value for `bind_password` will be `secret_password`. ``` bind_password: !ENV LDAP_BIND_PASSWORD ``` -and the SATOSA process environment includes the environment variable `LDAP_BIND_PASSWORD` with -value `my_password` then the configuration value for `bind_password` will be `my_password`. +* The `!ENVFILE` tag + +The `!ENVFILE` tag is followed by a string that denotes the environment +variable name. It will be replaced by the value of the environment variable +with the same name. + +In the example below `LDAP_BIND_PASSWORD_FILE` will, at runtime, be replaced +with the value from the process environment variable of the same name. If the +process environment has been set with +`LDAP_BIND_PASSWORD_FILE=/etc/satosa/secrets/ldap.txt` then the configuration +value for `bind_password` will be `secret_password`. + +``` +bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE +``` ## SATOSA proxy configuration: `proxy_conf.yaml.example` From 23df299a5e5a17cad95d8802414aae04f08091cd Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Wed, 8 Apr 2020 10:53:43 -0500 Subject: [PATCH 063/288] Flake8 formatting Better line lengths for flake8 formatting. No change in functionality. --- .../micro_services/ldap_attribute_store.py | 34 ++++++++++--------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index e401127e3..0f373310d 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -399,6 +399,8 @@ def process(self, context, data): Default interface for microservices. Process the input data for the input context. 
""" + session_id = lu.get_session_id(context.state) + issuer = data.auth_info.issuer requester = data.requester config = self.config.get(requester) or self.config["default"] @@ -408,13 +410,13 @@ def process(self, context, data): "issuer": issuer, "config": self._filter_config(config), } - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) # Ignore this SP entirely if so configured. if config["ignore"]: msg = "Ignoring SP {}".format(requester) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.info(logline) return super().process(context, data) @@ -439,7 +441,7 @@ def process(self, context, data): if filter_value ] msg = {"message": "Search filters", "filter_values": filter_values} - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) # Initialize an empty LDAP record. 
The first LDAP record found using @@ -453,7 +455,7 @@ def process(self, context, data): "message": "LDAP server host", "server host": connection.server.host, } - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) for filter_val in filter_values: @@ -463,7 +465,7 @@ def process(self, context, data): "message": "LDAP query with constructed search filter", "search filter": search_filter, } - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) attributes = ( @@ -485,14 +487,14 @@ def process(self, context, data): exp_msg = "Caught unhandled exception: {}".format(err) if exp_msg: - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=exp_msg) + logline = lu.LOG_FMT.format(id=session_id, message=exp_msg) logger.error(logline) return super().process(context, data) if not results: msg = "Querying LDAP server: No results for {}." msg = msg.format(filter_val) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) continue @@ -502,10 +504,10 @@ def process(self, context, data): responses = connection.get_response(results)[0] msg = "Done querying LDAP server" - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) msg = "LDAP server returned {} records".format(len(responses)) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.info(logline) # For now consider only the first record found (if any). 
@@ -514,7 +516,7 @@ def process(self, context, data): msg = "LDAP server returned {} records using search filter" msg = msg + " value {}" msg = msg.format(len(responses), filter_val) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.warning(logline) record = responses[0] break @@ -524,7 +526,7 @@ def process(self, context, data): if config["clear_input_attributes"]: msg = "Clearing values for these input attributes: {}" msg = msg.format(data.attributes) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) data.attributes = {} @@ -549,7 +551,7 @@ def process(self, context, data): "DN": record["dn"], "attributes": record["attributes"], } - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) # Populate attributes as configured. @@ -573,11 +575,11 @@ def process(self, context, data): # may use it if required. 
context.decorate(KEY_FOUND_LDAP_RECORD, record) msg = "Added record {} to context".format(record) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) else: msg = "No record found in LDAP so no attributes will be added" - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.warning(logline) on_ldap_search_result_empty = config["on_ldap_search_result_empty"] if on_ldap_search_result_empty: @@ -592,11 +594,11 @@ def process(self, context, data): encoded_idp_entity_id, ) msg = "Redirecting to {}".format(url) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.info(logline) return Redirect(url) msg = "Returning data.attributes {}".format(data.attributes) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) return super().process(context, data) From d244ffea8b6c6ac837077c8c22a324195c28d713 Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Wed, 8 Apr 2020 11:45:09 -0500 Subject: [PATCH 064/288] Generalize per-SP override for LDAP attribute authority Generalize the per-SP override for the LDAP attribute authority microservice so that the override can be per-SP, per-IdP, or per- CO virtual IdP. This enhancement does not allow for nested overrides, which may be included in future work. 
--- .../ldap_attribute_store.yaml.example | 24 ++++++++++++------- .../micro_services/ldap_attribute_store.py | 24 +++++++++++++++---- 2 files changed, 35 insertions(+), 13 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 8f0e74c8f..a83873a9b 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -2,10 +2,11 @@ module: LdapAttributeStore name: LdapAttributeStore config: - # The microservice may be configured per SP. - # The configuration key is the entityID of the SP. - # The empty key ("") specifies the default configuration - "": + # The microservice may be configured per entityID. + # The configuration key is the entityID of the requesting SP, + # the authenticating IdP, or the entityID of the CO virtual IdP. + # The key "default" specifies the default configuration + default: ldap_url: ldaps://ldap.example.org bind_dn: cn=admin,dc=example,dc=org # Obtain bind password from environment variable LDAP_BIND_PASSWORD. @@ -96,9 +97,13 @@ config: # from LDAP. The default is not to redirect. on_ldap_search_result_empty: https://my.vo.org/please/go/enroll - # The microservice may be configured per SP. - # The configuration key is the entityID of the SP. - # Ī‘ny missing parameters are looked up from the default configuration. + # The microservice may be configured per entityID. + # The configuration key is the entityID of the requesting SP, + # the authenticating IdP, or the entityID of the CO virtual IdP. + # When more than one configured entityID matches during a flow + # the priority ordering is requesting SP, then authenticating IdP, then + # CO virtual IdP. Ī‘ny missing parameters are taken from the + # default configuration. 
https://sp.myserver.edu/shibboleth-sp: search_base: ou=People,o=MyVO,dc=example,dc=org search_return_attributes: @@ -109,6 +114,9 @@ config: user_id_from_attrs: - uid - # The microservice may be configured to ignore a particular SP. + https://federation-proxy.my.edu/satosa/idp/proxy/some_co + search_base: ou=People,o=some_co,dc=example,dc=org + + # The microservice may be configured to ignore a particular entityID. https://another.sp.myserver.edu: ignore: true diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 0f373310d..6d61559b1 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -18,6 +18,8 @@ from satosa.exception import SATOSAError from satosa.micro_services.base import ResponseMicroService from satosa.response import Redirect +from satosa.frontends.saml2 import SAMLVirtualCoFrontend +from satosa.routing import STATE_KEY as ROUTING_STATE_KEY logger = logging.getLogger(__name__) @@ -399,23 +401,35 @@ def process(self, context, data): Default interface for microservices. Process the input data for the input context. 
""" - session_id = lu.get_session_id(context.state) + state = context.state + session_id = lu.get_session_id(state) - issuer = data.auth_info.issuer requester = data.requester - config = self.config.get(requester) or self.config["default"] + issuer = data.auth_info.issuer + + frontend_name = state.get(ROUTING_STATE_KEY) + co_entity_id_key = SAMLVirtualCoFrontend.KEY_CO_ENTITY_ID + co_entity_id = state.get(frontend_name, {}).get(co_entity_id_key) + + entity_ids = [requester, issuer, co_entity_id, "default"] + + config, entity_id = next((self.config.get(e), e) + for e in entity_ids if self.config.get(e)) + msg = { "message": "entityID for the involved entities", "requester": requester, "issuer": issuer, "config": self._filter_config(config), } + if co_entity_id: + msg["co_entity_id"] = co_entity_id logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.debug(logline) - # Ignore this SP entirely if so configured. + # Ignore this entityID entirely if so configured. if config["ignore"]: - msg = "Ignoring SP {}".format(requester) + msg = "Ignoring entityID {}".format(entity_id) logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.info(logline) return super().process(context, data) From fc6ee2961a7c98116f9ead1b5c11c84ccd30c23c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 17 Apr 2020 18:28:16 +0300 Subject: [PATCH 065/288] Add cdb.json example file Signed-off-by: Ivan Kanakarakis --- .gitignore | 3 --- example/cdb.json.example | 10 ++++++++++ 2 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 example/cdb.json.example diff --git a/.gitignore b/.gitignore index ac511933f..6c67df01d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,9 +4,6 @@ _build *.pyc *.log* -example/* -!example/plugins - *.xml *.db *static/ diff --git a/example/cdb.json.example b/example/cdb.json.example new file mode 100644 index 000000000..a64750b0e --- /dev/null +++ b/example/cdb.json.example @@ -0,0 +1,10 @@ +{ + "test_client": { + "response_types": 
["code", "and", "other", "types"], + "client_id": "the_client_id", + "client_secret": "the_client_secret", + "redirect_uris": [ + "http://example.org/rp/the_redirect_uri" + ] + } +} From 1b7bcf0dd0ca53dd6ca4a1d1643934fd3c49a881 Mon Sep 17 00:00:00 2001 From: wert Date: Sun, 19 Apr 2020 13:03:15 +0000 Subject: [PATCH 066/288] Exception handing on context.request["SAMLResponse"] KeyError --- src/satosa/backends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 13349ee37..2c37e6a2b 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -311,7 +311,7 @@ def authn_response(self, context, binding): :param binding: The saml binding type :return: response """ - if not context.request["SAMLResponse"]: + if not context.request.get("SAMLResponse"): msg = "Missing Response for state" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From f3b250ef28ceefa3a3cb4e91953ceb9e72557e0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20Vysko=C4=8Dil?= Date: Wed, 6 May 2020 18:09:27 +0200 Subject: [PATCH 067/288] Fix the cdb.json example file * Key must be the client_id --- example/cdb.json.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/cdb.json.example b/example/cdb.json.example index a64750b0e..611574b5d 100644 --- a/example/cdb.json.example +++ b/example/cdb.json.example @@ -1,5 +1,5 @@ { - "test_client": { + "the_client_id": { "response_types": ["code", "and", "other", "types"], "client_id": "the_client_id", "client_secret": "the_client_secret", From e5e504468ea56862f9644be05ad27cafb26bfba5 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 26 May 2020 01:53:53 +0300 Subject: [PATCH 068/288] Remove mention of SAMLUnsolicitedFrontend frontend form the changelog Signed-off-by: Ivan Kanakarakis --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index d22f425d5..8a0324494 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -112,7 +112,6 @@ Trigger new version build to automatically upload to PyPI and docker hub. - Add initial eIDAS support - Support memoization of IdP selection when using MDQ - plugins: Warn when AssertionConsumerService binding is HTTP-Redirect in the saml2 backend -- plugins: Add SAMLUnsolicitedFrontend frontend - plugins: Add SAMLVirtualCoFrontend frontend - plugins: Add extra_scopes configuration to support multiple scopes - plugins: Use the latest pyop version From 047eaebeb7f14c73a1b8e6b4088f4eaa4faae95f Mon Sep 17 00:00:00 2001 From: Scott Koranda Date: Thu, 4 Jun 2020 07:19:33 -0500 Subject: [PATCH 069/288] Add PrimaryIdentifier YAML configuration example Add PrimaryIdentifier YAML configuration example. --- .../primary_identifier.yaml.example | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 example/plugins/microservices/primary_identifier.yaml.example diff --git a/example/plugins/microservices/primary_identifier.yaml.example b/example/plugins/microservices/primary_identifier.yaml.example new file mode 100644 index 000000000..dbc13dbf7 --- /dev/null +++ b/example/plugins/microservices/primary_identifier.yaml.example @@ -0,0 +1,51 @@ +module: PrimaryIdentifier +name: PrimaryIdentifier +config: + # The ordered identifier candidates are searched in order + # to find a candidate primary identifier. The search ends + # when the first candidate is found. The identifier or attribute + # names are the internal SATOSA names for the attributes as + # defined in internal_attributes.yaml. The configuration below + # would search in order for eduPersonUniqueID, eduPersonPrincipalName + # combined with a SAML2 Persistent NameID, eduPersonPrincipalName + # combined with eduPersonTargetedId, eduPersonPrincipalName, + # SAML 2 Persistent NameID, and finally eduPersonTargetedId. 
+ ordered_identifier_candidates: + - attribute_names: [epuid] + # The line below combines, if found, eduPersonPrincipalName and SAML 2 + # persistent NameID to create a primary identifier. + - attribute_names: [eppn, name_id] + name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:persistent + # The line below combines, if found, eduPersonPrincipalName and + # eduPersonTargetedId to create a primary identifier. + - attribute_names: [eppn, edupersontargetedid] + - attribute_names: [eppn] + - attribute_names: [name_id] + name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:persistent + # The line below addes the IdP entityID to the value for the SAML2 + # Persistent NameID to ensure the value is fully scoped. + add_scope: issuer_entityid + - attribute_names: [edupersontargetedid] + add_scope: issuer_entityid + # The internal SATOSA attribute into which to place the primary + # identifier value once found from the above configured ordered + # candidates. + primary_identifier: uid + # Whether or not to clear the input attributes after setting the + # primary identifier value. + clear_input_attributes: no + # If defined redirect to this page if no primary identifier can + # be found. + on_error: https://my.org/errors/no_primary_identifier + + # The microservice may be configured per entityID. + # The configuration key is the entityID of the requesting SP, + # or the authenticating IdP. An SP configuration overrides an IdP + # configuration when there is a conflict. 
+ "https://my.org/idp/shibboleth": + ordered_identifier_candidates: + - attribute_names: [eppn] + + "https://service.my.org/sp/shibboleth": + ordered_identifier_candidates: + - attribute_names: [mail] From 2801eb1a19feddf5e8571a383368cfc30b683295 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 9 Jun 2020 00:56:49 +0300 Subject: [PATCH 070/288] Make the AuthnContextClassRefs available through the context Signed-off-by: Ivan Kanakarakis --- src/satosa/context.py | 1 + src/satosa/frontends/saml2.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/src/satosa/context.py b/src/satosa/context.py index 196cb6f4d..a30f67c3d 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -18,6 +18,7 @@ class Context(object): KEY_TARGET_ENTITYID = 'target_entity_id' KEY_FORCE_AUTHN = 'force_authn' KEY_MEMORIZED_IDP = 'memorized_idp' + KEY_AUTHN_CONTEXT_CLASS_REF = 'authn_context_class_ref' def __init__(self): self._path = None diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 168dddc66..545dbea6f 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -247,6 +247,11 @@ def _handle_authn_request(self, context, binding_in, idp): idp, idp_policy, requester, context.state ) + authn_context_class_ref_nodes = getattr( + authn_req.requested_authn_context, 'authn_context_class_ref', [] + ) + authn_context = [ref.text for ref in authn_context_class_ref_nodes] + context.decorate(Context.KEY_AUTHN_CONTEXT_CLASS_REF, authn_context) context.decorate(Context.KEY_METADATA_STORE, self.idp.metadata) return self.auth_req_callback_func(context, internal_req) From ef00df2cdd0882ad7ee51d6b3d3ba925f3fcc9c8 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 9 Jun 2020 01:48:37 +0300 Subject: [PATCH 071/288] Release version 7.0.0 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 45 +++++++++++++++++++++++++++++++++++++++++++++ setup.py | 3 ++- 3 files changed, 48 insertions(+), 2 
deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b29a4f3fa..767a45603 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 6.1.0 +current_version = 7.0.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a0324494..d365ba66c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,50 @@ # Changelog +## 7.0.0 (2020-06-09) + +- Make the AuthnContextClassRefs available through the context +- Extend YAML parsing to understand the `!ENV` and `!ENVFILE` tags, that read + values or file contents from the environment +- Add `satosa.yaml` module to handle YAML parsing +- BREAKING: Remove previously deprecated configuration options: + - `hash`: use the hasher micro-service instead + - `USER_ID_HASH_SALT`: use the hasher micro-service instead +- BREAKING: Remove previously deprecated classes: + - `SAMLInternalResponse`: use `satosa.internal.InternalData` instead + - `InternalRequest`: use `satosa.internal.InternalData` instead + - `InternalResponse`: use `satosa.internal.InternalData` instead + - `UserIdHashType`: use the hasher micro-service instead + - `UserIdHasher`: use the hasher micro-service instead +- BREAKING: Remove previously deprecated functions: + - `hash_attributes`: use the hasher micro-service instead + - `oidc_subject_type_to_hash_type`: use `satosa.internal.InternalData.subject_type` directly + - `saml_name_id_format_to_hash_type`: use `satosa.internal.InternalData.subject_type` directly + - `hash_type_to_saml_name_id_format`: use `satosa.internal.InternalData.subject_type` directly +- BREAKING: Remove previously deprecated modules: + - `src/satosa/internal_data.py` +- BREAKING: Remove previously deprecated properties of the `saml2.internal.InternalData` class: + - `name_id`: use use `subject_id` instead, + - `user_id`: use `subject_id` instead, + - `user_id_hash_type`: use `subject_type` instead, + - `approved_attributes`: use `attributes` instead, +- The cookie is now 
a session-cookie; To have the the cookie removed + immediately after use, the CONTEXT_STATE_DELETE configuration option should + be set to `True` +- Create dedicated module to handle the proxy version +- Set the logger to log to stdout on DEBUG level by default +- Cleanup code around the wsgi calls +- micro-services: separate core from micro-services; drop checks for + micro-services order; drop references to the Consent and AccountLinking + micro-services +- micro-services: generate a random name for the pool name when REUSABLE client + strategy is used for the ldap-attribute-store micro-service. +- docs: improve example proxy configuration +- docs: minor fixes/typos/etc +- build: update CI to use Travis-CI stages +- build: run tests for Python3.8 +- build: tag docker image by commit, branch, PR number, version and "latest" + + ## 6.1.0 (2020-02-28) - Set the SameSite cookie attribute to "None" diff --git a/setup.py b/setup.py index 0f9fb46eb..2377459ac 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='6.1.0', + version='7.0.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', @@ -34,6 +34,7 @@ "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", ], entry_points={ "console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"] From 4319bd688cae45118ebb41de7fa7d41855f83f78 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 9 Jun 2020 14:28:23 +0300 Subject: [PATCH 072/288] Fix the CI release process Signed-off-by: Ivan Kanakarakis --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4e80014d7..3fa650542 100644 --- a/.travis.yml +++ b/.travis.yml @@ -135,7 +135,7 @@ jobs: - docker push "$DOCKER_TAG_LATEST" - stage: Deploy new release on GitHub - if: 
type = push AND branch = master AND tag IS present + if: type = push AND tag IS present before_install: skip install: skip script: skip @@ -147,7 +147,7 @@ jobs: tags: true - stage: Deploy new release on PyPI - if: type = push AND branch = master AND tag IS present + if: type = push AND tag IS present before_install: skip install: skip script: skip From 74fc79a7816836b4af23dcbc01a870177f99738a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 9 Jun 2020 14:28:59 +0300 Subject: [PATCH 073/288] Release version 7.0.1 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 5 +++++ setup.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 767a45603..489e6c1c5 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 7.0.0 +current_version = 7.0.1 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index d365ba66c..c813a6ede 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # Changelog +## 7.0.1 (2020-06-09) + +- build: fix the CI release process + + ## 7.0.0 (2020-06-09) - Make the AuthnContextClassRefs available through the context diff --git a/setup.py b/setup.py index 2377459ac..3bfe6d94d 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='7.0.0', + version='7.0.1', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From a7430c0b5c5e1350f0b20fd5fd258a4f6a02e012 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 10 Jul 2020 12:44:23 +0300 Subject: [PATCH 074/288] Fix SAMLVirtualCoFrontend metadata generation and example config Signed-off-by: Ivan Kanakarakis --- .../frontends/saml2_virtualcofrontend.yaml.example | 6 +++--- src/satosa/frontends/saml2.py | 12 ++++++------ tests/satosa/frontends/test_saml2.py | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git 
a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index 5ab44fee0..111dbf732 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -2,7 +2,7 @@ module: satosa.frontends.saml2.SAMLVirtualCoFrontend name: Saml2IDP config: collaborative_organizations: - # The encodeable name for the CO will be URL encoded and used + # The encodeable name for the CO will be URL encoded and used # both for the entityID and the SSO endpoints of the virtual IdP. # The entityID has the form # @@ -12,7 +12,7 @@ config: # # {base}/{backend}/{co_name}/{path} # - - encodedable_name: MESS + - encodeable_name: MESS # If organization and contact_person details appear they # will override the same from the base configuration in # the generated metadata for the CO IdP. @@ -23,7 +23,7 @@ config: contact_person: - contact_type: technical email_address: help@messproject.org - given_name MESS Technical Support + given_name: MESS Technical Support # SAML attributes and static values about the CO to be asserted for each user. # The key is the SATOSA internal attribute name. co_static_saml_attributes: diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 545dbea6f..752ff431b 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -919,7 +919,7 @@ def _add_endpoints_to_config(self, config, co_name, backend_name): return config - def _add_entity_id(self, context, config, co_name): + def _add_entity_id(self, config, co_name): """ Use the CO name to construct the entity ID for the virtual IdP for the CO and add it to the config. 
Also add it to the @@ -943,7 +943,6 @@ def _add_entity_id(self, context, config, co_name): base_entity_id = config['entityid'] co_entity_id = "{}/{}".format(base_entity_id, quote_plus(co_name)) config['entityid'] = co_entity_id - context.decorate(self.KEY_CO_ENTITY_ID, co_entity_id) return config @@ -1026,10 +1025,11 @@ def _create_co_virtual_idp(self, context): # and the entityID for the CO virtual IdP. backend_name = context.target_backend idp_config = copy.deepcopy(self.idp_config) - idp_config = self._add_endpoints_to_config(idp_config, - co_name, - backend_name) - idp_config = self._add_entity_id(context, idp_config, co_name) + idp_config = self._add_endpoints_to_config( + idp_config, co_name, backend_name + ) + idp_config = self._add_entity_id(idp_config, co_name) + context.decorate(self.KEY_CO_ENTITY_ID, idp_config['entityid']) # Use the overwritten IdP config to generate a pysaml2 config object # and from it a server object. diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index 3e89fd2fa..00890a56e 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -541,7 +541,7 @@ def test_co_static_attributes(self, frontend, context, internal_response, backend_name = context.target_backend idp_conf = frontend._add_endpoints_to_config(idp_conf, co_name, backend_name) - idp_conf = frontend._add_entity_id(context, idp_conf, co_name) + idp_conf = frontend._add_entity_id(idp_conf, co_name) # Use a utility function to serialize the idp_conf IdP configuration # fixture to a string and then dynamically update the sp_conf From 106962029c961af1748de384fc2590984b2aa911 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 21 Jul 2020 15:31:53 +0300 Subject: [PATCH 075/288] Add middleware image --- doc/images/middlewares.png | Bin 0 -> 64995 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 doc/images/middlewares.png diff --git a/doc/images/middlewares.png 
b/doc/images/middlewares.png new file mode 100644 index 0000000000000000000000000000000000000000..2aca071ad661109bc286531a558000196e3a9937 GIT binary patch literal 64995 zcmeFZXH-*L)GiDdRP2I^*uVlvMARr|WsY;VxLXjFg zDhLV00)!4q?@I67wG;H<{q>D|f8Fmro^gh8@4ebwbItn9^+a1!W#_g-+nAV`cB)>z zq{GCtiNM6fdSUBk@Fd7r;V1YDW^-QSJQGuH(DvmUtl*zqwihpI+h0_<=&> zrL~H+leLSjrH;x4CZ==GpX;05I-IR9!`X+fZ>!%_P_kc#f{oSxa;rkF1lzKyrFf#)Uq^MRLHBAgOG(EdS>6@-Ex_{Vc@ zer0&=sE}EzI*XLncJ~64u8aS8AzKg(D)j@T1K`8a=N$I_XcCS<&Z z-ALMC-|L%En2z||+Y>id;~d}JdN}3yK7~q%Q+zOS!*oeVl6T{Z$v+}`;zJ9+QKCDV+yEol{TiJ1jYkNXTfkeU+yA@Z0iJy5Irt~os| z^keipw+}^<*C!|CV3kiwOkVz~c3E=Njh!6m8dX4+cbb(Mhtv602GHauqVybVykGTHz0_ zm=eSK1?0lWvc#;=vRz`gZ?EC6BA4pxl+rx)_oEI*Xm|ZXj+=W9{QN@r_~hf!sXM=V z4R>Gh!PgK5vZu}Djh#hzUCD@GU9?*;;L?14DP#Knw12*EIY0LFN>zCswia85t;fDI zV|gJ*j_IlRE_$0AjjWorn>uv8%jynTk+@skewj~#INN9y~v8xoWJJGNS0 zxYmrrXO1i#h!cxdIJ(P{yFX>gv_jN?`N>i)%e2O-zP{ej?J$9(2F@DiD$a8RoVsWg z5GJKJ_ttRg)roMeJ{{XVx^lW4sksg3cl#5Q-QpctmX?n;raI;641cxk#IhKFlXV)| zS-0OE@ip}3p=>EZ{LA6N$8Qck`lKFsOV_{E@%mxbiO%Vu=_rZP-5e&OD$z?jDl3$J zjV_7)2+U`~AGwFCKIwFjTDzr__1x5h*1WcNM_*j^-rn6XH#$pNl(_4&(lFHi0i7`* za<4#IE{4=K;o79Ayq3;n^W2v?KF(Wzlj-&eAT)R^uj*(pF?sSaG2MH>#Iy(=-5X?L z!iY054O=iV$wo3Uao&zDxh4-@+_g4RMQLa-34-5SnOK?GnKpr6%-}yJ=0i+CW zIQWb0_Elpv6Vt&c=znHa%>6LXgR!lik*kr0x~!$Mqv#DQ=bP4|cO7p-t(fHR%7UMc z)~+{r?m9X+p=IwXoS=Up3w}e7#ZK_hKXJ8JIANrr&2!P&#hOP_^sK1(3B_$ZJUsF) zRw!AWOIOyLgMTTUxaI14TUJcW-Q8W(9WLtZVk34&Mn*VB> z34M~$$?twHS)(moY;U{TIy><|{oc6gjB!;saRM6X&mYD-U2ReSj^u=1Ulv%P81zQ$ zjHtNSpT0p;dFZLEmW!=57#Zqc@r*qE%fFtjk0UPz4ZdMAjFr-#f~6{MlNb9#HpOk~ z|L_|#F)1^tUOKOLmwB{fcZPK%mU|IwAZ-ITqnBTM%8^*#w)vg)w`+KeS9LTU@lh9 z^_)ICGYf3%9v&xIrdlu2I!pMI0z5Vr)jRgM!>yb+bW@QC6(u|$?UOO-0|x~EZJ}gR}4a+^CEr) zS6p0t2A?(9c;fcp=U0ti!=BjrPXs+Vl=bTH$dctPS$=k&9106cB7(~lvyo>bQOt0{ zH~EsqD;C>WmwA1+ZJD_pLHY0;Ws3C;sJc@S{$t6q;Q8a(Jv^VaD%DX+$E(>k9v}+~ z!10xO66_m!R9T7n{C{6=pvZqz`j12Y2&waXa+V>xwdQ_RXt=)HR z1@kSte72RfG}xPF=5B`R!p`aQI?EJA&&SS{rO2*Ke@jmYQY?`jN(`-aGJzDi5g!mY 
z66ez&0cu|EestLB&3N%Z?4(=g?BYmvwi9V(R`X{h6P%>&j;KEBZdzbX#t$cOuiWDI zE%Kh-8wtZr%8225Znm1%kGa(+Yec1$j+cx)R_b;o+jG;Gbfwaz<;yw2hoJvWY-O{p z_LzGFre8@B=4rc*%If*{VNj+!rsq)Inty?7`s*Od!dfu=qF11jb+9=9W z5mo&dln8vpWjHnE*sJg)-x}Qt(YM`x!wUm(*;Cj)A*9R+(otCQWgbLTcG+aD-O}vX zb~gdvw*p<<)?RZR?uKkr$AoZG zB|I|GxwsFH+orL)e6!qrqI0+sw`o(%Ne-Oq-cJSgq6~rHSJDNuxK0G_xT97U~*N#qC;l}3IvbWyQsc$3s z`kI-uXg;Yt%VyPo24P0rF)P=#pjlu;ja@`3v7O@#jrE?akzIPoY5q3yTQ4s#h;Y0R zdF~D9fko^%`SSyZRs-oA|L3yhNpgCqmSVB&>U>Xn`<~N?O{Gbk=67Ey;(HcqJI+cw zG;7226VkV71Hw#Ab#AC|O`5V*dQ z?(sZwPsRZ-oGZ+wyctbZFciCkT^vdoiJG@6Qh(X%g607HK4NrTRoH#S7YmgF#>uHe?-4LKm1+VC5u*8&2!%$c$iTzpE`6Z#Ey9m zzFOR@)F(qAFc*ZCMd7hfzuBPQvI;eRu(|R$mlbQ{RhoR{z@|=*ff#+1RY*1=rQNKM z&}Gl^O*ojhSG71-e`>d2xzqBuveMDBrGtk7v-6#=lq7_TtF>OeSE;wPcUMmFssxg^ z*}+YCX>vP8$7?p3K(%Kf@@QvC`ph&M4q)a8HnD`c&ck%>!UBW1QJU}irF)fO+pJp5 z@#TDd$qGfCp7vODh|9^i-Ppm4Gr96K=en1YGxlqHBK2{UPX4~EoCWim9_-}S&`JMD zW6T$4(3!d)SPc6W)x&IiA`ju@|FI(DMKHcn&Yh-xTt3CdH5i(i;aFQyc139aSW%av z(K-Mw&q`3Rw?d!2=4Hp1MeP`|O z?;j3L8B#I8SEJY*RTr#F6z&sxYG`9dGR}tzzn!Zf%ZPsKj^J8J?4ODFC{B!=@akF` zw%oG&ScVNeN_wWI*jV6mSzvLr+|D-JgI%@(1l@oxlHleDFH(RnEq6{AC=* zgGD+#yQ}qA>x*C<|77%uzsQjf7U{Tm;;+^gw*YxRyK0`=upA~_;XNMWf5iOFNdFP@ zFW&l}!I_oUF2)T(hd1M1WqW)3wuUq{meAwfOO_Hq2Y==C(RCp-1)gvLQJHLUN;W2TV};ApE8J&Q(w8X$QH>G|R{v zNh}D|vj@-<6aq+dlb*~Xx`MH&VYx3HE)s4eVRFg=;x9R%6-%BygQu$R~38{Hg=r*i6kLMz9@$M_;t;f>s!52c*2- z?(BMclneJ|+mRXfz%7?(o#WQERB!JedjZ{XTEP(}tA$aVjQY(G`*Siqc2dhjyq#1L3 zofgpaD{BEe8#W=w{tN>| z?(S$q>F|jU0nKUlgU(|m6q{S>33+Z~t!KhBi~wN*R4Uc`@0x})*qE`WIKn=_y}daj zXE%@zRt0>LBSGmpe^De9P=s*AEe4W1zdx?L3u~XZVOZsmjG6zw+t&ySEWUL6rQ*1y zVmn~6lJ%yR?XoHshdrw!NXvZ-Tgpz-d)Rn7)t^;v7c0~50QPC4bj7dd9%hRRacc|1 zMZQZ#>6nqGx;CY8=`pu}QpG;{e&QMh!R}_BOfZAC;At_jRxe{lGK&lh%`o~@i7VL39^@5~G{{~j z%|<(!_coVy&SZJ|;=ixouw`?V&5#pUR&NgI#xiM7=U zS~ihnJf|?%uG;~;nijK^u$9V9yIuX-U^&R8R@(5rw=G%rS;fhV&%5TId(4DOK(Yz& zyB+6k_m)P*3$1idKxOMGCb`*W%Ms<Up&O%#UYc2FSHVS~}#!G^!|0zVM9Np{2AkUFN;EptLryxWcda>xst_ 
zIVxX>88y7n(Y4Ycdsm|cxT7})u3TOlQd&tF#+x_|<)`L)!|Z8UnOgSdR00Ht6Jh&RDERjjQ{IFHB9Ew}Zra z(Mp%Q7q?qA{aRg`$j-h`*)iZdzkBVKt^sNbx+l(QXuxNDpk!=(6d_LFflCjm1{3W0 ziO~hHqL=_Bk2@ej)s$Vk^J#1Klp;J`wHO7rc1NtVHjW)xqKmUiHQ*5$dcqp$e$q4c zPwZcxiB-H+Ou>p+x~)!YtxbbykGcg)V(QHcl$vqhU9|5KX7be4x`3W)rPW`asSe{z zZ9+-0uvcR|@F0O2mzJw7K5x5%+C@ggIm%XMo!8uK z9_4ssq9&!M6sIWgm;nQP>0P(dXLp7vM=e8jd}5!ssE?uBF!(Y8b6vq})#3&1ajPhc zvyN(N+>^4@8Fg`4ftn8MYH1z7z2kX3a03ZumW2H|rf&JMYLGa`S?~E2bDg7?_o~-q z4S%}QYL8OZ>KCO&70hk?$iX(68lMqaqLISq^XnNhBO-To%6FBTPNXV&ne#;4%QjSt z*?p{{9_}6}LNk?`Dx`{FlAoItIc2uZl1am76@*+3?%Kc+o1}3tmj@FC)P68i*BCi4XGD!Wi z;EH^|M{F7X7{9PtP@QkRBNSx#!y9CQ#f0PDzFfz4c=PLnXS#R(qu`mtXADi=G<(KjT1_Xh!5RV;qwCTK0j_%cP#|v7B?zfB_2cz2Ijz0pR|l;16No%ZRwdNMFv#)DfI#Royf=AO0%meqkC z`h1^FNj@uO*v<_C0?M!C*ZO^d>wxa<)yIj{ z8NL7|44i}HMqkwDmSK4#c`}ZD8?J=Z5|6f&S`0yvKhqnROun@x%Wmy2z#|UMRaPucX^= zd)j?)sBk&JmtJfJnNrUWxuc&v#?WZZT6vJRldYa0t*vJDr35rAjS{H_P;v&-WNg!j zly1L$G2F%6YplSE5TO>87yjqWfYeeZI2HM^aFf=+qjG66a_+~ztU&@t#tmdtkK7Sv zwg?>SB9X2WQ5slzTp!5X8Etv&fnEMpZCL6ymX-xFs>Au;H04g*QorW`Y+r+cXtZX5 z$5g*vR|U55uyfh94v=EQ$neK`PwpKU?I!3CcIp1!bW_i_Snt&W-nbaj29%VjGv8T6yiHh{g@LKrrD$BA8rW z$Q#JQaKgpnSx$g^AVNzGXfhLrD=$ZBYKV(;QtVIa9bm1!CAd#&bBe zvFiC@xl$+(?1_gn-y(KeeWDDC6t#b?8DZTZC&*BI#3MST1P^7nV^t}_R#S&Q2|v_W zY#&fGbZ&#k$oRD1sn!SC^z0ztQA_UhBIDUsVSYJ|v(UE1l(iW8H3(;qg|px4A4v?= z91j2Jvq9!Cv|PV1r#^~%t$X)s{QF>@+aUk0@Ap|FN~#ea(`SLk6Gss_1n=T2aD*v* zgm8E@e^Y&k64@&BB{8Zmk`WkWSd2oD&A~O7nm!ZZGZZnbXkFv*74r6yGC0j#udOK3 z%%p5&$-*{X2PSi?kEM^7PE^>9wHD0OdAPifYcJ!MX0Gk9Bx7Pq-6yihbp?@q$>(PR z->>y%<+5o;A;(XdxH4RKx)um91XA0)iy)vtzDArbXdfIHN{P!_GWYw${s2#?&ub`9 z*iJB}3O~aF=1K!RonArEa_XJ}(!I$&2-}U@GVB7s__AT_(FOMoj3HLK751i2XzA1j z>Qn*KYLcCP! 
z%3zon@B|tWV&x4BcK9L54cplRRDU4NJz0$e($piXe1iz>=3OT^#8EAFgudku{EAFzHAf9}zt-p8jb6KgLR z6|}#mSWEubr5FWVzp+h>{+7qZ9$F}6Dg+}i%AyJjkZsArD3&^K&@<8Z6sf~=&n)y0u?Am|#IZ=w{?{H?GI z8GICrq^{44x}AP`u2R8x{7wKm}My?5|(t))xI^422{`(49c zMB%M3e-Os*<`<`^y~x;RYwj9aNDEx-2ZFBC)I&p;jz|4=c3xZ8cmpY$b4eMV{FLNm zjA(1Cu3c?tbWo{@s6hFaQtK*CIeQ={3#TF`L&R1PN>KyhJxL0E*lb4wuT$3j#Yi<1 z9UJ_q5+g_rT}x)}b>tQ$L>*hsVAXN-p|GgwnDR zhF%_FXhg;2>Q4&oRhVHdWx;XdO1m*LblM02dU1Bk%9)f`H*q1e3;Xo*3ykspqr9QxUc#bc#uLXxy?D z)(nlm=QCH@mAPvRYvcXhibhEPqOjNjr_WjnGkP-s!9=4WQZnSm-zM0+;9mXdKjPNw z)qA_LrP$W6!oBurlG5ahpoXzpOiOW-?3hL=QLP&uw?NrzJ<&o;kv-zt=s|wh{%({ByKrcvf7%c&A!auBwPXFSz8H??6+h%_m*J@(L2^9Kn`l|oO5?9_sqn%B2k5MYtk z^jur&LLx1ZioRTHiS6AY2n^k!iPQw%gKJ>Axk#w8kBs6(+0=0Bt~Cs)WT8JQA{EuR z=b(DLun7h8&%&mwPJS~W%(>HTlBIZZ>#Oz*A%V8{M5FDSvvw;j-;Uvbr_%aDS(y2z zUl8mB?MgqenEl?2B601-tfaT~ zf%G|Z zcqt7=v0D|EzZ{5Yse(OPAkdaQr|P6fGOR~m4)kHhx5nzp4@>*63G`GPabFR|Fw&Ki zz!;05nK*LrX|vQ))O*KsC+=3u4VP-+RnimX7Lw+}4b6ur(dtH=lI~AgJy#~ShBB1m`XPhtlsCBY9!V29n{5`6tp?Xp!GvoC+fLm!uSM7>|jx4BDReOM^NKNI0LQ)`04O+|*nk~%6pS&Ikae0|IuO3TZK^Wq?GN>%~f zq+nTmLP3D6GPNC1gv>Wcymm6DM)>&>^A)?8@)SHZr8%n%S=ivCJ;~;hr$2;&TxzeRl*F7QT23 zUzHav9nEVMVYNO`B#d_2^21uj$=1Z|PMmLT#_d#+;jBxCRaF|{>so%%f`{!f>If{( zR=2cVMy$u&X}&>`O2-XFdIJg38}$9@G!Mxe#b#@)_;O36jj_!7U80r%KTWKPw9G8FxrVkw zhvRW|(((~eY6_*|{I~o56XNYzq_$#Pa>=?XM~0N%{*(QoEJo z9)=Mf$zR|RAZ_ge5fS2NM{G0?Fbo!s)oEOJ;rzDj5~p%MjH}>lxVIL6E*j=D<%!*Drsz&7r##6NU22V?qYks-V8kf(c2oua4%J27Tb%$0A z1YKSE`dqrxDC&a%&I|9?U!UKB+Dn|46tIp6lYMH7a!07`dFjK)^UQZ)lX+kl2;jsN zS~s0V<&q<@Kksvowp*lxH9AKzNzH`K&XBp6^2b1LZ>kg9e+sYEViI|=;PNEO$(;Gewf7#azplc}er zn&6*CV2ypo1Y?~x2P)41`5TglJQ6bb*#BB$fjzD1Bc(Q|5_Ujo@k@{_vzh!5FXrr!*HmWa+DrNKeY{h^GCNCs_x1M{JVw0-38WKaQ z8rZylMaPG$R!n_i#I2asx4vd!!MbqybqLZ9jkQFnAS&@lKeVP#s_kkBUScX@6-M+p zcem4bCLNhMd2jyw@Fy`#fYR0Kn7#(%N>UIICzf>h5Y&j*HYpQKhhCuNDh?s z7A=wA7T=)jbp3_J83D|xpJN6pSuu7g!Wbb;e$dV-MVyPVs9~#=x=j6XaXVa)&ve4f z+~tZb9NK(WRqqj~=}hQ`)*i8mBA=0>;g)5}R@CxjI1?OUxN^sSdDRZ`VR7$3sP4W@ 
zP2--c%KCm4um{G;;fFPQBWRT1eBGKTWAZY4Shg^+P9233Z98vg{3xol#2nM8kGbOx zyzsJ{6D1>Ai!0fy!`V%3V!jJxOm|ymAS`T5mOVOoXW#e1scqbo8a{WlTuN7uGsj3B zzsHz^A?I#%-gCT`t}_u%lZmS?AlThxykYEm(LJi6+cQT-ZxA0 zU7^X%Sozj#RKT8k#5wxp)~+u96P}dThb;}1UoJN}%6o21@2nqZF|*3z%f3=}T;r)= zWg>QE+RiSnP*{^kX>Jm3$S6fi*9YP6>nbRg)^MaL*(LL=^t+qKe1nyR+$|_#=liZ4 zVVkb)*xY93_5C5IEVFH3ALkoXZO_iU1~K^S;)~V4MKFfv3$_P0gJ`iB=qvW|3%DYbh zDLa4VLb2t|qK521rvWIc|J(1b`~d{#GwG-yc%Gi*dE~K^FKh-SBZ@ckf$Zhx>&Mz- z^zal^;Taq%FhUPc!q1fh1xyRJesmURj}o4$XfR{>G%7X|p0_zOp|@x~0YGh8g^o{f%-^e!V*b05%nOHl~%_|JAUwNZL;;UCTlZKY$!-S0q&|58OSG5GUhV&8z` zT(KZcy?-xvaqJ|BA;h_d##b62ffgMie6`QF)sVUihww zIl#=Kfm@m&k*2g=gE(*12K3Lgx0{-2XKgul`Fg3FqgenqF5xp28P{M|%6@ssoP$UF z8I-pFc#USWAriASKoyufd`2Jt%5EM2`${}FyvWG9FoJZrGa#!|4NHeGL3Bz&TtImZ z0tW|F%;FeH76xO8L%EAci~SoW&V)0D5JNxSjKF_055z@Pr$8KtBje^@t)GFYi185& z_AkUxBoH5#e)_^2TcfUlsXt4|qHdVbo`a!aM(R0FH?Y&aN?{PaRGJ(k1LP8rWyOWmC@KfB*u+n;{qUzX>9fQWCte{F6Q&H<$lno~|7 z&|Gi7A&^ah%6{-yOpw)rIjFjqc2F(F(I{!>EVqeHg{H-Q|DqKwBmr9*_Pn|E?ZdWq zP(Gng_#zKn^)yhv<9c)H_4hx-g*^gz-CYU&M_3eK5>X4FB1ey2Tm|xh!@yz6rdKIJ z&=?yjv-Flt^-jO%i%Z=b2`a)hG@M-@R0{4rce`5|10v@Ja&DuMwAUa3nR=a(c-+A! 
zE-h_n(_ZAo9%!&yxWEY3R!{*R_b*P_^I2|UsMd+ zik!0x0o2Uw&d2>Ry!qAwV>_-53jU#K09#}Y14PM-cryk_u1alLShsv(JLC}EHDEd5SvpQPG{sDGcMpr^7E(} zex>;y!S$4dFa+&1I*L>} zK~f&HPS%L0eFbUzl+3k7I?5pda7kd*H32%`SiEK}5eXMLnrBsbwV95S-~o-#hxIZ5 zp}L}V}^fy7q+i%0Kk@UBB!CMd_J-?oXw|E7>$_c-L z;Clx70n&iK_Z)pmkuJE2#(bO25~aXJ$P5cP-2fpq1Xgk@4TCR$wJy z3rq8wRsXeF9qAy^B6?hNS??-9Xa9%{n*qujYTwI(^*rP6h#Rb+q;-|6k2_ zKzQ3Lbut@2Srn!iN=y~V*RfVu%M(RA|v9xXl{tXt?=%wNsLA!y1$$$>u_ z9{Qnd2nvo256Ep))5;bH0Z)kA)&C7J&H_x9CG57*nz&af4j$fUi2e=5VJzPaoT&c@ z`kNI1ng4%D(Ay2-{EYLFjIjttf5k0OmpSp3$m}&kglt83&8~Is z*9BrRy5xA{ac~v`47$5Z9^R+;RI9WdN?K0>rkerX)v9t8f}!U8!?In3P@w1baErTHSKm#oSyC-`LE;JO zJn#bGhFh$=j>#L1wHFiV72&`%q9Y<^C%TY^lOPFZXy25<<@iwTu$l$Pj$e!!IK@Kb z`?*j%SDRy5;oMbmLkn{ZA|_Ny`Q?4euXI%lfZl3s$}~xbYO!sIv*R5mnMhK_z@04& zeuZ{Iy#$ENuPWM0hNc#C;~kCqhGzOyBdNpaeu9kw%@nOM^ShXSvWgZ>x&|cwPfw zn&t}^Cj9sVWvCQ}>X~fwXE0VQpqBr7!HxHi>I?9MR}_gB2q|lYIsNaQIhCijy)7L* z75Gvr`;Br|xx?njAovl?5@3-WN@=p|uqB6M zJ`F)C8Q{_Thv9N|qDYNSe8pxTX&1^MLO zJhEt%*;a*r3xUM%f!$*|WsEZ9aS!b%us7TpJYb==J{71dVF-cNT>XWMv$+IgFOp;X zjgR^+`WMx2{Ev^g1G+bfqMF6E@6%Ch@@OB3x}LkwQu43&Z1D?h>&GY=%PX+S$dC{z zr*Jx*oM}u1v6Jb43x_N3flioTSw$(}B9K1CAi}Z_ zTUf4x{~+pGM)OD-^%)z(__P|ewuop6kdSSg>u4D#v7(P`FM4!PLTO2(y%XV&W=8a5 z8D{IB%ezkQy*t}tlMbOpR_PZwfY_0*d9h6F3RHs=!{0`%mrbu-4au8^Tgy*>zBse0 z)A@uy)~LgCB$L;qT!eN4x+g+EFr@+lv6;s@IkW3^+CYgda;}e^IyYC^LQCT0dHE)MSPYh}18n}H*+OOTg-1B&ozdgKspNjQpNB1}(z)1tL;uMDa* zYDjnJZ9&OY_^d2KwN0rI@=o^i(1G?bfIbkN0Fo_zQ{lca3yC49&Z}Hw4+grEgpPFb zZ-YQ*OI|CwC?T|Yt%jV7LJ!btBf8-7l`Qj=CJeteLi?pG+J}WW{T%}HQI{H06!o1E zv0-s#ygb;|g#pg+?3PGaX_+6wbPpkS&HCLSRfl z3DfdoBpr)JHI-5nB7gyMIBK z*~DbRvQy%GJg4%9K_oS1u40AH-A*JmKcVez#J4 zd&d-pG+Fy(%;GfpDyJInW9W_)&f2CKBeos1OcUD7U9^pymPZ_KIpLj1>Zmixc|Zv9 z^c#so^V}<|i4u`2W6d({+EHhgI^iIZW42nT=#EUc<1%zFIX(_(Ri6utvhEPpV|Tl8 zn)e92$^N^Jrjcp`25t&{(r^fAg~#8=hqhF>9KDHN`Y7g;YqK}0UP7)-#4=mHYi)II zWR!R7v9rk3lLO)Q*Tn#8Jg;;F#C8g1R?ja+-jhaP;fSbv0EEz$k(~*O49$dc7|!T4 z2qnxiyF^0z2`?H7Ag$tF?{C5yz)G?S>M!{se7HWOZT6@mqQU{e6Gf}52&dI4;_Yw1 
z(+T&8rcp1WxK!zC$ifI{_8gabdVLU%Rq-cocZ0{o+nzw#gNWOqeMDX}kQR!O%f@53 zJCY@Xioa-PNvq$Bs7yzGzV0*{>J~P#k64!=H@P*)#WLOrQ#7I;8y%ID%;tm!NqxS2 zP!T8~4J-Lob$I0U7RJH#WWU6>b+oZbM}-(v(>7|k43An|og?|iE-iJ=Rg@H($_X)I zLpOju@xE4u9-P^HV7|Fp(O^JhPh|D2-j{X4HNw*0YF1I7Te7<;wblmNVG$A>cV|p| zqNXH--U8#Y3Slu0i_I+PhB&iP{dR$`MKJpKsCSfMz$WQlK24oX#WAFkW^a58`r9hz-)3cL6%&gsB964EHFvi5%L~-SI2u_5lCXb&rfsNM4!nH;tI{ij@e_7UstF3gT!n_Qypi?Bky3#WrrxLq9DEn;ZVyh(`^v@ zPLobSNm!ZJ-^h0%3nuL#$Q|%?4>UI4+OK}E^1)UkQMu!rk_i9WF~F{d0X{kv5JE2! zTDP;5EiM3$jYC5F<;Lg?P8EvD{d3xueyJAy`~$Y+zv}dy9s8kFdfAtF3=5~} z?ekEz$MEM@7Zv2rU5$+emBw@v0)jew0$(8wlvxyXmfq>9f+~bpG|hhX+%GJbkGgZ5 zyniBKGc>PCb;!NC8Hou23rg4Rh6E!mhi2}Tx%f#%Q3o<$I#-j*ioZWPoCa=~PO;xh zL;7MdOJhZl>oer(o#bu~40F2hD&R6HbVzloSm@VmlXh%9E7k?v1rx{4GLJ<3l;BTB zRs8fGaCOd>fDVYT;yySNr($dKJ<|o**v#0LBl>l0*EuYXfdnlC$7=YcHn>LX-6CdIa#OPBz;|EB z>$65pHkmkQQpJ|M9F?Fez5K_^=Q=$QmRjKUGP{9#a4XZA;{3ba^>PY0+r<#$4-G)a zccKKx5Zocs2v4He06uI^hE$w=ge;{f6YCiwZ7uoDMsbR;=-3)Nix1!#8iyYv*`!VZ#klHG^1vH!zEH6lE02-= zhNjmBo~wR^OeQn87VDW2*j>nZatvOsKIY}HBHITUH!=4ln0ugGp_UW?$|}Y>7jxWf zRiNwSf_k-Zl_R+U`|Beo%_mh(z|n!$k#x88z;W%W*WOYG?%2^Xe9SRW$P=e83;_81 zcLyc|*=nH!^XW76NpiCDiwt3R;0;#I(M8qf)h$tY3O7~2!q`Sb*pKvM;R@jDu|{Bn z=|CpWE>W~F(R+Im%*6I0ODZgrfN?0y#g2ok;;v6X{{PW~{5cd%6kAg(5yv~P9#yf_ zG0}uoQ-C1z;h=WCzTNdrB1OB32Xjh`bgMb6Zwx^QwD_N7qU@j-d!8 z->P)nhqX*}=lGQtYFtXzZ|wN@b~h&6*0890D>geFzgI9(s0?=n7))_-MdhIOc4Y3^ z0>5;L%;I3eh;HquwcO7UgH8@D`DhopHd9~_mgl+u3n_e_#nH+U4TImhd z8qdeRvXB{YLs;VrjNRE#UL6*1Ok}_%SWxcaArOAshtAQ{2bPhO$xN2d! 
z5DEzEXvucqn}1@zON)*TS^!IsaX|L8IM}ef&zFHePFl{xLKw%J@NY&swo4|?<(Nxt z9KvFJpM{w*_=6F$7F+se2ceB1zVoX9MJ3q<-)7H$h48epCUS2zI*LObr4!EAhBJ~H8}0?OIE@n?qUbccY$D`Q&f@g_UP&Bo zt|-hiZrK8iw8b&UqK?Aec>XZ~_c#RzacpDXU-AK_%a#U}12tl-QIZJTZ?lP%Li~Vb z{~aN=+OMm5r24bHO18R$8WAFaO%&8Sd{|7r)YlRTZv30#)2{C^d+A8Eyc!~5(q4gZ zw!NM!%xqP4oP38vxM-w4Y{*>F+go5v$y7v z%IQ$nd!n>U{ZjpF~s-gibtd42EdI2JS(Y=}`IK|rKRQ$UIuL8K`tNEsNVNmpv< z#aN;UDs_MX5}LFDsnUmNM9KitM(U`*AZ4V3l!3d?3^B=F_uh5?AMXFdUB53`W_bI1 z-gEYT_VYa3&W>c(@Yqk+D)M1!5B93(z3Z5gHbOW)8|%*TKx)s@o9~_4nn)s46{}TU z>t$UtN0sm)^VE=rF@Xt8?$F4c6?S<yW%B7 zI$@AV_*GcrT3H)VjXu|m0Bb{{bzczql0+0Q@$2O>EwJ` z+&~k~ZmAtNQgGOW+>}|JJ#bV7FAwo;s~pKo1`{}?5yb0)jpjS{d+>eq4{#3$#MaOB zafAWXr8NVf8n#E`Ld-vX>A<-X!l=3nHvg(p34npP-1K9lK^)FM6xZ+2FYn3+DmXC{PA={F!B#Pk-=f<#XuD z%|u@{i^bZI*-`4e{FQNKREmcRx_GqpZ+DPFhD+W=w5@ofj_VUvteS-ZIYTCXmo&?v zeG}J9up7xl7>!qrLf>V7+^y=3(~&ReOr*TUNEt`Z_s;R3lUo5WRX;7x`64lAY+SZA zn)@yAn=0k$(dHxNj8SO~Pq)Fx{xM6_i%8EGDm!mo17K6;_l_@rYb`EF$EhV<6K9tP zGWJS@l^nl+J_+T&iLoje681?6*s!P_Nk@hl1*H(5WT5B*;31&={RyhV7hU<*IxInS z<pxbpI`9l!gxtPU=iYg; z<=Wx?7rNOmLFkkTA#a5jczVj$U)}Swz+24;cUaRqwCWoyF=Xfd__48~93K*?YGrW* zmX0s3nPcKZQS%hMxQ3NfQfK)Om!@QBQJC(fvO@m+`~{z?H7w^!g3-u@#5i8VnC}9o zJSV4ZCEsX(*V};#_!5#%US+u{2CL&?xt^VR=d3tjms6o%0a>P-1F-79HW#ie=BzHN z44DE(MD($Fg%IA&`9$KmN|@paEyARizwg1PKG#*VvaENDIj8?onl+Ac?E&+(Ow#9M zA<gK)Z(msn`vf3u){StKBjafHHO(r4X-%l3s;B-l0 z_L~AM&1_nc48(Mda2RkUrR%mMt&Pk%OcN3{ z&5nu)Q=Gf3jtAOQ`BM6~Rfy%M)rRGP*6!q>kVCzu#q?rr!;L;5KPx`cY>o+++ib9%wQ*(hlojTe!6h!)E6{ocq3xP)8h%V(oG!CUp_n$M6VzA<0C zjA8tpO%QpMR7UcPxBB87TNDcduY^I(Cy_qr)0Hb6t;D7+*rv^`lPpbxBRTl znsvp>RKAQw56BVfwP}TaNFIq-phnaHV?h6DK!QQ$O$q6{>jL+2!VK`^*ztey=>AzX zW#v1l93%WA6i7JTBfb@c5M=h(Qo^bsu5Aj(gaNdr?BcgF6Ur)yFv5oq8C3jLxnk3Hm)%4^}5)e+IIQSo;Z}eNI@N9_D6-ukFd&v=vn+8*jG@|6}hhSr6bu z=igubR+d6h_AoFVwTE7g?EB}5^Z`xy&*S|2+5J}@=P6tHvJ~`&4rt;X9UXInoxhEM z?~Xze$tyr$zrDaM4a{)mhOb9iW9Scvq`Bs9hXdzo-+Lgoh0oc4b*w-RUt!4MYrEM4 zsWhVx-u|-}XFIxSIoiJSoqD}GL>BIt0>m|wn1p=xYS^%)eEE%3J87I5MHtQr+08q# 
zn(rZ=8)-1_`+2ohBKbDX^bS$teMk-dNbPo^UrxL-+57C7aP*_}9KQQ|4;)DqGuRQj z{-J%RS^cP5}(;GeR zx-BYu&na1tOD)Ca^Ca*y@>fG=%|e@28zhuF7IT1fyls!fvl5Au2JGxEeb3K}P5xC2 z@61-u=Yug>MyGd9LUifMs012_GlKY|`O1PNtS3djvdtpH9@CIK&YT!dFKYHgFD4rB z7rElleDbMJEO(=npws>du7)K zgLa@Y^COSPk(kcaq>dXZQkxxE!=ep%o@pxv>)0RnXHf|Q-i1Z83fnq+CwXhwRiY5) ziS&wDc{Y!NzW=RM6vxq_i>VEbDgB4O%^CB!9QNQS$1t%^TpqhN8V7!d?nL4#?Eh;V<6V|nacPe`5Bg&tE7CdAdk1K1{kzLqD!g1rwVe8lV^jX$Tgn=|ZZrMBQ zKI=8$@quSBHGZn6H#-JwL-&QZhZV)#dsKSOIW?Zl8<-@N|Hy+U95SE4+k4;J9r`|- zg57V7uL(K)2zPf1u2P}}Q|0w}$lZ=swUAU__rib0g*xv3Wqf?LuQ!Bk@7XGZKk5?v zWeLm^V)eA;Qik`hQ~dhj&R`D&X{B?=sBWgQcDnT6?jJE~99+&W?&~UC-W%h^6Caw0L*kcO4uzE3A&E|}o{WnOCM9;tp#@yfadeCqo z{FlZK4VOc4u)YLUQGKkXB?D>Tw$P256!14|K8axsKv zbuwc4mp~+GP10(Pb_M$o84g*g7D9TC3|yD5{e)#ig7t0{Bh;|t7 z44_zIe8gD`@9_L1$REQ~RZ7ofG)S4a-kOqNlc&yjNAYspT_vUXa?X=P_S(Hf(qVlo z?9c5Atg~rB8a_+}*6Et3HV9NqH!AS+$^&(!-Y>q)KGe12a$_TY*qFAc?@}*XEDgxG z(4%RK>I9L#u=4?VUiQAEX>cVJ9klAYIfOZ7C?lKsu1LB64aiUl-4CSr2NyKVhRYfV$IjE@L?DJb< z3wTZ%p(c9y(bKj^Y zyRO;8EO=v^_}K*xzh{zl*Al9-&RA?eE7fACCRg_OY*B1PfRvuqw5H|ZA1Ko?+!W0x zR}HQ?giZ`D=J+#5DN;?RrKWA7!b$g@HB zEJ(HP63^fWd{aG zZR$YT<4X^Zb`ZmhujV|I|!`x2V=AvMbzfhy3TnsYBMPtCDx!59y^Ip>lru zxr15}Y;IQYx1@yZ4D6dIJ^!5}O7JyWlSq8AvnoMwY}T2q^Mp9&;M%Y)81B z^a;L~ZZDEAJLVUCi-2|=xEi=m!6W6eL!A5LA{E?$n+3*l9;3VQR_~Ir^&AojkbW8R zRk@fk#&ox4U3>BSm8jJdKZ=Hsy=7N>SM3+xZS>DQweOhQ*hat6TB(aXN*-fhLXCrc zhg4r~@a(Df%^@KdeQ1N6_M929s z&8LTn-bwu5HVOLyHj(_2WZLk<6Kl2UzUERd$L$jNZ{KC+RN6iqJLInYkLPfOe1V>L zV=6RDC8+kGiV}S96o4h(fqd|=<>dtTr2K=9eRqas_#O6je>(y89^z1kBJe5jvFx;dympzk ze}fYa9D*oNLTT{No9<9)5gXK`DwQ=O>WsPTIQu=7OVSz)}eo*nT^+)`1Ie- zsrHtP8rtEdL!Ww?839oJHcjRQtJLdso}2}X!FQ%YkKVNT$mn1iP`(Zv8jq3RNHiHU zYFPFi;)49Vv7v)I=UZ=u={QRzg!s9@d@PIXWAeeVW$NKGk(sdY;X+Se^)EIUy?=T= zedN&eebSkVhFe#eR5!+6&@U?9ZQE(Nqol0=<357)%#e={W_ZHm(z@Q3&O~&^X!1N%bo4UFk|KsPZlEz%9iwy?hl7s=N!ckH}^yMnQn5T_hZGKU~Tc%e-3ey3BVVFaVA)=fy9@z%8ooEoGdO1PB| z(*hx2;+H+z$Ij?2bzy63{h=dG@y?S~liyce4q0tCV_ubucb)?vLcpO(u-ou)A<;IW 
zwaItBx7Z|M+NK)32x=xji?}}dTt&v`rYInP%Ki;bKwfQuXOuOyLC)5ocznM zk%-DAqzUG!n^q(C4M+bD z>9G5us$^e+^BPXE%-LknS^EH|I{tn^8EqEM#lwr9CpJ!xei*A-~@;CL=3~ z3%V!p{=zN~uuzNQEY*zAYQB7@sU+r7nqfy`2Jxh&z!r^>lV(ec_EgTd;bB93CVA5ipDoe08++1_{Mzeb5gF1J# z*xoycx2mG~%yu=7nAz6n5AwAbx3j=7dm8;+L_8A=*Lp? z2K3ans7oeT#A z(@BSn)ztOAAWwK_F=bm7d6&c*)ocad97dL*LbrZs=!i$^QESWI8UCa)hKkRX?kwEj zle?-q`GQqV?}drTcjegc)f2!$Lcw{aFPraI&kD8uq3RPI3tdl4dYvj8V|GsnAO?^Hf%BDZwbFBp*V4RXyrKJ0PLpYBT;t4aj(61gK9{8-h>ye+!ji5*dkt>XhahmmV~ zQ&GE}>TH>|u=dgo%et}WeL{?HnBsx+f&0q#|J*6}aC@r&rd}u)W_QW0#HT$FA>5XW z{r!u_M*Xa7mx~}}~KSgNafG z#;i`!i&BS!8u*Njno-1&?wsigY$Hrn&KcbbXT5R#Y&jhJyVBx&J7cVp_=voI^Px*2 z?xg)a{F&$e9&%d>;N^;b7mLjxzfrk9~;GBY5TJPCaLA?(-p@y}hC?zK)=+6xjhfGzk2|l7p&)S4v^&?cQ5|{e?FiSSfYAdE8=jIcSbww5#vh$ zGpYC8jQrgLE%$yMZMXC8P%>E8HT|O?>*+$*5H7%MnK|p$0VHgMZ*P)T@c+c)&M$Gb zNchoO#7aI5ufQ`P4vAuux7jN@zjDMa$F?iC4iL2d`c5UDL(XxDB}SFFj`7c@8@P|Y zn@d=l{R?J6cMkqSouSW8qdTz(6w|ZvAeo-5RqQy)ejj*FxY$PzGLZ@<_>qctoqcMZ ztng~}OKIGi{w|Oc^xS^gU zSL5f4?87J8Uiv+WkkdSCONN@~LjW6955Qa*``^$-Kd zc_1E$dTjb9{O^GiftrNg@g%;EW0W=h-x~yMIeMUe6bBxXq08hw-NvI_(8%J{OV@G1 zs;Q}?9(uI>ytlmjn{<@zfxA5_7|VJFr`9)FX!nS+R8rqYwMat1Q?N!f0Ji`>UgQnT zdGpVkC$A4hsg?N}so~qO@U@<>)oM9{9zi;6%Ri{k{cT!;e+VEjvvb~HoPBrfKjkLF zEBx=?G8Z4n<1@KIOX;t57LUB&d5xz2PMBZ}ljGph}tCCv^2LF1fD)&L*y5Y6hKOe;w5fea&{=2taCX#nn zehn1h`8HGhuWu1XM%-!bf87L`rQ0rGwdVYGBY|=o=VGR_SdR?=>+#j8`Lv?(HOLxn z5mMnl7$$#1RA{X@4DWl#uCjkU>0H!7@&9(qZL1;jEr zBl_aqyKE>mN^>u)AaC(Av7(`aq92rIlalEAop{_Vaj zoE7f^>;hT24sE%}sb`YUgf!@9|MCKeE{7gX2Xh^JUcMMeUt)~9I4I4URN4`R^|NjQ zVBUQi0CtAPMFnTAk$Ml{D@IUW6l5A?4Cy%*wN}P-`D!!eXomh%2$1$GeBowv#I1Gi;I-3T<(8q?4 zB9nw9VFJQor)u~TP&!&~ct6r8u`d+BD8`{TX_3Wk%mZnQD`VR>k2hE!Y$dJBTVALF zRABMT3Zf*EYRB~faOjfD_Xo_hAzZJ%asG$FT%h9dF%Y9>$f}Za2&O%!$wpVfRsnxPj4g4&G0m&o zH>=y7IZf@1H%aj7(k>L|=zHcYM)l|*l1~is`|D;W9loT0ex6S-IkUzvTwvf8D#ghr;T40>o5JNl6a1{wep!evp+@d(*r*F#x9&~ zpqH{cGs)rDRqORpNBuYMxa;z1YkHLvbHxwZ&Y_Q z5}b|aJs!`)Vm@UntYb3T3^3FQwn%-^EW~`H{QOHpQ 
zqUpss0A(Y`PwNo=ytliJpFfx42icMg1CTJs^!!FgPr7`^3XfxrfC)3}FI?y5El87!eDW21rVIA7;3BdRdI^7eSi{V7Cdd>> z=R4MkCq+RTZ@Z@nwRrwhHt=Sgg4)rt{WD*x;bi`#)ncSNQ_N;uhSuXv!I>J|^h*!LD&?XA54#IE}dX_km- z^d6nW@O5HtKK};jJcT#l#=mhD7X6XcsND>M(7bM5b$R_9K)nI3KqZ z?(Y7DnqJL)KASJ0y`VS8agH(V;?UGybY@A;K(P^KP`nWq5}-(h>;!Z*&BbJ{%89$X zr3IQBrXLeBjPx+X@=g7zv5vMN!L9->T8L1ZWLLBhkzwFyaYfYf=sK-{Q#TQtsh;1! zORxjx2M%nw6EMjT(@Mf8By0Sx~u#)X1S(nS9kL8bd^}AAEr7JM&rk^oJ_54Lf zgfHI!+is06(==vLj@EfJiZy-P#jD+QRUeNYb6G02uTf&y=nNBup4X@8R@_wX&XKlu8gmQ%|e;+0)mj$D^zT_LN(`h&^o_?Lu2<^tkvTEDz=0x)%FW z)hmNtk;VGpd(u8V>#2whzAJC{wkBf>)ZMDa#ulFz@u&E54Cq>qG)-a?hHFfQf%zty zhT4}+_0^S3xyNgC&KlOwNV%oa(v<^e)xiDGS-mSxWd?zbp1a@kuhUA{Em3sm53DPa zZ@kcsAECyFc!P0Iq_Z7y*n?uw-P+k4Rqb_C`!}1@~ zD`IY#an%(Z!_4|!%f5a2=Y3u0c=;?4cKPl!R44c^T`USF^Pj3s(>BbG5?n>}<@l6_ z7r2}QHoNy0PV*nTxT~Y&ce|W^j-K7p?#6%0xV6deiL;c>QR3^2|D!8&%p~DMiI{xb z=)&8@p`3h5?}*Na=nsX8lv7F3t0QL2%&BF|AR6}buui;qww|S)8mVHuaw!G`oLsMF z2BFRsz3)MXyi0kAz!0Ggz)9~&mmFD-Bjaffg^fMkBR$y(wNY2loI&&asvba|f!rf) zJbv_c0%n$nBd9^CdcT};1<%U6qPNWt1#{nGI39MN@yHhQf3xT1>{eI1%0HLCzz7|k znQw=Z0C=C-U{dR~yAHP_m&@}9L zQduefg6ypHsN^;7>S2i81WX|=driA>_@7Zexdr7_jYeAkIvq0H`9E)oe}_d{S%Cy~ zjj*m9^)ek~0>tXhoPC{bmlT{%KDSe`Z9wWKGOmw7+N$BYkz2zPKDKz@$f3k z`bYtWZCZh1f};?%t&;z(X2ue6p|$9gn|zzkWPKrVmO5RDg8vWWkhu7PNdoW{GuI+$ zn*!|abq){{bwc$!7QyvU61hoTC%pb0@Jy|z_{ZV<@3rrFc{}wS^7eHBeM}S+iZOY$ ze!9xk$l$}u^yA||S`c->L5vxu(?zr?Wp1*!nMW%tLcmjWPCTdjE%);{wXBqy#YQkq z!lqpcMHv^oTS%t{V@_}D+EKTs=ZI_bnUwhVIZ#sbS>%|p^Y(!vElh<#p>|H2WyvT7 z&VE3yxGt?)CuKn8~cSDyyI0VN!is5TGsiU-D0Ua!;(oR)O_gd78J~A;c zWYHH2M^4TnLY9`w=?QikZ$fw~>Xv5GlzS1Wfq#C(&eNAhE?$bVDMeKlHKAUQJ>St# zbpXX`i%0ID=93m7XLPL%-3BCwybBq^W(yZ>Y5;>b>)Ll8@rX~;yg)P^?mXQOBn`3d zbbpXY@sDR*&Aiy$$KBItVu3t6^p5A=pqhm%q!o;pYT>m5m(50!m+$mH;0uYC$_aZU zerdc05VL!_osjQelv~+4Mln=7L4%BRzHy-8*;b3aQc3pr4>?~z42Xs{qD?&aU5$*Y zR<{!g+r(Zb8kRU)${9?fULq7^dmCc;TI|ti+o*Fc%}!(eVP*MxjdhVvtCHOhcPdYi zH^f2#z)9cCxtutXz{Gl>NT(q^xP?A`2MOSU@u?x&Q3L9UL|*R%-#MYAFru$kJX)ET 
zab`qa^`H-6VEn>l=@rk)tv8CRTF)0+ihIJ^j6(;zx~%-o&Y$~H1%D8dsyEte7KI1Y z0_AT+u6s_DyjmcI1A+tp*Ni~jplldZl@=hb8Y|0&S??>bOH-mnKczN?Uv9(t z4jTlKOIkp|Ty2;p^)Ol~Wf`atQSWMk-L1l9SIs3X={~Cb-VwOzqZb-JI~0dc>6vyV z6p_GHqy(10SfGf$XzR`#bF`ELoenT&0GO&LcIcpa>lhyEqLRzs;0GL5N=CMEV%WR4 zjaSV_+uj>;d31J$*_G`z*uj5lR8uaJEShwOak;M1r&EWwQ~Si*!c}I~>b$d9tKl9C z#6YkNFVc?4ZZ+wqb}$aE6Wldkkl_iSc()%|1Pi+jlr$&*Y_)aV$05h^D+0U`cnh%vYMzS^r8EF+kG8>o$8S-;5P>i=ep`f?_E6NJ%rBc$x*Uy=xZz6MZGBk;) zgnO|nC!0*^C9T*v4rx6a5Qr4SixFa&z8_9H%b>DIa&Xe%QvQHC;WN8uEKVep|DHq~ zX=}k+%`&427-O}C-B;QToGd1Z$)(C~3mNjG?$(}i@$V|J=Cg%-+@4xXt(7xro0~+nFgN#!-Ax^%}6nRhzr` zygg+lgp=vlQoRM*ubSo)ZIU>qB%cvqvnvqIQ)<{AQxc z-IM7KnS=D7>qtonj0i6mv09pe|Ih~@?zal#KPC)U%zLpIb>_#OBpz?o+@oSyXQj%a zJe#f)Ar#9XOAYOL(g!Ap6mVpq`9LF4k@c-$p>W02V+zDL(&JE?uYZA!-&Ruey_xEM zg?f#|$f7>5*{J80t}LA=T{a zPv6&$J+gZz)xLE7sLQ+btB8pw88Mf5XUB1dKL8Kb6=lvs3WMXBDQ)^(P#it>fzWWd zT(}DgT!EaszE*Q7HxKY}9&iiPtf5H76H;Sss<~w-7)2H;C=v>(t7lL98jlzP6j;L? z5GF#0Wa{{M_LxkB`-OHPmME9bIiX2Et)jig2}%+LEeMx1q=O%Cp41&h7k(aH_-Ct8 zwh?3WPwVx}1t&=(U=-Ca0)&dV@%SRNR&lbxL-8BK57!Fm0#_kEN$xXb{sq~g_=hZs z?0DAWSisM>aER6T_`i9}1DpSYq{5;M3&H;yNyX>LHOZ^vA8ypoHfl%hdMKbMZ$EVV zHK(4)Z=+#i6DO6Ic_^c1N=4Qu71X5s#{`wzvbtt2G%ahd1-h-GFg!}ftNNOLxHa~| ze?IgJ_;Q$sm}qkz@DJqi9@mlKIJBgtJh`o^&q;p)g=H|hg%IEUqZV~=1-nCedE(h* zh{Bi81^T5m?2!B<_pA2eUu`uJ zb(VG^L}=eNh%8)yDH+j61CJWLjKuENqs=<&I>#RjNtX-@vtd*TR3hkpbtKRb8NP}v zAs}-aqSPjN>-*$njs`B3nBxjI9wuv4-B0itaMfH*-}57CL@4bR(J7xH_XDnVJEn}I z;iP3z!qdjytza0>I(W1^#P4mr&WDd&`2dKaNK`q&Xaj(=zL%QE)OSzbLW1HXYG54%xsr8&E@!!B!A z%G(Tun@+7iPXIn_Xfp?`zQh9Qh-y19D}bTwI9j%cZz6tnb==W+VKRZ~@2)^+-yxEP z0lFUh&>UO3CCjv;v;?Fqbp&K)umD~JH%9Ea1n<8w!;hcy_tY2Ej3EAbeT^#=jS5sd zpR{7TFoq6uft!y+bH4-XE}!V8woQ48oEYJ`mHYXr4e;wPk%npJof1HpRt2QH728%enVDVfRI&%$P-){m=6r;I-Ykds_urVxiR;4zAQ?ryhPGjWhT>xRi&T!HdRL`y z+0%xNeYeigF(2nXk8g1CdN9gzEDU2se zBSbIeOHn1Oa1r>!meccebIR&+Y!#sX)w-Mztid9bS+D&G*Di=cH{RR_7@gIH@ss{v z@6eo9tcJ|QZ-}y1p}^Y^;)RS7TOHZkeTN|&UrMT4d89jC@4ql6S)jr$K#aiGa=Gdp zyTv?K^#ZS+rJdK@~UkM 
zSV?1klp6*hw@AO8YqO^3#!{__Y%aoo{f}}R=`qKR#icEa)-H0bDi5 zR}21M9)C?qdU#TL-t5_Lf{a}ji-*w!4matwbw=uHk*R&1eZ&N?4g!bKsF-I`S*>Dt zCDK%s;qk8jORN#?2*byf=3l{{(OpL>su`a7w1Nh{DS@b;2)95 z`MIFamW{})o$V*UZvKDUa!lr3MXMb9&heiytIIsD6hG zIvX)0>9W)kX_;1GLIMjCQDBBTO zlLy`g$Q>G^*|MycDX}*&c%9#n0?lTQ=D$h@nT(Jn{Tk+#W(pDx$F6?mSv6m1tTkh{ zn~7Fi7VViB?8176D$ZS7uuz~DZm1lfT>0jLc zGt_ac9&e%lp}Y{LEOf@GNO|&Coq^0`)ZEcWkh~1`MG4`Ig8_~J#VzQp4l76Wd!|Ya zg96>rZ%WWV&mLKX4fX)Lvfht5S-sOQ7n{(jvw{i{T6Gt{*PR2`F_XNaQB6Z02pBF- zL_cjdF=&NYr`2aVmX=kP!sNKnI)~tp;WP*4oJ;C%)@1%~4#xq_4=7^c3nCg=VyBT3 zX=PZMnI@Iyw>s}%Wj_umec8rTxu=}3u?c|x+~zp6N5l_E-ger+RYe$kwQe(s2U#HY ziqMpo@|BG%rI@momZgrmil*-F%(ofE>Z=RNt3!rotW>Xb3sv9LO?@XY+gjweWVm&P zWosLW_yhw3$Jv-JbDJwk%-A53G97KW3$UYJ8KKszLl?ONH3MRVkTl(Q;pk0@8&SBu zgBJvZYD#91@jD-DbqpJMVAI(-%hwbv(EhYGFMm{Rgdog1(OQ)yfcy!9Ag(6;TGmtt zqkCi|uP`y#WaT5z%60Bv8!&Q=cQu+0l(#ohN$w#c^u>c;Ez3yPVy%y(F2qwl7MR9_ z)rAD{R{gLvj-@GiqB#nG z8|qiMLk21A@a6ozuhi8=vnc8li-&f&mx7u3bi?{X^`qw`+hubOsn<BvyZ{Ct`oj&-^GkKZOCBz_E=`o_9F4@pwua=rs~9 zH8MX3DSsx^{F=h~RzX;bQn=|pvC!(ZM9!AsNWr8~&moJ{LaVkDp1WVShhJ_$G>Y<$ zqcxI05_EhWrQC*?5d^VVl@o)OSE<(vm+W_Wng)9H6xpzYx5$&B zp3v;AOJXs(y1QX!)!G=bDPEO1jlX5GiE_?tOY6?Rd+^-RF=?mEvDHb z5(MYsytMD1ESNwWgdl#~GFK&#<~0>8kZ^L9x_S-IA;uE_s-x&7ERQxY&IT7EWURkz?KwVF)->1gyl!sM|k4PnmYid~IvGQ8ker05Av(@4J zzkKpP?*%cQ&<)g;Q156i^X+!udG{yfzFf!GeCi}_!@KykEJYq+Oe-5#tSC7%M9=^2 z7VEM4OwBNx-075O)&{2Eh)4-=UrIj=L0o`XJsryl({s5Ar>FP-xPcvPN$f+b|4WKI z0$}?v{@!m_+m{B)tQTvgoPLmGK>%=lsK?{OZWbZwv*<+exN_=ztw+;Ln04P-i?tV$ zdKR*>eZ?&1n#6r_XtsUPYJ-rXRJcpo)P=vsf*(XC%wx5vl*PdnZ6`lwG9(apex4Od ztP9+Ta_|89ZWt4qF=Uh0>qGJ7h3u#Z z8wsx7C2eebxd#xL@9$>3e;{j?zjkO&0NO*|K5qUFO?|~G#WpW`Lqt6k+Se4x!9{TF zg7MG}RPP?f%A@?W%yq_=J!He6k5OE5S-Lpf#`|-Db2IPf=8o(ib!HEJ{6n}YJ=c7< z2zZmzAbhSVDjq6pWZ}v1Y*hBW&vaZ%H?+L##z+p|-pxPgq#rS1jD zLf`(!2L%Jc@mLy?O$THhONWwK9LKS;*^hs6nc0V&`t-mt?a<%xxCAE}pf4ZGzc_^9 z5UZMs$dQI*eAcm@2kKlDn>1oamQi7b+=qY-F~|;f021kQ?)9VOsjXu@B`%)dC%Qw; zR2%>2!+m?Gf`<|0{TCN5SA(~D5WH_RZW*RkZ=!=_)UgqAUpm;6J3w^FqrWzu;6bG7 
z!mx`d#-(u`e=;Q6BtM!Z&}wQ|3SnD}xX;IJ@r7Butk{h8nWq|k7}s(ecZ(;l!yuMd zK1hiO>!!hn-3Bc}^o&f>Zv2i9$JQVuxFU;f2WnZQ=5xE=&b&uHE-a<7jc4B!fG-6B zO}p&(t@R>-FU%7jNNrh~m-J7-fdiegc+OGv7m(pTcJg`kc5v6@eKHFeR5~5Dh`(xD z;WeA*_7zI=Hb@sV)!6qQ) zB_^fbiK&zO-X%dZq|u8E&C1FICFCbPi1*FP)owzqufiF1?7 zne-eC#lJ;eP=-bJS+%I6aoBXD zvS{~n`=C0ohV)Kl5C=Qv7UF%@`K zhyz5&CAIF)J&zWQo;|G$J}7X&8-f0hR|I#}zr_dr32=^8vC#8b|1UuHP^5spPp8GE zGyAC{k&<0MLx)tEij`o2X3bqOHebvZTcWJmBV_5FBPO)Y#$bFSwbIeeu`@5FprlB# z$w?Vn@%6^^&0g!0rn&Zgb?7ZE)a=ix_7D{rYWbqMv9Ux`kJQaLO_J%SUc_?fEXzcS>nluvw& zl>uGm7UvO$#_uA%AUKH}P9b{eH(z zg_bVA5*1o)ru?W3rII@w7BeC_U)3k(e%0~$Jx4`YrY+zz6tTqjbx7{ty+v_(;#Ep8 z_9>CiZ}4@QGBWngi%2h);eVo8N83oK?AE!sD~eZq)%gytP_tYg~GzI)YXSm2sClf_bfv zJE`6(!GcAswF=r8Fj zqj#~J&y~?(;?JD5JjoZRj>xYIl^~X>o99*T`?>$|vx-emD@YWnB=y-Q-=fyWWu4SR z_0zu*7NeiO(M-}IQ$>4WP8z(mT?{#9NnVV3+G)z9Y(y;`IJBjhb5J+4cgDrfYnJWC zrdA&G5xl7xkDcUJuUK7K(jdwtiXr|R&5U3b_wEyxJghPDcFXOMl+fA51$TO2kt2yJ?Rz zT~F|Gt)EAw+uiBdr`(E~JbR9Igs(eUjl)!$4p%`uudAIB60`d!t8mGL0~Wvio*e2s zW-g<#Kyt{bzM)8X*EUBpqb+t6*QFzD7OJ@N)9J${L-Coh?D?I136|k=*RQ-ix}|L> zUD9jfuDH+j-KEJpv5`35GX|=(sYCQRMt7_xRkQAanfEU_N}GuRLP_?sdUvYs23w`Z zXIUv2WF!R`j-^TQzpuZE$26Y4Zq>e#7{J3?vhX`M&UL*S z#cAVz#7bV&Sa3~UUKlH^ zv$yUWqgr!HVF>slYQiug6b?=;E}IwUjl94%pr_6Km~y4>U{AYEB}a#?@`%iW_V-gg z7TdRW59vCQYBPm`#NQQs0G1x+^D0iynY@+tb`eSrxlpwL+q2mH3 z>G+e4E>nlhxBnsE?fL%t{fKjJ*z6EpI-xrFfV-dMU0F}>2&$pqoRphPLb80zHFqQB z-98BkO@)-h zVD4JG*mKi5zVw5RcZNe*aH;RtkN2Y}r_*AFPIM<=D?6iDAskr(@{w0Gq>8#PZW9v;DYmzs;OBZx#TwSiqkC>Hf3XU$CzLI0=Mzy+9H8)$b^F*z5Z{v{p zsN33rJPTL6EyAVu^_Bsir*6OSHi>`*`WQh4lcuTAbki(;ss5}o$IU6ANI)OlY^+u- zwd9&s*+!upu|sV;7P~x49fnGPzNzI)2r}k6oWZzI;dYhkgJ-J*>qE?aTp4GhM?qB%v^)8_t!Fjw|`ll$u7e}O?abK=q9Z+wM#6LF}i4^Da zOcz zXOc<|&4=Tqlo8_M%-I`HC2Mi>Ygc8OAB^&&y9v5@zCMwZL&%F;Hw}za!d2C7^4uRA zmwgLa(hmu46o|bH^?I=xvruEC$|u=%$?T8H&3~4-KE*C4IXp$(OUcc!y^O2w#@)OU zR*+$abZ&3$`M9mku+rX`}TS8LhQJ!l?vC_d%cn@hZzCpxYe1ZOS% z9i~D=4ycpdOM>vX?a@CHk=|u<@T4VUgtOn0GWhGjtUXkA!r6BoxX-G3p$Y){ 
z6_3OqAFN1!CD5!r0*}CxIwG-St<=aqH2DMkSWPkc7TB_YXGNG_74fKR z%$?YhwaVPuq5szUt(_X}!RnlQ(&bxcBi2D?z?yw7;5Mrf0qe@qum4n&e7o4skb;h_ z?`N4^eY@B1pxS(>r(xf}o=EdKsCaI8bDe$HYZv>U2f8ND`G5RC$)<`S3M}|%&(Yi) ze?H_Gc}xN>KqFD1rf|56^@(iQa-jCmIt~4Qy`N}f;HLB_H3CM5{~a5ARI80~D7Nu{ zI!~-|%5U&`4?G0n+iw!bfBAdt!AJq3#Ekbp$0PJe0`=sM$GT_!H6&m&h5u~!?X%eh z@`C?7kpGO3wE^+p@X6$IVOXELL#Go3Wcu1&zJ?ZAuwt-9GO(R{;p*H zdC;N=VwcQ4kMo)Yo^~C*pl5#Mq$5-7Qsi(Ra``GhW3?EaC_aoZJZ+_g0m&2@|3+o# zjTB|haWHH*_6VSL{0FkD?MO{BX+(D;7eGEvg2d9SL|_3Sd2}e>3a=joPhvxoP9L@a z^@|9TM-jHbkgi&VEYv@lGny*hI)~H6^=_Yp=656N+%`_B8>#89`rUbIX6MD{Z~Txx zN2M8Td*`x!JtmA7@7AgQ^3ft|Np(mbdAz%af7&Ipg{6x#lH{M{J0_3Zb>oVoJPJZt%* zr0*Ygzh>}DFt62d8-_WiLjG>y3U|{M>!3I8^1xKDe_*a?n&T82V^uc-L{6Nl;GQx5 zlrii>hx+h{5BV-95!YU4-UT-i;R}yD;k6~yPAZps!}1GFSGCXkaeWVBj^w8 zBhQsQUtgk9?CDw0y5I|Q7df`NC{^2T{~zsrXH-*J*e+spU>pU((ZL3e0wN$K0s;!? zjH1${D7~lwI z``xw1Uw}F1?Ci7myWjG>&*S3Qx0=BTE;4uVtsuk0QG4rl{p!()v&a064BRunNnuL^JT;veM>9IvM zE%EaXk(vN6K3s8M*3lbjq-D$7{%x?DZyXc?9PzbMjc&Hzy@{{>t>tMY@OD01u~$h$%zJfgGd%ID@AfC9N2OoVu{i|gkx&=My_ue^C+6NmH5 zyhoMH1I^w6(^-~#N<$Ijbhdn~m;H<9H}tgq8_!akMenW?HliE6I-ze-^N=Ek$92cu z4lA(iXDQlwLpKu|bjr35--iR%xT~JK!*IisOYH@P%k{5)-h*WTUiL&*b_uOw^k|6eEovt0GN!9&z4Gmo8H`_JZkR zlM-T4r(cKo%d(*@K|1k!`;Bv=W@Qbb#(sD4BmeUD1_A%Md&M`MEy(-DoBIi|#@6?K zzxi98YRV@7iqx&_9a+a-pr2jd(?}&-w&dh5Ujq$f^#6se`ERJJ?)87O`hT!i#m;9o zad0Szp8fTN!RnI!Z{7Ys-(vWx)2YHcPok**?qL*wp341Y!|JxCTt~IM_X}U3i=UTz zCJHRw4xvWLWJ$zD)9RA+LhDJ;)~U}d&-euBxA1K{`tNMWEdYvs_hUG!-hZ|l@PiNh z5k4uinG@l4Umb&jXX>Mh`EL?Qt>B^F)y}?ojTaFY);%Jwo_6e8SiOP&+#h)N$W@sQ z34n%qE@8n~yLYNKm@khKvpYSU;MbZlj*l0%ci&+H<35ie4>`=7ug&BIICDY&!utBJ zQfsCz1Tm8Q>Jp5TsG=Zm6IF5yoL(oU%o={x#iLlm_|*rhs}T_ah?cdP0nw$oDcQa~ z)omFHm5lL4=;bwSLaMRY;fGQS#&jI$mks$*b^0vCoL>TU1sO0nOdzX^6JtTAk1Jn@ zM{1#*O{jaOI4zY2LHv(;o$6UjONAf-Sd7baq{N8mryY>b^U5SVNT$wx8N6J0BJ#JJ z_FSc`8#3c;07tN-48j2FGDw{CS)a2bp)NEHUWULC6`9d_^Jp5hv1J`u$tE{BTqKz(Hkr3V9s0{e?MH;6c+&?Q!%!=dd}=jp zcSSib`-E;q0}`QO0IW=(Hj6aKswcpwVcy`fplqj+{lilv|y_y(+)DxaNbk~Z~i&gbR^bo^1SVrF(~%ba8A;{jQf 
zi*e`X2!onC@gD~(VrwxbcsxS433WL(u!4adL`U(0`VQ1zRYWE{C+<7ts+2%z`&U7G6BJK4uXij&^y#csUkX@_rFp?}*CZ6QZPea)zBQsr8h zWQDA33<34DhCiTc`ZkiofL_9Xd@f}G3stDm5nh7QsPao~UJ2!Pk zB+24kNU;$dSpTf{KdzXrpX#<{lfQpHt5=zMDjr!7Ii&YL6!;^%SreEXIcW%%i6Lfu z09QIbqhuoyzREm3zsIE_$I13j$L-4vk56zLR7mcO&~1Ej&61LK{c@0?@xa0Tl(Wzr z`;5bo`i(pTAi;0a=Yk0z{QOCi75z&t8AaOfKz53sW@mb{W#)|S&AKMt`Vgwkh+{&o z9|U^T$0vw>E9lH8fD6D!bD}Rp7_?;0WE`oyXF25}wBeS7h>h-2}-<@cfbT!`mLR zvP4}f5M7HE+T%8V&+5g+%R@`8-)Twg^P^1RB&woTB--nc`?zvv&gPjT-Eo>% zO;k|?RS})v8+x{g?r2;+-I6M&k5R)U-Ffn-28u&N!QnN>g$?kUpp4an^27 zayYE-%zqSjn}K|N%8@z8JmNTNVy^sIcv?6wXA{O|rwd6DYz=KenLk8iqBI2)?H#W% z9~JSVx3X4Uj!yR)iegB6A)tDRoyZogWW&qHMTLJK-Q`#>zm0t8^Y>qvI23O3@8Ljf>T5jCn3!5$*`@88A!?~hIG?Nj%d+jEHLUoF8ynp(zt8(Z;s=RulF=peViN?2o0}IK6Xiw@)@W zMeHrZ1HBj9C@$y<%&=Zw81KC9_^Ly=k7*j+gxl zRsH;C#kx*wWe%f_a9GMF>G(zhXjn}I!7%(l*qYS{Z^v-^XvR7lMVY!V)DP6)`wLRY zjoz+&CtQDRVG;BwSfWNo4oKO4IXnQiRvI*GeYkaVQ~fKDi0nu4RoO^p7WIsUZ(x3` z?kLhPM*<8s`B}Tp&85<|+`e}C`;UEP$15T`n|)8+ic_@j&aYq}9}^vj96S&;e; z#An1GhLgBU9@&YqTi#4pXn{BGs33|xhOV|2%{}o8Skd1Kb<>hxbY5`uCt4^S7IV%W zSAAbPc#bRu0GG*EWgm35uQXK1NG@Uzd#!R>uqRsNf|Y~1EYGnIl*vU)modn#wRH^G zrs%*ZNufHe9?9cM;6;iB7!5&2AfxDLUrdeDQ?P+E zl|Ev7z`c35d6hKYeqkwVN$z1;UjGizFM%jae@+v}nVYR0GvL;ld7%+h$ZLAyd-JcT zhYtwv4?4?e_5c>909H|MgJ%^f^k{K&vj>Su^8gWBqOn@q-rO!+#}WT*|MWV?K!;_& zVYB%cqh^xH@d{lb=cuFS5^ntS2NQbD>bo7gR1iNhMMD4=jFrIc-n>h{xIKJFXNPf_krd$!+%wz(~TT|!KD2?>Lp;_D0Vz&jJT+$Q(OTx z<(iRueBPKw^H8+d(^gyP9HBP`ExorVRe9P-g-fV4Luq3CV3D!uH;XBT(y@U!&G@@= z`RC?K0n4mJso5!Jtm}IpSM1LOs}aOHwBM!ys62Mafy_WcBoOzK4?Bz>pwJ(y3E=4l z&7kR;9a7K-2fA=xmN7f$x0M>#On!p&Borg9j*9JPOHeVi1I5Ai9&M-z%<^RZhl&X^~2@VFcF(WSBono9Sqcu zIQm~ZbJcTYX*S9cSX&Sp1Mp28J8${5g+2i(u@am>#>7aJZ)}8azf!ld-sVnFnEo=+ z{36m90RF!T>lQUI`XpRCrYsVCHaLdyUGzW9cQy1_982BkMEtW`dx`02+1^JQ0$_?@ ztYREx0x?}bH80lp;(V&PPD_nwpF}J-ZHFG#QD$X7q_^xE+DTI1vEeX-axW#6D*~)d zt|=ESb=RreE92Nmp1;~jn3!v98n@4-N~2@=44>O)Z{~&63kx~bQ1{n8EZ{gle2DCE zAlrY_WK4*rqq9^%JFHH{@o^>D`PV#wrT#Zwc#a?f5%bgLJ)$F*6~?cyoNJJBiH5B| 
zH~nFEvkh(qHNSSfOj+c)iHRZZjBD*%si~^qE=yJ#*r>u|V0bDkV^ieGUv3KSI-&bm z@9Kk7KZ^f+=4rEu*v<4memqew{nUu}(aW8YLED}<8yvj3Nqpq5QyL%FZ+Lny=it`A zOJ(%PA$KdA1N&xsy%J)T8?a@w4e`pe3io>#=<)8%xc;)%MfWE9B0w^-Y=75K-9h?g z=Ne2rX_!+(F5H-8s+q2NMxur)artEZC#}opIGjVTC{`;zE9)DKnLg4V)v^Ft>?2;e z-ii0)Y^oS;jhFVGx^G#=%o`<)s*;Dwo0u<(txLV>kE@!G0>#Qo=E}8wgGd9GH22-j zQ=?%s>>dEN=k}INX$z+JCW*Y3CI<+|$yxGu7u@UTNXJl_LL%wb^5n9d*-{*lxe)78 zp0O-*qNL15(dj)$?1!inGQ(|;4M|a==Tr6-Mqp)E+ww=xs9kfKF~s8*{{AZx*eDzB ztMiLM4?R`Y@VSQ(tv0KD8weuSkb)1N=7`+291?c%q$%gyC%ZU?-WPCt8<=}O*gHJ`_|L~x4C3x~^4dCoCv#ruYy6tm!l|)UmcNiH+m5Kz zJSgugSxxg~=H?*Tn z`V*h+_Wc0k)i+IPw3(awxyaIVEfSnEx3ckQc-va7;MP53Q+w#C3^uZElTPEH+b?m! zBn%Z(MnCkvrzE!=pq=VrTxTl^0*}|pjcV_z4Ro228K$+pgs$f5iuZ2yQ_gQc6K@yw}gaK8&hw9z_f+?@<{aZcg+E$F}z?7P0U%85j z`!SFMm%yiR5k;{F24Cmrgii16y4bfo$MuGhT((@axc@Idlh;5&zL=78_eFl)cGZ0> zqg}r87OVF-$*c`5iNkZ?1HPAd?#2jcbL>2}fy(#hqk1cytktxJDzW0?Jjvuett+Us z$$HL}F=_V`bP0d8V&tu#o;qa;SV)$I%5;s&<=%p$jJ6*@z;B#mAvva_1~Xn zw&GLi=rP=N&UOvPk#_@>)J$t7_8m|NZb&{AS_12Su=6z;#$S7uL5;sN-2T1onogj! 
z#JYX%MuQm&dobtfH6QfGcR|m?mw`~GV2o40IT)SCOWbZ^X_1FMO38C#v@L|Zx|;^^ zlcP6G_YxznOqvu+7)p5!N-PkTAU-_MBJ~{ zp3_iJ8vx%o+YwGbFNGw}3w%b}c!cAVk+FNlPav>K)78w0J(a+k zLAO5qPr5yejzKlLa*}KH91iD^`8!2+)0@zqGU5xUL7g9}wQC&L37prD>|8&10*fC3 zHOyQ@8pnLyNt3EELjlZm^)b(b3RlEU=f;3CAsvB(3^c{C=eRIG#)O039?xJK0Mc#olA)JpBW5^=!@l~hRtLG>SdqVhuU z(>?YpyZ+-*hy8hfqnhkkYo{t!-7soaGXJm3*N+cISbn!Cq9lmeyhK#F@zP`Um^fY1M7-D^-B7b?AQU@KC8<|44G$ zh-$N9fLzOqnn{(wS{CDvVm(VLVOA_LAkukpch-plmq4l?R2+x$bX94?8M&2V>h5r2 z&yEZl3g#G0G%0E%7VMu+y|nuff~&I{q5k@M|@NtN72mfWcQOulhfdYeGiSznf; zadDgFN|gy=YZuTd)ozf8GG9`~J9}@SO5EPHA~E5m)@&<&UsXTrQK}Ds$-4~Vh?P!= zO)zFDJm3?s;+t223U8-FdiKTjwnaOQMxS|3^qDui$kY6w=b+O;Eo*sswx+8h@Z(*} z4uwgR&9^(D(ZYO%y09p)vNRFcFX2O9cy@=UYuJ4Ar3)7otczWz17oH)|K(O?uhL8~ z{;MO(bEvfNl3Y?Jwh;M0F+S~Kz}4m0Q40@G0jk z-I_Y{ED|SPn7ZcJI=g?k#iL@>%%#v1If&ITtKCn|wO?>3-90S>%$;${M4-`vu zKhPpvXp?bOM(3?K&4Q-LXRQj!yK=jmub|trI`x%Sa$9by@%P)qC1Mdt=!g=Z3gm>| zwLO`a8*hBr_lZ!h-BBm^BYoh`W!>5v8!yb1|B2rG$h5RQw2<#QI1oeM{PzVpFiHen zA*=&8kyu5azRfMqZAl~^Yb9JE3kS=U^yYZB?Qw+N&1}77lX|VvY0DW7&HGwQ0OsWx zExz|Xsk)&kp8IcZd*9jjInPE$+By$4w>ReU5m-F>+)msy;iCi}zRTp4+{V#ev2~H^ zqQcT?Rdb38zfuYAI^BFX$giLS+EK*?FsDnn-==C{iNp-XsK$Yml?Ns46i&UpcuFdC zu&YI$=Dk|AEax&h+;a+Okl_7YJ&EUgf~M?6-SS*>Ajy(nMW-+_$>{x}-{V2i4asN2 zmJcpe1EZLd9qvWZTvl)o8uzs9OH5}kJBrj$o+hfFO--AaCC`-D= zvK4_V%k#rXm74$Vfx-kk4Lj$aa~0Q*d~@chxm$? 
zD{;jiAP^%-M75ZTLV|KW4tL$JY|N)8#`sxq7m_(iIHB31!AqAId?HD^6$@tEUWoWtep;S~~ ze5{aMNujv^A#(JfzvhSLhfF#JC-A`srxiQxyZTZ^xVebo24n2b!wd}H6SsSy=4^a& z*ym8A+)W1RYVs+TUWdgxGqkr|oPegQ`BK^T=mPIV$#KP2Cx@o+l%ld)nRSwnBs>nh zN#PN*B*i-=?00S)d*egEq)7rOz}I4g@vKrlSF*8r(3HCmC753oV`yf|F^nGi8Ij*b z4vHMWZI>T!ma#XSH%Jhe2N$|Sb$GsMO46U6&)t}j z_u;MkQ+Uc;u!+O@LMi@=^82K^lu5Q4yqZ;@oAo z`Em04x54V1nhL)^cTKJyE-RMOkmdJ_!HWgf%FiH(qJ0Gh=yImQz2rSGr8iy2cbc|H^2y43lPXnS1aF{=a7f;F41bsKZ{tq z{4}8{J`+{^?&#hA)3@pGOwy)WHW0=a4ClqIw=Bix1Y)=fxl2XJTD1XdejMzww-Q3u zo+A2dUKLu{-I!pyyuiYm!wm665;^ETgQ)q%`C_ksUORqX*&ge$Rq_tBXAzvBiSs1R zR(8YcZ=XM(q@`Bx6rTU@3YO9)E@FJcwzaegx>|nSzW)aK-%S3uCRexV|GZ#!4~_BqvkcOQg9GPP&LzqK5ByhuFza`X|TBSPMK)Mx6g(;>Z0_${2c z7G*0%q>R3vS2|oV=tRecZ%xzGDYg1B+8Mj=zBM@;g@mlPSn+z`SMATL>G0pmE4v`e z*Zno=fb7URE-vEwpMEv}*1D)!f?=x;3HD?gs?Ybe;s8s^u~B!&w+5?a2bdwL{g2p= z$mg0O?@vKN_x~H_*O~usHve0js~h(JdJ7GF=WqY}>aN6}dzG%I zmqUb}h4^j`I2e2e2fpN5hb8WMl*ztLbQuLME5~_{K@s2V71RR)qZEQ_6rIo#ngtc%@o3K~h^9K>E1FI;%ZQrXj4ekDMuKJ)0=>-XWQTwRLVNfrdemi|!0=!Ez; zfIggju4)ybczheLtnbl-G3;E`DF4x`18V*QQ24+ey87wE>ybrPb8aAt>#}x}y(apU zTKrj!3dw_fZxCCo(i6~@t491h8wvixV>nL_7PGPQu2i+r zcwbcoRR(c(`I`|I2}4y-<5R1W!saW!2arShGh0uHd}BKiy5>%2a9iOLXq{(;&90^t zjWFwIHajjw6tdq%CR8%gsaipkqd*;>`^@5`$xUl+w@dRwQh~;f!lfwP%+7?2zsyhG z;>>;321x>r1QE}Bzo~z4h<1%E{DXX$c%5jah!r!g@k2C&Mo(q#=FV5N9id)>M2MG| zMGdasa(Oap1d<0{{!ZP|&MV7H(~beOId`+}X@JvTfOLuZG+^;VRA$*Mxm+k;JRR!I zsxQOUXEtVLxSlpgpV~p$e989E74JkD; zWna=IM0n++stBs1kQ(hd*z)|q5dRx0;X@4N2JpFeJ}oL04kj2kGqbNyQEeDi3`edS zk2H)T#nvuJSXt&|haf6F+{J8wb9G>4Q!nf+N1Q0D0l>g|mVrcKlPoJLZx%95 z(8CV(#;$cT``nM!4+Y`uayAt%UZf(}ilfU+I0vIx_!!QyZ&6fL!PcDRPHNxXU)v*R zfm^(xx(Z-cfwhao~#C`OKUBMK-PvC3=Z*xEX3Odia;>10W28u^g<15f}!W9?% zAiuA~Sf@|)CBX*7jg*^sGNiDvjibN1-zWNI#ejpyySkXN!>$Ev^D z|AGsrL4Yt9q3(~xnUb2Ey95DhaJ64TOLS0lS8z#z>UXMu>hmD>E{Z1(8HbdNWA8r8;FI7VDG+{=S}nUtsUfE$Q96254eLmZ{oy#!nmUpe=)=Y{CZx1D z1H&ewyUppM5G}$|Dv3GShhcO?-Zu-?V4Pxf&sQHDpXhFC>vv2Hh0XRi z1Bp&T(EYL-S5lv2!JygxUiS3DoJP-e3Jh)ixs!&*8%?*#-ZCO&68syx_!ec`6$8vh 
zlpvRP`nA-M?3GX9iHmQduh87On1kot{1}PnO-@?Rq`e11hJu7ni7rE2V6h&qk`!@P zux{4Y@}|vuCsP8aj?(*;7kv}`JLVR=WD8PiPBI(biv1Eq;RO$JU&v#XxWb_O-fh>@ zDwApowbWtH%UEl7xu+}$6dB^~l)JCh0^yHI;Bl&bcd_JK zk5iflN*#Mgn*K#Gh5sI$*gZzCz5(F`QI5e7ywjD}3gNJ1HXZ`M__~z`J4N;7wk%#= z+2Kd;*}PU?eb{vHVO#K>lHa=8^k3BK{~P4$6#aiSlUWs%*U#=$?9ltBCmJsYt83>L zRf$*%=zAX-nS6TdJN1dr9s%354ZMLB6z;D7=T}!A{U8DX6aV=!lf!sP)8dXjKnGG1 zl{M(|4b*HsT((*5!q8=ICpe{CJ|IR!qs?6=YfI^i!*ShV1L{cW6a^N31)cFf;#W&R zDD0weYzU^W+mdsCU z14-NlQ?%__cKGPl8;c+7MxW7fBz2n1=Lex_grruu`Iz}hJROIsQy$I8g&g_t`k6+0 z5^e2ssgOELW9j=?an^wJVJ7&gomGfv*@nD+m*uOk9VRbtz&s4zi@$`&fa6%`0KSUH zUF83b04Uia>4oDeF+Um}ehwbZ5?n9&mPje;vQRdWuK=%Z1favC#+&3mGiJjF)dJxS zEQj7BH0oLbu}&w_MlTWSvPC|cyVE`WZ*Xh7;o+#fsgqAZl}K~I>P#EwLPFl5IpKI( z=?3jJ3OfJ?2GgJK!b+-~;w)2Pfv>lBL*p@376`3)VUzUVOdR?8-S3KltIjO7pBCe* zzoLa28XZu5Pk{aMMzV5I$@0dZCIf3%V76Fn8=@dss+%X((=57C5lKH0jG-pCmcs7mMi6;(Y)+BBqccp4<^2>spVo3gXB zJ8o~;eYB9y|tq+!m)Mn{1@`O z)zAL~!{7hC>z6Z-z38k=;I;mLI>mIw7-asoW#^+F8K|xzP!G1gZqJ1x@rXYT& zt?fQFD<6nT+jnLDD=CIqPNeXhfjPC>ido#Wj7kMxj94aBGu8E9?P^?fO_)l7lt2G9 zZEE#XUs%0A+tJVFb}M`OznDZWV)gp3dmH^AjfLsPMI8FpYvEY8ul);s_~#38BCxzd z^iKWjS|RKCEngM_`=Jy%tYV^W=Bk_g<-dY8+3Mk6euV~ZBCI6rxgSxByt805Y Date: Sat, 25 Jul 2020 00:49:26 +0000 Subject: [PATCH 076/288] Added RegexSubProcessor Attribute Processor --- .../attribute_processor.yaml.example | 7 +++ .../processors/regex_sub_processor.py | 43 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 src/satosa/micro_services/processors/regex_sub_processor.py diff --git a/example/plugins/microservices/attribute_processor.yaml.example b/example/plugins/microservices/attribute_processor.yaml.example index 8d946f684..a20bb2faa 100644 --- a/example/plugins/microservices/attribute_processor.yaml.example +++ b/example/plugins/microservices/attribute_processor.yaml.example @@ -15,3 +15,10 @@ config: - name: ScopeProcessor module: satosa.micro_services.processors.scope_processor scope: example.com + - attribute: role + processors: + - name: 
RegexSubProcessor + module: satosa.micro_services.processors.regex_sub_processor + regex_sub_match_pattern: !ENV REGEX_MATCH_PATTERN + regex_sub_replace_pattern: !ENV REGEX_REPLACE_PATTERN + diff --git a/src/satosa/micro_services/processors/regex_sub_processor.py b/src/satosa/micro_services/processors/regex_sub_processor.py new file mode 100644 index 000000000..85b95b50a --- /dev/null +++ b/src/satosa/micro_services/processors/regex_sub_processor.py @@ -0,0 +1,43 @@ +from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning +from .base_processor import BaseProcessor +import re +import logging + +CONFIG_KEY_MATCH_PATTERN = 'regex_sub_match_pattern' +CONFIG_KEY_REPLACE_PATTERN = 'regex_sub_replace_pattern' +logger = logging.getLogger(__name__) +class RegexSubProcessor(BaseProcessor): + """ + Performs a regex sub against an attribute value. + Example configuration: + module: satosa.micro_services.attribute_processor.AttributeProcessor + name: AttributeProcessor + config: + process: + - attribute: role + processors: + - name: RegexSubProcessor + module: satosa.micro_services.custom.processors.regex_sub_processor + regex_sub_match_pattern: (?<=saml-provider\/)(.*)(?=,) + regex_sub_replace_pattern: \1-Test + + """ + + def process(self, internal_data, attribute, **kwargs): + regex_sub_match_pattern = r'{}'.format(kwargs.get(CONFIG_KEY_MATCH_PATTERN, '')) + if regex_sub_match_pattern == '': + raise AttributeProcessorError("The regex_sub_match_pattern needs to be set") + + regex_sub_replace_pattern = r'{}'.format(kwargs.get(CONFIG_KEY_REPLACE_PATTERN, '')) + if regex_sub_replace_pattern == '': + raise AttributeProcessorError("The regex_sub_replace_pattern needs to be set") + attributes = internal_data.attributes + + values = attributes.get(attribute, []) + new_values = [] + if not values: + raise AttributeProcessorWarning("Cannot apply regex_sub to {}, it has no values".format(attribute)) + for value in values: + 
new_values.append(re.sub(r'{}'.format(regex_sub_match_pattern), r'{}'.format(regex_sub_replace_pattern), value)) + logger.debug('regex_sub new_values: {}'.format(new_values)) + attributes[attribute] = new_values \ No newline at end of file From 211005af70f4ac9b76460a94760a2d072a49b975 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Sep 2020 11:39:30 +0200 Subject: [PATCH 077/288] entityid_endpoint in the example SAML2 frontend configuration To demonstrate that it has to be a top level option, not nested inside `idp_config` --- example/plugins/frontends/saml2_frontend.yaml.example | 1 + 1 file changed, 1 insertion(+) diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index 87bc4203f..40c9000f2 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ b/example/plugins/frontends/saml2_frontend.yaml.example @@ -1,6 +1,7 @@ module: satosa.frontends.saml2.SAMLFrontend name: Saml2IDP config: + entityid_endpoint: true idp_config: organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: From 4a442b6f6d0d023ee78e128e96a896e1f89f9cc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Sep 2020 11:41:06 +0200 Subject: [PATCH 078/288] entityid_endpoint in the example SAML2 backend configuration To demonstrate that it has to be a top level option, not nested inside `sp_config` --- example/plugins/backends/saml2_backend.yaml.example | 1 + 1 file changed, 1 insertion(+) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index a71dfd0d4..d5ec7cb56 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -3,6 +3,7 @@ name: Saml2 config: idp_blacklist_file: /path/to/blacklist.json + entityid_endpoint: true mirror_force_authn: no 
memorize_idp: no use_memorized_idp_when_force_authn: no From 24a7651f0f598df0e58bb30d1c634edd4842f113 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Sep 2020 13:45:49 +0200 Subject: [PATCH 079/288] Add sub_hash_salt to README Note that `sub_hash_salt` is regenerated on startup if not specified in config, which results in varying identifiers. https://github.com/IdentityPython/SATOSA/blob/8b641cebbc4910ecc5ac897a67e1e530cf408c24/src/satosa/frontends/openid_connect.py#L99 --- doc/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/README.md b/doc/README.md index fe87e1f97..b41bcf3aa 100644 --- a/doc/README.md +++ b/doc/README.md @@ -433,6 +433,7 @@ The configuration parameters available: * `signing_key_path`: path to a RSA Private Key file (PKCS#1). MUST be configured. * `db_uri`: connection URI to MongoDB instance where the data will be persisted, if it's not specified all data will only be stored in-memory (not suitable for production use). +* `sub_hash_salt`: salt which is hashed into the `sub` claim. If it's not specified, SATOSA will generate a random salt on each startup, which means that users will get new `sub` value after every restart. * `provider`: provider configuration information. MUST be configured, the following configuration are supported: * `response_types_supported` (default: `[id_token]`): list of all supported response types, see [Section 3 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#Authentication). 
* `subject_types_supported` (default: `[pairwise]`): list of all supported subject identifier types, see [Section 8 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes) From 6214b41b0a8ae4b20792e6c341d0e71f7f2d25cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Sep 2020 13:47:49 +0200 Subject: [PATCH 080/288] add sub_hash_salt to the example OIDC frontend configuration --- example/plugins/frontends/openid_connect_frontend.yaml.example | 1 + 1 file changed, 1 insertion(+) diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 6c94ea758..1006302e2 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -4,6 +4,7 @@ config: signing_key_path: frontend.key db_uri: mongodb://db.example.com # optional: only support MongoDB, will default to in-memory storage if not specified client_db_path: /path/to/your/cdb.json + sub_hash_salt: randomSALTvalue # if not specified, it is randomly generated on every startup provider: client_registration_supported: Yes response_types_supported: ["code", "id_token token"] From 1c4e316237b6ab6284451e4f95121530244f3982 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 6 Oct 2020 11:55:10 +0200 Subject: [PATCH 081/288] Sign in with Apple backend --- .../backends/apple_backend.yaml.example | 29 ++ src/satosa/backends/apple.py | 285 ++++++++++++++++++ 2 files changed, 314 insertions(+) create mode 100644 example/plugins/backends/apple_backend.yaml.example create mode 100644 src/satosa/backends/apple.py diff --git a/example/plugins/backends/apple_backend.yaml.example b/example/plugins/backends/apple_backend.yaml.example new file mode 100644 index 000000000..4426c8cc4 --- /dev/null +++ b/example/plugins/backends/apple_backend.yaml.example @@ -0,0 +1,29 @@ +module: 
satosa.backends.apple.AppleBackend +name: apple +config: + provider_metadata: + issuer: https://appleid.apple.com + client: + verify_ssl: yes + auth_req_params: + response_type: code + scope: [openid, email, name] + response_mode: form_post + token_endpoint_auth_method: client_secret_post + client_metadata: + application_name: Sign in with Apple + application_type: web + client_id: 'CLIENT_ID_HERE' + client_secret: 'CLIENT_SECRET_HERE' + redirect_uris: [/] + subject_type: pairwise + entity_info: + organization: + display_name: + - ['Apple', 'en'] + name: + - ['Apple Inc.', 'en'] + ui_info: + display_name: + - lang: en + text: 'Sign in with Apple' diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py new file mode 100644 index 000000000..633e22c19 --- /dev/null +++ b/src/satosa/backends/apple.py @@ -0,0 +1,285 @@ +""" +Apple backend module. +""" +import logging +from datetime import datetime +from urllib.parse import urlparse + +from oic.oauth2.message import Message +from oic import oic +from oic import rndstr +from oic.oic.message import AuthorizationResponse +from oic.oic.message import ProviderConfigurationResponse +from oic.oic.message import RegistrationRequest +from oic.utils.authn.authn_context import UNSPECIFIED +from oic.utils.authn.client import CLIENT_AUTHN_METHOD + +import satosa.logging_util as lu +from satosa.internal import AuthenticationInformation +from satosa.internal import InternalData +from .base import BackendModule +from .oauth import get_metadata_desc_for_oauth_backend +from ..exception import SATOSAAuthenticationError, SATOSAError +from ..response import Redirect + +import base64 +import json +import requests + + +logger = logging.getLogger(__name__) + +NONCE_KEY = "oidc_nonce" +STATE_KEY = "oidc_state" + +# https://developer.okta.com/blog/2019/06/04/what-the-heck-is-sign-in-with-apple +class AppleBackend(BackendModule): + """Sign in with Apple backend""" + + def __init__(self, auth_callback_func, internal_attributes, 
config, base_url, name): + """ + Sign in with Apple backend module. + :param auth_callback_func: Callback should be called by the module after the authorization + in the backend is done. + :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and + the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and + RP's expects namevice. + :param config: Configuration parameters for the module. + :param base_url: base url of the service + :param name: name of the plugin + + :type auth_callback_func: + (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response + :type internal_attributes: dict[string, dict[str, str | list[str]]] + :type config: dict[str, dict[str, str] | list[str]] + :type base_url: str + :type name: str + """ + super().__init__(auth_callback_func, internal_attributes, base_url, name) + self.auth_callback_func = auth_callback_func + self.config = config + self.client = _create_client( + config["provider_metadata"], + config["client"]["client_metadata"], + config["client"].get("verify_ssl", True), + ) + if "scope" not in config["client"]["auth_req_params"]: + config["auth_req_params"]["scope"] = "openid" + if "response_type" not in config["client"]["auth_req_params"]: + config["auth_req_params"]["response_type"] = "code" + + def start_auth(self, context, request_info): + """ + See super class method satosa.backends.base#start_auth + :type context: satosa.context.Context + :type request_info: satosa.internal.InternalData + """ + oidc_nonce = rndstr() + oidc_state = rndstr() + state_data = { + NONCE_KEY: oidc_nonce, + STATE_KEY: oidc_state + } + context.state[self.name] = state_data + + args = { + "scope": self.config["client"]["auth_req_params"]["scope"], + "response_type": self.config["client"]["auth_req_params"]["response_type"], + "client_id": self.client.client_id, + "redirect_uri": self.client.registration_response["redirect_uris"][0], + "state": oidc_state, + 
"nonce": oidc_nonce + } + args.update(self.config["client"]["auth_req_params"]) + auth_req = self.client.construct_AuthorizationRequest(request_args=args) + login_url = auth_req.request(self.client.authorization_endpoint) + return Redirect(login_url) + + def register_endpoints(self): + """ + Creates a list of all the endpoints this backend module needs to listen to. In this case + it's the authentication response from the underlying OP that is redirected from the OP to + the proxy. + :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] + :return: A list that can be used to map the request to SATOSA to this endpoint. + """ + url_map = [] + redirect_path = urlparse(self.config["client"]["client_metadata"]["redirect_uris"][0]).path + if not redirect_path: + raise SATOSAError("Missing path in redirect uri") + + url_map.append(("^%s$" % redirect_path.lstrip("/"), self.response_endpoint)) + return url_map + + def _verify_nonce(self, nonce, context): + """ + Verify the received OIDC 'nonce' from the ID Token. 
+ :param nonce: OIDC nonce + :type nonce: str + :param context: current request context + :type context: satosa.context.Context + :raise SATOSAAuthenticationError: if the nonce is incorrect + """ + backend_state = context.state[self.name] + if nonce != backend_state[NONCE_KEY]: + msg = "Missing or invalid nonce in authn response for state: {}".format(backend_state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + raise SATOSAAuthenticationError(context.state, "Missing or invalid nonce in authn response") + + def _get_tokens(self, authn_response, context): + """ + :param authn_response: authentication response from OP + :type authn_response: oic.oic.message.AuthorizationResponse + :return: access token and ID Token claims + :rtype: Tuple[Optional[str], Optional[Mapping[str, str]]] + """ + if "code" in authn_response: + # make token request + # https://developer.apple.com/documentation/sign_in_with_apple/generate_and_validate_tokens + args = { + "client_id": self.client.client_id, + "client_secret": self.client.client_secret, + "code": authn_response["code"], + "grant_type": "authorization_code", + "redirect_uri": self.client.registration_response['redirect_uris'][0], + } + + token_resp = requests.post( + "https://appleid.apple.com/auth/token", + data=args, + headers={"Content-Type": "application/x-www-form-urlencoded"} + ).json() + + logger.debug("apple response received") + logger.debug(token_resp) + + self._check_error_response(token_resp, context) + + keyjar = self.client.keyjar + id_token_claims = dict(Message().from_jwt(token_resp["id_token"], keyjar=keyjar)) + + return token_resp["access_token"], id_token_claims + + return authn_response.get("access_token"), authn_response.get("id_token") + + def _check_error_response(self, response, context): + """ + Check if the response is an OAuth error response. 
+ :param response: the OIDC response + :type response: oic.oic.message + :raise SATOSAAuthenticationError: if the response is an OAuth error response + """ + if "error" in response: + msg = "{name} error: {error} {description}".format( + name=type(response).__name__, + error=response["error"], + description=response.get("error_description", ""), + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + raise SATOSAAuthenticationError(context.state, "Access denied") + + def response_endpoint(self, context, *args): + """ + Handles the authentication response from the OP. + :type context: satosa.context.Context + :type args: Any + :rtype: satosa.response.Response + + :param context: SATOSA context + :param args: None + :return: + """ + backend_state = context.state[self.name] + authn_resp = self.client.parse_response(AuthorizationResponse, info=context.request, sformat="dict") + if backend_state[STATE_KEY] != authn_resp["state"]: + msg = "Missing or invalid state in authn response for state: {}".format(backend_state) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + raise SATOSAAuthenticationError(context.state, "Missing or invalid state in authn response") + + self._check_error_response(authn_resp, context) + access_token, id_token_claims = self._get_tokens(authn_resp, context) + if not id_token_claims: + id_token_claims = {} + + # Apple has no userinfo endpoint + userinfo = {} + + if not id_token_claims and not userinfo: + msg = "No id_token or userinfo, nothing to do.." 
+ logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise SATOSAAuthenticationError(context.state, "No user info available.") + + all_user_claims = dict(list(userinfo.items()) + list(id_token_claims.items())) + msg = "UserInfo: {}".format(all_user_claims) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + del context.state[self.name] + internal_resp = self._translate_response(all_user_claims, self.client.authorization_endpoint) + return self.auth_callback_func(context, internal_resp) + + def _translate_response(self, response, issuer): + """ + Translates oidc response to SATOSA internal response. + :type response: dict[str, str] + :type issuer: str + :type subject_type: str + :rtype: InternalData + + :param response: Dictioary with attribute name as key. + :param issuer: The oidc op that gave the repsonse. + :param subject_type: public or pairwise according to oidc standard. + :return: A SATOSA internal response. + """ + auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), issuer) + internal_resp = InternalData(auth_info=auth_info) + internal_resp.attributes = self.converter.to_internal("openid", response) + internal_resp.subject_id = response["sub"] + return internal_resp + + def get_metadata_desc(self): + """ + See satosa.backends.oauth.get_metadata_desc + :rtype: satosa.metadata_creation.description.MetadataDescription + """ + return get_metadata_desc_for_oauth_backend(self.config["provider_metadata"]["issuer"], self.config) + + +def _create_client(provider_metadata, client_metadata, verify_ssl=True): + """ + Create a pyoidc client instance. 
+ :param provider_metadata: provider configuration information + :type provider_metadata: Mapping[str, Union[str, Sequence[str]]] + :param client_metadata: client metadata + :type client_metadata: Mapping[str, Union[str, Sequence[str]]] + :return: client instance to use for communicating with the configured provider + :rtype: oic.oic.Client + """ + client = oic.Client( + client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl + ) + + # Provider configuration information + if "authorization_endpoint" in provider_metadata: + # no dynamic discovery necessary + client.handle_provider_config(ProviderConfigurationResponse(**provider_metadata), + provider_metadata["issuer"]) + else: + # do dynamic discovery + client.provider_config(provider_metadata["issuer"]) + + # Client information + if "client_id" in client_metadata: + # static client info provided + client.store_registration_info(RegistrationRequest(**client_metadata)) + else: + # do dynamic registration + client.register(client.provider_info['registration_endpoint'], + **client_metadata) + + client.subject_type = (client.registration_response.get("subject_type") or + client.provider_info["subject_types_supported"][0]) + return client From e98172bba4ec35c638523183a958286b84a3ab2f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 25 Oct 2020 23:21:53 +0200 Subject: [PATCH 082/288] Remove the metadata_construction param Additionally, the saml2.assertion.Policy object can be initialized with a metadata store and thus the .restrict and .filter methods do not need such a param. This will remain as it was, until some time has passed and confidence is built that peolpe are using a recent enough version of pysaml2, before dropping the param from the .restrict method. Up to that point, warnings will be output, but functionality is preserved. 
Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 4 +--- src/satosa/frontends/saml2.py | 20 +++++++++++-------- src/satosa/metadata_creation/saml_metadata.py | 2 +- tests/flows/test_oidc-saml.py | 2 +- tests/flows/test_saml-oidc.py | 2 +- tests/flows/test_saml-saml.py | 4 ++-- tests/satosa/backends/test_saml2.py | 10 +++++----- tests/satosa/frontends/test_saml2.py | 12 +++++------ .../metadata_creation/test_saml_metadata.py | 4 ++-- tests/util.py | 2 +- 10 files changed, 32 insertions(+), 30 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 2c37e6a2b..d855080a2 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -104,9 +104,7 @@ def __init__(self, outgoing, internal_attributes, config, base_url, name): super().__init__(outgoing, internal_attributes, base_url, name) self.config = self.init_config(config) - sp_config = SPConfig().load(copy.deepcopy( - config[SAMLBackend.KEY_SP_CONFIG]), False - ) + sp_config = SPConfig().load(copy.deepcopy(config[SAMLBackend.KEY_SP_CONFIG])) self.sp = Base(sp_config) self.discosrv = config.get(SAMLBackend.KEY_DISCO_SRV) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 752ff431b..8c788d749 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -116,7 +116,7 @@ def register_endpoints(self, backend_names): self.idp_config = self._build_idp_config_endpoints( self.config[self.KEY_IDP_CONFIG], backend_names) # Create the idp - idp_config = IdPConfig().load(copy.deepcopy(self.idp_config), metadata_construction=False) + idp_config = IdPConfig().load(copy.deepcopy(self.idp_config)) self.idp = Server(config=idp_config) return self._register_endpoints(backend_names) @@ -290,9 +290,14 @@ def _filter_attributes(self, idp, internal_response, context,): idp_policy = idp.config.getattr("policy", "idp") attributes = {} if idp_policy: - approved_attributes = self._get_approved_attributes(idp, idp_policy, 
internal_response.requester, - context.state) - attributes = {k: v for k, v in internal_response.attributes.items() if k in approved_attributes} + approved_attributes = self._get_approved_attributes( + idp, idp_policy, internal_response.requester, context.state + ) + attributes = { + k: v + for k, v in internal_response.attributes.items() + if k in approved_attributes + } return attributes @@ -637,7 +642,7 @@ def _load_idp_dynamic_endpoints(self, context): """ target_entity_id = context.target_entity_id_from_path() idp_conf_file = self._load_endpoints_to_config(context.target_backend, target_entity_id) - idp_config = IdPConfig().load(idp_conf_file, metadata_construction=False) + idp_config = IdPConfig().load(idp_conf_file) return Server(config=idp_config) def _load_idp_dynamic_entity_id(self, state): @@ -653,7 +658,7 @@ def _load_idp_dynamic_entity_id(self, state): # Change the idp entity id dynamically idp_config_file = copy.deepcopy(self.idp_config) idp_config_file["entityid"] = "{}/{}".format(self.idp_config["entityid"], state[self.name]["target_entity_id"]) - idp_config = IdPConfig().load(idp_config_file, metadata_construction=False) + idp_config = IdPConfig().load(idp_config_file) return Server(config=idp_config) def handle_authn_request(self, context, binding_in): @@ -1033,8 +1038,7 @@ def _create_co_virtual_idp(self, context): # Use the overwritten IdP config to generate a pysaml2 config object # and from it a server object. 
- pysaml2_idp_config = IdPConfig().load(idp_config, - metadata_construction=False) + pysaml2_idp_config = IdPConfig().load(idp_config) server = Server(config=pysaml2_idp_config) diff --git a/src/satosa/metadata_creation/saml_metadata.py b/src/satosa/metadata_creation/saml_metadata.py index 1a9e1d730..f1b294759 100644 --- a/src/satosa/metadata_creation/saml_metadata.py +++ b/src/satosa/metadata_creation/saml_metadata.py @@ -17,7 +17,7 @@ def _create_entity_descriptor(entity_config): - cnf = Config().load(copy.deepcopy(entity_config), metadata_construction=True) + cnf = Config().load(copy.deepcopy(entity_config)) return entity_descriptor(cnf) diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index 2d51c9dd6..e5888fb8f 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -78,7 +78,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ # config test IdP backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) idp_conf["metadata"]["inline"].append(backend_metadata_str) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) # create auth resp req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) diff --git a/tests/flows/test_saml-oidc.py b/tests/flows/test_saml-oidc.py index b0068cc50..e242ebb89 100644 --- a/tests/flows/test_saml-oidc.py +++ b/tests/flows/test_saml-oidc.py @@ -32,7 +32,7 @@ def run_test(self, satosa_config_dict, sp_conf, oidc_backend_config, frontend_co # config test SP frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0]) sp_conf["metadata"]["inline"].append(frontend_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) # create auth req destination, req_args = 
fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id) diff --git a/tests/flows/test_saml-saml.py b/tests/flows/test_saml-saml.py index 29f20fc0f..ce6cd6960 100644 --- a/tests/flows/test_saml-saml.py +++ b/tests/flows/test_saml-saml.py @@ -28,7 +28,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f # config test SP frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0]) sp_conf["metadata"]["inline"].append(frontend_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) # create auth req destination, req_args = fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id) @@ -41,7 +41,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f # config test IdP backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) idp_conf["metadata"]["inline"].append(backend_metadata_str) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) # create auth resp req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) diff --git a/tests/satosa/backends/test_saml2.py b/tests/satosa/backends/test_saml2.py index e5e2d905c..eed74db6c 100644 --- a/tests/satosa/backends/test_saml2.py +++ b/tests/satosa/backends/test_saml2.py @@ -118,7 +118,7 @@ def test_discovery_server_set_in_context(self, context, sp_conf): def test_full_flow(self, context, idp_conf, sp_conf): test_state_key = "test_state_key_456afgrh" response_binding = BINDING_HTTP_REDIRECT - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) context.state[test_state_key] = "my_state" @@ -181,8 +181,8 @@ def test_authn_request(self, context, idp_conf): def test_authn_response(self, context, idp_conf, 
sp_conf): response_binding = BINDING_HTTP_REDIRECT - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) destination, request_params = fakesp.make_auth_req(idp_conf["entityid"]) url, auth_resp = fakeidp.handle_auth_req(request_params["SAMLRequest"], request_params["RelayState"], BINDING_HTTP_REDIRECT, @@ -202,10 +202,10 @@ def test_authn_response(self, context, idp_conf, sp_conf): def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT - fakesp_conf = SPConfig().load(sp_conf, metadata_construction=False) + fakesp_conf = SPConfig().load(sp_conf) fakesp = FakeSP(fakesp_conf) - fakeidp_conf = IdPConfig().load(idp_conf, metadata_construction=False) + fakeidp_conf = IdPConfig().load(idp_conf) fakeidp = FakeIdP(USERS, config=fakeidp_conf) destination, request_params = fakesp.make_auth_req( diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index 00890a56e..1e26db460 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -75,7 +75,7 @@ def setup_for_authn_req(self, context, idp_conf, sp_conf, nameid_format=None, re idp_metadata_str = create_metadata_from_config_dict(samlfrontend.idp_config) sp_conf["metadata"]["inline"].append(idp_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) destination, auth_req = fakesp.make_auth_req( samlfrontend.idp_config["entityid"], nameid_format, @@ -94,7 +94,7 @@ def setup_for_authn_req(self, context, idp_conf, sp_conf, nameid_format=None, re return samlfrontend def get_auth_response(self, samlfrontend, context, internal_response, sp_conf, idp_metadata_str): - sp_config = SPConfig().load(sp_conf, 
metadata_construction=False) + sp_config = SPConfig().load(sp_conf) resp_args = { "name_id_policy": NameIDPolicy(format=NAMEID_FORMAT_TRANSIENT), "in_response_to": None, @@ -150,7 +150,7 @@ def test_handle_authn_request(self, context, idp_conf, sp_conf, internal_respons resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) for key in resp.ava: @@ -189,7 +189,7 @@ def test_handle_authn_response_without_relay_state(self, context, idp_conf, sp_c resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) @@ -213,7 +213,7 @@ def test_handle_authn_response_without_name_id( resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response( resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) @@ -548,7 +548,7 @@ def test_co_static_attributes(self, frontend, context, internal_response, # SP configuration fixture with the metadata. 
idp_metadata_str = create_metadata_from_config_dict(idp_conf) sp_conf["metadata"]["inline"].append(idp_metadata_str) - sp_config = SPConfig().load(sp_conf, metadata_construction=False) + sp_config = SPConfig().load(sp_conf) # Use the updated sp_config fixture to generate a fake SP and then # use the fake SP to generate an authentication request aimed at the diff --git a/tests/satosa/metadata_creation/test_saml_metadata.py b/tests/satosa/metadata_creation/test_saml_metadata.py index 49cff97a4..77e8ac1d7 100644 --- a/tests/satosa/metadata_creation/test_saml_metadata.py +++ b/tests/satosa/metadata_creation/test_saml_metadata.py @@ -236,7 +236,7 @@ def test_create_mirrored_metadata_does_not_contain_target_contact_info(self, sat class TestCreateSignedEntitiesDescriptor: @pytest.fixture def entity_desc(self, sp_conf): - return entity_descriptor(SPConfig().load(sp_conf, metadata_construction=True)) + return entity_descriptor(SPConfig().load(sp_conf)) @pytest.fixture def verification_security_context(self, cert_and_key): @@ -274,7 +274,7 @@ def test_valid_for(self, entity_desc, signature_security_context): class TestCreateSignedEntityDescriptor: @pytest.fixture def entity_desc(self, sp_conf): - return entity_descriptor(SPConfig().load(sp_conf, metadata_construction=True)) + return entity_descriptor(SPConfig().load(sp_conf)) @pytest.fixture def verification_security_context(self, cert_and_key): diff --git a/tests/util.py b/tests/util.py index 0e1f5f9fb..c26c796fe 100644 --- a/tests/util.py +++ b/tests/util.py @@ -231,7 +231,7 @@ def handle_auth_req_no_name_id(self, saml_request, relay_state, binding, def create_metadata_from_config_dict(config): nspair = {"xs": "http://www.w3.org/2001/XMLSchema"} - conf = Config().load(config, metadata_construction=True) + conf = Config().load(config) return entity_descriptor(conf).to_string(nspair).decode("utf-8") From adbf458449168fb5ff114e7f476f637ed052df84 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 30 Oct 2020 20:42:42 
+0200 Subject: [PATCH 083/288] Update travis distribution from xenial to bionic Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 3fa650542..7e45d5d75 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,5 @@ os: linux -dist: xenial +dist: bionic language: python services: From 479a56227e49a30898723a55840d0c5f00fbac29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 10 Nov 2020 19:09:41 +0100 Subject: [PATCH 084/288] fix remote metadata config example in README --- doc/README.md | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/README.md b/doc/README.md index b41bcf3aa..f6b28920f 100644 --- a/doc/README.md +++ b/doc/README.md @@ -193,11 +193,10 @@ Metadata from local file: Metadata from remote URL: - "metadata": { - "remote": - - url:https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2 - cert:null - } + "metadata": + remote: + - url: "https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2" + cert: null For more detailed information on how you could customize the SAML entities, see the From 326ec9319c8863f6c1b160a1dfb02c3117b955b8 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 14 Dec 2020 14:17:05 +0200 Subject: [PATCH 085/288] Fix YAML formatting Signed-off-by: Ivan Kanakarakis --- doc/README.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/doc/README.md b/doc/README.md index f6b28920f..3aef52f5e 100644 --- a/doc/README.md +++ b/doc/README.md @@ -306,17 +306,17 @@ basis. This example summarizes the most common settings (hopefully self-explanat ```yaml config: - idp_config: - service: - idp: - policy: - default: - sign_response: True - sign_assertion: False - sign_alg: "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" - digest_alg: "http://www.w3.org/2001/04/xmlenc#sha256" - : - ... 
+ idp_config: + service: + idp: + policy: + default: + sign_response: True + sign_assertion: False + sign_alg: "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" + digest_alg: "http://www.w3.org/2001/04/xmlenc#sha256" + : + ... ``` Overrides per SP entityID is possible by using the entityID as a key instead of the "default" key From 9cbd8d04ff07a261960029cd7ef7422bbb46081f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 14 Dec 2020 14:18:11 +0200 Subject: [PATCH 086/288] Remove reference to sign_alg and digest_alg from documentation Signed-off-by: Ivan Kanakarakis --- doc/README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/README.md b/doc/README.md index 3aef52f5e..157b747b0 100644 --- a/doc/README.md +++ b/doc/README.md @@ -313,8 +313,6 @@ config: default: sign_response: True sign_assertion: False - sign_alg: "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" - digest_alg: "http://www.w3.org/2001/04/xmlenc#sha256" : ... ``` From 04850eeb395b3f19c70507aa1e03201ff787b6a3 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 14 Dec 2020 19:35:14 +0200 Subject: [PATCH 087/288] Deprecate saml2 frontend sign_alg and digest_alg configuration options sign_alg and digest_alg are deprecated; instead, use signing_algorithm and digest_algorithm configurations under the service/idp configuration path (not under policy/default) Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/saml2.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 8c788d749..1af83e2fb 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -377,18 +377,18 @@ def _handle_authn_response(self, context, internal_response, idp): # Construct arguments for method create_authn_response # on IdP Server instance args = { - 'identity' : ava, - 'name_id' : name_id, - 'authn' : auth_info, - 'sign_response' : sign_response, + # Add the SP details + 
**resp_args, + # AuthnResponse data + 'identity': ava, + 'name_id': name_id, + 'authn': auth_info, + 'sign_response': sign_response, 'sign_assertion': sign_assertion, 'encrypt_assertion': encrypt_assertion, - 'encrypted_advice_attributes': encrypted_advice_attributes + 'encrypted_advice_attributes': encrypted_advice_attributes, } - # Add the SP details - args.update(**resp_args) - try: args['sign_alg'] = getattr(xmldsig, sign_alg) except AttributeError as e: @@ -413,6 +413,16 @@ def _handle_authn_response(self, context, internal_response, idp): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) + if 'sign_alg' in args or 'digest_alg' in args: + msg = ( + "sign_alg and digest_alg are deprecated; " + "instead, use signing_algorithm and digest_algorithm " + "under the service/idp configuration path " + "(not under policy/default)." + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.warning(msg) + resp = idp.create_authn_response(**args) http_args = idp.apply_binding( resp_args["binding"], str(resp), resp_args["destination"], From 580c16671667ff4b4ee6d47e0a6572f000698533 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 14 Dec 2020 22:17:52 +0200 Subject: [PATCH 088/288] Fix the saml2 frontend entity-category tests Signed-off-by: Ivan Kanakarakis --- tests/satosa/frontends/test_saml2.py | 60 +++++++++++++++++++--------- 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index 1e26db460..8396a5945 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -285,17 +285,28 @@ def test_acr_mapping_per_idp_in_authn_response(self, context, idp_conf, sp_conf, authn_context_class_ref = resp.assertion.authn_statement[0].authn_context.authn_context_class_ref assert authn_context_class_ref.text == expected_loa - @pytest.mark.parametrize("entity_category, 
entity_category_module, expected_attributes", [ - ([""], "swamid", swamid.RELEASE[""]), - ([COCO], "edugain", edugain.RELEASE[""] + edugain.RELEASE[COCO]), - ([RESEARCH_AND_SCHOLARSHIP], "refeds", refeds.RELEASE[""] + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP]), - ([RESEARCH_AND_EDUCATION, EU], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)]), - ([RESEARCH_AND_EDUCATION, HEI], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)]), - ([RESEARCH_AND_EDUCATION, NREN], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)]), - ([SFS_1993_1153], "swamid", swamid.RELEASE[""] + swamid.RELEASE[SFS_1993_1153]), - ]) - def test_respect_sp_entity_categories(self, context, entity_category, entity_category_module, expected_attributes, - idp_conf, sp_conf, internal_response): + @pytest.mark.parametrize( + "entity_category, entity_category_module, expected_attributes", + [ + ([""], "swamid", swamid.RELEASE[""]), + ([COCO], "edugain", edugain.RELEASE[""] + edugain.RELEASE[COCO]), + ([RESEARCH_AND_SCHOLARSHIP], "refeds", refeds.RELEASE[""] + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP]), + ([RESEARCH_AND_EDUCATION, EU], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)]), + ([RESEARCH_AND_EDUCATION, HEI], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)]), + ([RESEARCH_AND_EDUCATION, NREN], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)]), + ([SFS_1993_1153], "swamid", swamid.RELEASE[""] + swamid.RELEASE[SFS_1993_1153]), + ] + ) + def test_respect_sp_entity_categories( + self, + context, + entity_category, + entity_category_module, + expected_attributes, + idp_conf, + sp_conf, + internal_response + ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = [entity_category_module] if all(entity_category): # don't insert empty entity category @@ -303,10 
+314,18 @@ def test_respect_sp_entity_categories(self, context, entity_category, entity_cat if entity_category == [COCO]: sp_conf["service"]["sp"]["required_attributes"] = expected_attributes - expected_attributes_in_all_entity_categories = list( - itertools.chain(swamid.RELEASE[""], edugain.RELEASE[COCO], refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP], - swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)], swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)], - swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)], swamid.RELEASE[SFS_1993_1153])) + expected_attributes_in_all_entity_categories = set( + itertools.chain( + swamid.RELEASE[""], + edugain.RELEASE[""], + edugain.RELEASE[COCO], + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)], + swamid.RELEASE[SFS_1993_1153], + ) + ) attribute_mapping = {} for expected_attribute in expected_attributes_in_all_entity_categories: attribute_mapping[expected_attribute.lower()] = {"saml": [expected_attribute]} @@ -345,8 +364,9 @@ def test_metadata_endpoint(self, context, idp_conf): assert headers["Content-Type"] == "text/xml" assert idp_conf["entityid"] in resp.message - def test_custom_attribute_release_with_less_attributes_than_entity_category(self, context, idp_conf, sp_conf, - internal_response): + def test_custom_attribute_release_with_less_attributes_than_entity_category( + self, context, idp_conf, sp_conf, internal_response + ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = ["swamid"] sp_conf["entity_category"] = [SFS_1993_1153] @@ -364,8 +384,12 @@ def test_custom_attribute_release_with_less_attributes_than_entity_category(self samlfrontend = self.setup_for_authn_req(context, idp_conf, sp_conf, internal_attributes=internal_attributes, extra_config=dict(custom_attribute_release=custom_attributes)) + 
internal_response.requester = sp_conf["entityid"] resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf, idp_metadata_str) - assert len(resp.ava.keys()) == 0 + assert len(resp.ava.keys()) == ( + len(expected_attributes) + - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) + ) class TestSAMLMirrorFrontend: From 67da43132e894711a8e910f19e131b63909d4ffe Mon Sep 17 00:00:00 2001 From: ctr49 Date: Fri, 8 Jan 2021 17:01:53 +0100 Subject: [PATCH 089/288] improve debugging for attribute mapping --- src/satosa/attribute_mapping.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/satosa/attribute_mapping.py b/src/satosa/attribute_mapping.py index ebb008bc0..e8729561c 100644 --- a/src/satosa/attribute_mapping.py +++ b/src/satosa/attribute_mapping.py @@ -100,8 +100,8 @@ def to_internal(self, attribute_profile, external_dict): attribute_values = self._collate_attribute_values_by_priority_order(external_attribute_name, external_dict) if attribute_values: # Only insert key if it has some values - logline = "backend attribute {external} mapped to {internal}".format( - external=external_attribute_name, internal=internal_attribute_name + logline = "backend attribute {external} mapped to {internal} ({value})".format( + external=external_attribute_name, internal=internal_attribute_name, value=attribute_values ) logger.debug(logline) internal_dict[internal_attribute_name] = attribute_values @@ -205,8 +205,8 @@ def from_internal(self, attribute_profile, internal_dict): external_attribute_names = self.from_internal_attributes[internal_attribute_name][attribute_profile] # select the first attribute name external_attribute_name = external_attribute_names[0] - logline = "frontend attribute {external} mapped from {internal}".format( - external=external_attribute_name, internal=internal_attribute_name + logline = "frontend attribute {external} mapped from {internal} ({value})".format( + 
external=external_attribute_name, internal=internal_attribute_name, value=internal_dict[internal_attribute_name] ) logger.debug(logline) From 444d017d2129d90d8266aa4d5a380d53555384ed Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 20 Jan 2021 15:50:30 +0200 Subject: [PATCH 090/288] Prefer signing_algorithm and digest_algorithm over sign_alg and digest_alg Continuing the deprecation of saml2 frontend sign_alg and digest_alg configuration options (see, 04850eeb395b3f19c70507aa1e03201ff787b6a3). The values of the new options should be preferred when set. Otherwise, we fall back to the deprecate options. Notice that the new configuration options expect the algothim identifier, not an internal symbol. Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/saml2.py | 61 +++++++++++++++++++---------------- 1 file changed, 34 insertions(+), 27 deletions(-) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 1af83e2fb..72cbb84f2 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -362,18 +362,21 @@ def _handle_authn_response(self, context, internal_response, idp): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - policies = self.idp_config.get( - 'service', {}).get('idp', {}).get('policy', {}) + idp_conf = self.idp_config.get('service', {}).get('idp', {}) + policies = idp_conf.get('policy', {}) sp_policy = policies.get('default', {}) sp_policy.update(policies.get(sp_entity_id, {})) sign_assertion = sp_policy.get('sign_assertion', False) sign_response = sp_policy.get('sign_response', True) - sign_alg = sp_policy.get('sign_alg', 'SIG_RSA_SHA256') - digest_alg = sp_policy.get('digest_alg', 'DIGEST_SHA256') encrypt_assertion = sp_policy.get('encrypt_assertion', False) encrypted_advice_attributes = sp_policy.get('encrypted_advice_attributes', False) + signing_algorithm = idp_conf.get('signing_algorithm') + digest_algorithm = idp_conf.get('digest_algorithm') 
+ sign_alg_attr = sp_policy.get('sign_alg', 'SIG_RSA_SHA256') + digest_alg_attr = sp_policy.get('digest_alg', 'DIGEST_SHA256') + # Construct arguments for method create_authn_response # on IdP Server instance args = { @@ -389,31 +392,35 @@ def _handle_authn_response(self, context, internal_response, idp): 'encrypted_advice_attributes': encrypted_advice_attributes, } - try: - args['sign_alg'] = getattr(xmldsig, sign_alg) - except AttributeError as e: - msg = "Unsupported sign algorithm {}".format(sign_alg) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) - raise Exception(msg) from e - else: - msg = "signing with algorithm {}".format(args['sign_alg']) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + args['sign_alg'] = signing_algorithm + if not args['sign_alg']: + try: + args['sign_alg'] = getattr(xmldsig, sign_alg_attr) + except AttributeError as e: + msg = "Unsupported sign algorithm {}".format(sign_alg) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise Exception(msg) from e + + msg = "signing with algorithm {}".format(args['sign_alg']) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) - try: - args['digest_alg'] = getattr(xmldsig, digest_alg) - except AttributeError as e: - msg = "Unsupported digest algorithm {}".format(digest_alg) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) - raise Exception(msg) from e - else: - msg = "using digest algorithm {}".format(args['digest_alg']) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + args['digest_alg'] = digest_algorithm + if not args['digest_alg']: + try: + args['digest_alg'] = getattr(xmldsig, digest_alg_attr) + except AttributeError as e: + msg = "Unsupported digest algorithm 
{}".format(digest_alg) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise Exception(msg) from e + + msg = "using digest algorithm {}".format(args['digest_alg']) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) - if 'sign_alg' in args or 'digest_alg' in args: + if sign_alg_attr or digest_alg_attr: msg = ( "sign_alg and digest_alg are deprecated; " "instead, use signing_algorithm and digest_algorithm " From 21bdada3509c7d53db131a5f8944c563934ba290 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 20 Jan 2021 15:54:10 +0200 Subject: [PATCH 091/288] Release version 7.0.2 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 21 ++++++++++++++++++++- setup.py | 4 ++-- 3 files changed, 23 insertions(+), 4 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 489e6c1c5..80d0aba0f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 7.0.1 +current_version = 7.0.2 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index c813a6ede..f7aaa3613 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## 7.0.2 (2021-01-20) - Security release for pySAML2 dependency + +- Add RegexSubProcessor attribute processor +- Fix SAMLVirtualCoFrontend metadata generation +- frontends: Deprecate the sign_alg and digest_alg configuration options on the + saml2 frontend. 
Instead, use the signing_algorithm and digest_algorithm + configuration options under the service/idp configuration path (not under + service/idp/policy/default) +- backends: New backend to login with Apple ID +- dependencies: Set minimum pysaml2 version to v6.5.0 to make sure we get a + version patched for CVE-2021-21238 and CVE-2021-21239 +- build: Fix the CI base image +- tests: Fix entity-category checks +- docs: Document the sub_hash_salt configuration for the OIDC frontend +- examples: Add entityid_endpoint to the saml backend and frontend + configuration +- examples: Fix the SAMLVirtualCoFrontend example configuration + + ## 7.0.1 (2020-06-09) - build: fix the CI release process @@ -50,7 +69,7 @@ - build: tag docker image by commit, branch, PR number, version and "latest" -## 6.1.0 (2020-02-28) +## 6.1.0 (2020-02-28) - Security release for pySAML2 dependency - Set the SameSite cookie attribute to "None" - Add compatibility support for the SameSite attribute for incompatible diff --git a/setup.py b/setup.py index 3bfe6d94d..27a62a064 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='7.0.1', + version='7.0.2', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', @@ -16,7 +16,7 @@ package_dir={'': 'src'}, install_requires=[ "pyop >= 3.0.1", - "pysaml2 >= 5.0.0", + "pysaml2 >= 6.5.0", "pycryptodomex", "requests", "PyYAML", From 473bf9523606fb75433390a3f0bd8dd04bac0a59 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 21 Jan 2021 01:50:05 +0200 Subject: [PATCH 092/288] Release version 7.0.3 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 6 ++++++ setup.py | 4 ++-- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 80d0aba0f..ebebf4aed 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 7.0.2 +current_version = 7.0.3 commit = True tag = True diff 
--git a/CHANGELOG.md b/CHANGELOG.md index f7aaa3613..812e5a303 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 7.0.3 (2021-01-21) + +- dependencies: Set minimum pysaml2 version to v6.5.1 to fix internal XML + parser issues around the xs and xsd namespace prefixes declarations + + ## 7.0.2 (2021-01-20) - Security release for pySAML2 dependency - Add RegexSubProcessor attribute processor diff --git a/setup.py b/setup.py index 27a62a064..ff12945e0 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='7.0.2', + version='7.0.3', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', @@ -16,7 +16,7 @@ package_dir={'': 'src'}, install_requires=[ "pyop >= 3.0.1", - "pysaml2 >= 6.5.0", + "pysaml2 >= 6.5.1", "pycryptodomex", "requests", "PyYAML", From ce3249dea725d40d5e0916b344cdde53ab6d53dc Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Mon, 1 Feb 2021 15:08:29 +0100 Subject: [PATCH 093/288] Make the ScopeExtractorProcessor usable for the Primary Identifier This patch adds support to use the ScopeExtractorProcessor on the Primary Identifiert which is, in contrast to the other values, a string. 
Closes #348 --- .../micro_services/processors/scope_extractor_processor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/satosa/micro_services/processors/scope_extractor_processor.py b/src/satosa/micro_services/processors/scope_extractor_processor.py index 48e8bda6c..863bc7740 100644 --- a/src/satosa/micro_services/processors/scope_extractor_processor.py +++ b/src/satosa/micro_services/processors/scope_extractor_processor.py @@ -31,6 +31,8 @@ def process(self, internal_data, attribute, **kwargs): values = attributes.get(attribute, []) if not values: raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute)) + if not isinstance(values, list): + values = [values] if not any('@' in val for val in values): raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute)) for value in values: From 04f9f9adae925d050b8ba1fe8f892fce235849ff Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Tue, 2 Feb 2021 10:47:25 +0100 Subject: [PATCH 094/288] Fix Attibure Generation Microservice * Fix broken indentation in the file. Now is is mostly pep8 compatible. 
* Add missing return in `MustachAttrValue.values` * Remove `None` from mustache rendered strings Co-authored-by: Ivan Kanakarakis --- .../micro_services/attribute_generation.py | 62 ++++++++++--------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/src/satosa/micro_services/attribute_generation.py b/src/satosa/micro_services/attribute_generation.py index 485491554..a51c3851d 100644 --- a/src/satosa/micro_services/attribute_generation.py +++ b/src/satosa/micro_services/attribute_generation.py @@ -4,51 +4,52 @@ from .base import ResponseMicroService from ..util import get_dict_defaults + class MustachAttrValue(object): def __init__(self, attr_name, values): - self._attr_name = attr_name - self._values = values - if any(['@' in v for v in values]): - local_parts = [] - domain_parts = [] - scopes = dict() - for v in values: - (local_part, sep, domain_part) = v.partition('@') - # probably not needed now... - local_parts.append(local_part) - domain_parts.append(domain_part) - scopes[domain_part] = True - self._scopes = list(scopes.keys()) - else: - self._scopes = None + self._attr_name = attr_name + self._values = values + if any(['@' in v for v in values]): + local_parts = [] + domain_parts = [] + scopes = dict() + for v in values: + (local_part, sep, domain_part) = v.partition('@') + # probably not needed now... 
+ local_parts.append(local_part) + domain_parts.append(domain_part) + scopes[domain_part] = True + self._scopes = list(scopes.keys()) + else: + self._scopes = None def __str__(self): return ";".join(self._values) @property def values(self): - [{self._attr_name: v} for v in self._values] - - @property + return [{self._attr_name: v} for v in self._values] + + @property def value(self): if len(self._values) == 1: - return self._values[0] + return self._values[0] else: - return self._values + return self._values @property def first(self): if len(self._values) > 0: - return self._values[0] + return self._values[0] else: - return "" + return "" @property def scope(self): if self._scopes is not None: - return self._scopes[0] + return self._scopes[0] return "" - + class AddSyntheticAttributes(ResponseMicroService): """ @@ -124,13 +125,18 @@ def __init__(self, config, *args, **kwargs): def _synthesize(self, attributes, requester, provider): syn_attributes = dict() context = dict() - - for attr_name,values in attributes.items(): - context[attr_name] = MustachAttrValue(attr_name, values) + + for attr_name, values in attributes.items(): + context[attr_name] = MustachAttrValue(attr_name, values) recipes = get_dict_defaults(self.synthetic_attributes, requester, provider) for attr_name, fmt in recipes.items(): - syn_attributes[attr_name] = [v.strip().strip(';') for v in re.split("[;\n]+", pystache.render(fmt, context))] + syn_attributes[attr_name] = [ + value + for token in re.split("[;\n]+", pystache.render(fmt, context)) + for value in [token.strip().strip(';')] + if value + ] return syn_attributes def process(self, context, data): From 8a63fdb290d69ab257ec4172b9ea7e142d7f5469 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Tue, 2 Feb 2021 16:02:15 +0100 Subject: [PATCH 095/288] New Microservice Attribute Policy This patch introduces a new micro_service, which is able to force attribute policies for requester by limiting results to a predefined set of allowed attributes. 
--- doc/README.md | 12 ++++ src/satosa/micro_services/attribute_policy.py | 35 +++++++++++ .../micro_services/test_attribute_policy.py | 58 +++++++++++++++++++ 3 files changed, 105 insertions(+) create mode 100644 src/satosa/micro_services/attribute_policy.py create mode 100644 tests/satosa/micro_services/test_attribute_policy.py diff --git a/doc/README.md b/doc/README.md index 157b747b0..5994aacea 100644 --- a/doc/README.md +++ b/doc/README.md @@ -566,6 +566,18 @@ the string `"foo:bar"`: "attr1": "foo:bar" ``` +#### Apply an Attribute Policy + +Attributes delivered from the target provider can be filtered based on a list of allowed attributes per requester +using the `AttributePolicy` class: +```yaml +attribute_policy: + : + allowed: + - attr1 + - attr2 +``` + #### Route to a specific backend based on the requester To choose which backend (essentially choosing target provider) to use based on the requester, use the `DecideBackendByRequester` class which implements that special routing behavior. See the diff --git a/src/satosa/micro_services/attribute_policy.py b/src/satosa/micro_services/attribute_policy.py new file mode 100644 index 000000000..81151d0e4 --- /dev/null +++ b/src/satosa/micro_services/attribute_policy.py @@ -0,0 +1,35 @@ +import logging + +import satosa.logging_util as lu + +from .base import ResponseMicroService + +logger = logging.getLogger(__name__) + + +class AttributePolicy(ResponseMicroService): + """ + Module to filter Attributes by a given Policy. 
+ """ + + def __init__(self, config, *args, **kwargs): + super().__init__(*args, **kwargs) + self.attribute_policy = config["attribute_policy"] + + def process(self, context, data): + state = context.state + session_id = lu.get_session_id(state) + + msg = "Incoming data.attributes {}".format(data.attributes) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + + policy = self.attribute_policy.get(data.requester, {}) + if "allowed" in policy: + for key in (data.attributes.keys() - set(policy["allowed"])): + del data.attributes[key] + + msg = "Returning data.attributes {}".format(data.attributes) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + return super().process(context, data) diff --git a/tests/satosa/micro_services/test_attribute_policy.py b/tests/satosa/micro_services/test_attribute_policy.py new file mode 100644 index 000000000..f68483025 --- /dev/null +++ b/tests/satosa/micro_services/test_attribute_policy.py @@ -0,0 +1,58 @@ +from satosa.context import Context +from satosa.internal import AuthenticationInformation, InternalData +from satosa.micro_services.attribute_policy import AttributePolicy + + +class TestAttributePolicy: + def create_attribute_policy_service(self, attribute_policies): + attribute_policy_service = AttributePolicy( + config=attribute_policies, + name="test_attribute_policy", + base_url="https://satosa.example.com" + ) + attribute_policy_service.next = lambda ctx, data: data + return attribute_policy_service + + def test_attribute_policy(self): + requester = "requester" + attribute_policies = { + "attribute_policy": { + "requester_everything_allowed": {}, + "requester_nothing_allowed": { + "allowed": {} + }, + "requester_subset_allowed": { + "allowed": { + "attr1", + "attr2", + }, + }, + }, + } + attributes = { + "attr1": ["foo"], + "attr2": ["foo", "bar"], + "attr3": ["foo"] + } + results = { + "requester_everything_allowed": attributes.keys(), + 
"requester_nothing_allowed": set(), + "requester_subset_allowed": {"attr1", "attr2"}, + } + for requester, result in results.items(): + attribute_policy_service = self.create_attribute_policy_service( + attribute_policies) + + ctx = Context() + ctx.state = dict() + + resp = InternalData(auth_info=AuthenticationInformation()) + resp.requester = requester + resp.attributes = { + "attr1": ["foo"], + "attr2": ["foo", "bar"], + "attr3": ["foo"] + } + + filtered = attribute_policy_service.process(ctx, resp) + assert(filtered.attributes.keys() == result) From 7661bda09af3e443b0bbab523010acfcaadb6e58 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Tue, 2 Feb 2021 16:38:17 +0100 Subject: [PATCH 096/288] Make the ID token lifetime configurable This patch adds a new configuration option to the pyop Provider making it possible to configure the lifetime of the ID token. --- doc/README.md | 1 + .../plugins/frontends/openid_connect_frontend.yaml.example | 5 +++-- src/satosa/frontends/openid_connect.py | 1 + 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/README.md b/doc/README.md index 157b747b0..15864e3d7 100644 --- a/doc/README.md +++ b/doc/README.md @@ -442,6 +442,7 @@ The configuration parameters available: * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) + * `id_token_lifetime`: the lifetime of the ID token in seconds, see [default](https://github.com/SUNET/pyop#token-lifetimes) The other parameters should be left with their default values. 
diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 1006302e2..1b39cf718 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -12,5 +12,6 @@ config: scopes_supported: ["openid", "email"] extra_scopes: foo_scope: - - bar_claim - - baz_claim + - bar_claim + - baz_claim + id_token_lifetime: 600 diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 1e0d20793..172822933 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -93,6 +93,7 @@ def _create_provider(self, endpoint_baseurl): cdb, Userinfo(self.user_db), extra_scopes=extra_scopes, + id_token_lifetime=self.config["provider"].get("id_token_lifetime", 600), ) def _init_authorization_state(self): From 84c02347010ebf2b9551a4f0ed0375ec25e81c0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 8 Mar 2021 12:03:32 +0100 Subject: [PATCH 097/288] feat: add support for the Scoping element and RequesterID in SAML2 backend --- example/plugins/backends/saml2_backend.yaml.example | 1 + src/satosa/backends/saml2.py | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index d5ec7cb56..07b81eb14 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -7,6 +7,7 @@ config: mirror_force_authn: no memorize_idp: no use_memorized_idp_when_force_authn: no + send_requester_id: no sp_config: key_file: backend.key diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index d855080a2..dec50af0e 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -9,6 +9,7 @@ from base64 import urlsafe_b64encode from urllib.parse 
import urlparse +from saml2 import samlp from saml2 import BINDING_HTTP_REDIRECT from saml2.client_base import Base from saml2.config import SPConfig @@ -79,6 +80,7 @@ class SAMLBackend(BackendModule, SAMLBaseModule): KEY_SAML_DISCOVERY_SERVICE_URL = 'saml_discovery_service_url' KEY_SAML_DISCOVERY_SERVICE_POLICY = 'saml_discovery_service_policy' KEY_SP_CONFIG = 'sp_config' + KEY_SEND_REQUESTER_ID = 'send_requester_id' KEY_MIRROR_FORCE_AUTHN = 'mirror_force_authn' KEY_MEMORIZE_IDP = 'memorize_idp' KEY_USE_MEMORIZED_IDP_WHEN_FORCE_AUTHN = 'use_memorized_idp_when_force_authn' @@ -263,6 +265,10 @@ def authn_request(self, context, entity_id): kwargs["force_authn"] = get_force_authn( context, self.config, self.sp.config ) + if self.config.get(SAMLBackend.KEY_SEND_REQUESTER_ID): + kwargs["scoping"] = samlp.Scoping(requester_id=[samlp.RequesterID()]) + requesterID = context.state.state_dict['SATOSA_BASE']['requester'] + kwargs["scoping"].requester_id[0].text = requesterID try: binding, destination = self.sp.pick_binding( From 9874cb211a1ddf9273baed39c77621499fae75ea Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 8 Mar 2021 13:31:52 +0200 Subject: [PATCH 098/288] Minor refactor Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index dec50af0e..2640fb9db 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -9,18 +9,20 @@ from base64 import urlsafe_b64encode from urllib.parse import urlparse -from saml2 import samlp from saml2 import BINDING_HTTP_REDIRECT from saml2.client_base import Base from saml2.config import SPConfig from saml2.extension.mdui import NAMESPACE as UI_NAMESPACE from saml2.metadata import create_metadata_string from saml2.authn_context import requested_authn_context +from saml2.samlp import RequesterID +from saml2.samlp import Scoping import satosa.logging_util as lu 
import satosa.util as util from satosa.base import SAMLBaseModule from satosa.base import SAMLEIDASBaseModule +from satosa.base import STATE_KEY as STATE_KEY_BASE from satosa.context import Context from satosa.internal import AuthenticationInformation from satosa.internal import InternalData @@ -266,9 +268,8 @@ def authn_request(self, context, entity_id): context, self.config, self.sp.config ) if self.config.get(SAMLBackend.KEY_SEND_REQUESTER_ID): - kwargs["scoping"] = samlp.Scoping(requester_id=[samlp.RequesterID()]) - requesterID = context.state.state_dict['SATOSA_BASE']['requester'] - kwargs["scoping"].requester_id[0].text = requesterID + requester = context.state.state_dict[STATE_KEY_BASE]['requester'] + kwargs["scoping"] = Scoping(requester_id=[RequesterID(text=requester)]) try: binding, destination = self.sp.pick_binding( From 8cc547e40c5f4edf9622608f83501e9df6c0d001 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Mon, 8 Mar 2021 13:33:28 +0100 Subject: [PATCH 099/288] Apply suggestions from code review Co-authored-by: Ivan Kanakarakis --- doc/README.md | 3 +-- example/plugins/frontends/openid_connect_frontend.yaml.example | 2 +- src/satosa/frontends/openid_connect.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/doc/README.md b/doc/README.md index 15864e3d7..a8ccdeb57 100644 --- a/doc/README.md +++ b/doc/README.md @@ -442,7 +442,7 @@ The configuration parameters available: * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) - * `id_token_lifetime`: the lifetime 
of the ID token in seconds, see [default](https://github.com/SUNET/pyop#token-lifetimes) + * `id_token_lifetime`: the lifetime of the ID token in seconds - the default is set to 1hr (3600 seconds) (see [default](https://github.com/SUNET/pyop#token-lifetimes)) The other parameters should be left with their default values. @@ -693,4 +693,3 @@ set SATOSA_CONFIG=/home/user/proxy_conf.yaml See the [auxiliary documentation for running using mod\_wsgi](mod_wsgi.md). - diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 1b39cf718..05b47e803 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -14,4 +14,4 @@ config: foo_scope: - bar_claim - baz_claim - id_token_lifetime: 600 + id_token_lifetime: 3600 diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 172822933..0f96c331e 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -93,7 +93,7 @@ def _create_provider(self, endpoint_baseurl): cdb, Userinfo(self.user_db), extra_scopes=extra_scopes, - id_token_lifetime=self.config["provider"].get("id_token_lifetime", 600), + id_token_lifetime=self.config["provider"].get("id_token_lifetime", 3600), ) def _init_authorization_state(self): From c678a875ab44775f60140da03f444bfe6a0bd80c Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Tue, 2 Feb 2021 17:29:17 +0100 Subject: [PATCH 100/288] New Option to prefer cdb from file over cdb from MongoDB This patch adds the option to set the `client_db_uri` additional to the `db_uri`. Order for the client database is `client_db_uri`, `client_db_path`, in-memory. 
--- doc/README.md | 2 ++ src/satosa/frontends/openid_connect.py | 5 +++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/README.md b/doc/README.md index 157b747b0..b6c68d8c0 100644 --- a/doc/README.md +++ b/doc/README.md @@ -430,6 +430,8 @@ The configuration parameters available: * `signing_key_path`: path to a RSA Private Key file (PKCS#1). MUST be configured. * `db_uri`: connection URI to MongoDB instance where the data will be persisted, if it's not specified all data will only be stored in-memory (not suitable for production use). +* `client_db_uri`: connection URI to MongoDB instance where the client data will be persistent, if it's not specified the clients list will be received from the `client_db_path`. +* `client_db_path`: path to a file containing the client database in json format. It will only be used if `client_db_uri` is not set. If `client_db_uri` and `client_db_path` are not set, clients will only be stored in-memory (not suitable for production use). * `sub_hash_salt`: salt which is hashed into the `sub` claim. If it's not specified, SATOSA will generate a random salt on each startup, which means that users will get new `sub` value after every restart. * `provider`: provider configuration information. MUST be configured, the following configuration are supported: * `response_types_supported` (default: `[id_token]`): list of all supported response types, see [Section 3 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#Authentication). 
diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 1e0d20793..7172a0abc 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -77,9 +77,10 @@ def _create_provider(self, endpoint_baseurl): authz_state = self._init_authorization_state() db_uri = self.config.get("db_uri") + client_db_uri = self.config.get("client_db_uri") cdb_file = self.config.get("client_db_path") - if db_uri: - cdb = MongoWrapper(db_uri, "satosa", "clients") + if client_db_uri: + cdb = MongoWrapper(client_db_uri, "satosa", "clients") elif cdb_file: with open(cdb_file) as f: cdb = json.loads(f.read()) From 47f3b1a3ce86644886f3b0ded28783341dc1f471 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Thu, 11 Mar 2021 10:39:02 +0100 Subject: [PATCH 101/288] Add the Requester ID to the Consent Call This patch adds the requester to the consent call. This way we are able to show e.g. requester specific AGBs. --- src/satosa/micro_services/consent.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/satosa/micro_services/consent.py b/src/satosa/micro_services/consent.py index afad940e2..3823826da 100644 --- a/src/satosa/micro_services/consent.py +++ b/src/satosa/micro_services/consent.py @@ -91,6 +91,7 @@ def _approve_new_consent(self, context, internal_response, id_hash): "attr": internal_response.attributes, "id": id_hash, "redirect_endpoint": "%s/consent%s" % (self.base_url, self.endpoint), + "requester": internal_response.requester, "requester_name": internal_response.requester_name, } if self.locked_attr: From 2d6a8ab645f4808989e6024bf7c22bef74ee8f6e Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Fri, 12 Mar 2021 10:50:19 +0100 Subject: [PATCH 102/288] Option to add Extra Claims to ID Token Some OIDC clients expect extra claims in the ID Token without explicitly asking for them using the `claims` url parameter. 
This patch adds an option to define in the config per client which extra claims should be added to the ID Token to also work with those clients. Co-authored-by: --- .../openid_connect_frontend.yaml.example | 4 +++ src/satosa/frontends/openid_connect.py | 18 ++++++++-- tests/satosa/frontends/test_openid_connect.py | 36 +++++++++++++++++++ 3 files changed, 55 insertions(+), 3 deletions(-) diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 05b47e803..bc941bd1c 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -15,3 +15,7 @@ config: - bar_claim - baz_claim id_token_lifetime: 3600 + extra_id_token_claims: + foo_client: + - bar_claim + - baz_claim diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 0f96c331e..dac68ed14 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -118,7 +118,15 @@ def _init_authorization_state(self): return AuthorizationState(HashBasedSubjectIdentifierFactory(sub_hash_salt), authz_code_db, access_token_db, refresh_token_db, sub_db, **token_lifetimes) - def handle_authn_response(self, context, internal_resp, extra_id_token_claims=None): + def _get_extra_id_token_claims(self, user_id, client_id): + if "extra_id_token_claims" in self.config["provider"]: + config = self.config["provider"]["extra_id_token_claims"].get(client_id, []) + if type(config) is list and len(config) > 0: + requested_claims = {k: None for k in config} + return self.provider.userinfo.get_claims_for(user_id, requested_claims) + return {} + + def handle_authn_response(self, context, internal_resp): """ See super class method satosa.frontends.base.FrontendModule#handle_authn_response :type context: satosa.context.Context @@ -133,7 +141,8 @@ def handle_authn_response(self, context, internal_resp, 
extra_id_token_claims=No auth_resp = self.provider.authorize( auth_req, internal_resp.subject_id, - extra_id_token_claims=extra_id_token_claims, + extra_id_token_claims=lambda user_id, client_id: + self._get_extra_id_token_claims(user_id, client_id), ) del context.state[self.name] @@ -360,7 +369,10 @@ def token_endpoint(self, context): """ headers = {"Authorization": context.request_authorization} try: - response = self.provider.handle_token_request(urlencode(context.request), headers) + response = self.provider.handle_token_request( + urlencode(context.request), + headers, + lambda user_id, client_id: self._get_extra_id_token_claims(user_id, client_id)) return Response(response.to_json(), content="application/json") except InvalidClientAuthentication as e: logline = "invalid client authentication at token endpoint" diff --git a/tests/satosa/frontends/test_openid_connect.py b/tests/satosa/frontends/test_openid_connect.py index b33a16703..cb322e680 100644 --- a/tests/satosa/frontends/test_openid_connect.py +++ b/tests/satosa/frontends/test_openid_connect.py @@ -71,6 +71,21 @@ def frontend_config_with_extra_scopes(self, signing_key_path): return config + @pytest.fixture + def frontend_config_with_extra_id_token_claims(self, signing_key_path): + config = { + "signing_key_path": signing_key_path, + "provider": { + "response_types_supported": ["code", "id_token", "code id_token token"], + "scopes_supported": ["openid", "email"], + "extra_id_token_claims": { + CLIENT_ID: ["email"], + } + }, + } + + return config + def create_frontend(self, frontend_config): # will use in-memory storage instance = OpenIDConnectFrontend(lambda ctx, req: None, INTERNAL_ATTRIBUTES, @@ -409,6 +424,27 @@ def test_token_endpoint(self, context, frontend_config, authn_req): assert parsed["expires_in"] == token_lifetime assert parsed["id_token"] + def test_token_endpoint_with_extra_claims(self, context, frontend_config_with_extra_id_token_claims, authn_req): + frontend = 
self.create_frontend(frontend_config_with_extra_id_token_claims) + + user_id = "test_user" + self.insert_client_in_client_db(frontend, authn_req["redirect_uri"]) + self.insert_user_in_user_db(frontend, user_id) + authn_req["response_type"] = "code" + authn_resp = frontend.provider.authorize(authn_req, user_id) + + context.request = AccessTokenRequest(redirect_uri=authn_req["redirect_uri"], code=authn_resp["code"]).to_dict() + credentials = "{}:{}".format(CLIENT_ID, CLIENT_SECRET) + basic_auth = urlsafe_b64encode(credentials.encode("utf-8")).decode("utf-8") + context.request_authorization = "Basic {}".format(basic_auth) + + response = frontend.token_endpoint(context) + parsed = AccessTokenResponse().deserialize(response.message, "json") + assert parsed["access_token"] + + id_token = IdToken().from_jwt(parsed["id_token"], key=[frontend.signing_key]) + assert id_token["email"] == "test@example.com" + def test_token_endpoint_issues_refresh_tokens_if_configured(self, context, frontend_config, authn_req): frontend_config["provider"]["refresh_token_lifetime"] = 60 * 60 * 24 * 365 frontend = OpenIDConnectFrontend(lambda ctx, req: None, INTERNAL_ATTRIBUTES, From 87604bdfe76fbd3485e3718b4229b4582e9dd135 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Fri, 12 Mar 2021 10:55:40 +0100 Subject: [PATCH 103/288] Add flake8 Config in Tox This patch adds a config section for flake8 to tox and also add an ignore for long lines, since it seems like these errors are already actively ignored. Some IDEs (tested only vscode) will pick this up and make working a lot easier and clearer. 
--- tox.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tox.ini b/tox.ini index 4d69d943e..134af7e1f 100644 --- a/tox.ini +++ b/tox.ini @@ -17,3 +17,6 @@ commands = pip --version pip freeze pytest -vvv -ra {posargs:tests/} + +[flake8] +ignore = E501 From 01c51ae99cdf36a6350d98ee8c549e6e4a6bdb7a Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Fri, 12 Mar 2021 11:00:59 +0100 Subject: [PATCH 104/288] Remove some deprecated mongod flags To make the tests work for me with MongoDB v4.4.4, I had to remove some deprecated flags from the mongod. --- tests/conftest.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ef09cd753..bc04eb2b8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -396,10 +396,9 @@ def __init__(self): self._process = subprocess.Popen(['mongod', '--bind_ip', 'localhost', '--port', str(self._port), '--dbpath', self._tmpdir, - '--nojournal', '--nohttpinterface', - '--noauth', '--smallfiles', - '--syncdelay', '0', - '--nssize', '1', ], + '--nojournal', + '--noauth', + '--syncdelay', '0'], stdout=open('/tmp/mongo-temp.log', 'wb'), stderr=subprocess.STDOUT) From ae7d3ea9e0d9785fd960749de9b649886f725413 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Mon, 15 Mar 2021 10:03:02 +0100 Subject: [PATCH 105/288] Handle empty Attributes in Attribute Generation `MustachAttrValue` in `attribute_generation.py` cannot handle empty attributes (`None`), since it expects a list, and crashes hard. This patch makes sure that `MustachAttrValue` always receives a list as values. Tests included. 
--- .../micro_services/attribute_generation.py | 5 ++++- .../micro_services/test_attribute_generation.py | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/satosa/micro_services/attribute_generation.py b/src/satosa/micro_services/attribute_generation.py index a51c3851d..7c99a8fa7 100644 --- a/src/satosa/micro_services/attribute_generation.py +++ b/src/satosa/micro_services/attribute_generation.py @@ -127,7 +127,10 @@ def _synthesize(self, attributes, requester, provider): context = dict() for attr_name, values in attributes.items(): - context[attr_name] = MustachAttrValue(attr_name, values) + context[attr_name] = MustachAttrValue( + attr_name, + values if values is not None else [] + ) recipes = get_dict_defaults(self.synthetic_attributes, requester, provider) for attr_name, fmt in recipes.items(): diff --git a/tests/satosa/micro_services/test_attribute_generation.py b/tests/satosa/micro_services/test_attribute_generation.py index be4fd9ab9..e60ab36fc 100644 --- a/tests/satosa/micro_services/test_attribute_generation.py +++ b/tests/satosa/micro_services/test_attribute_generation.py @@ -63,3 +63,20 @@ def test_generate_mustache2(self): assert("kaka1" in resp.attributes['kaka']) assert("a@example.com" in resp.attributes['eppn']) assert("b@example.com" in resp.attributes['eppn']) + + def test_generate_mustache_empty_attribute(self): + synthetic_attributes = { + "": {"default": {"a0": "{{kaka.first}}#{{eppn.scope}}"}} + } + authz_service = self.create_syn_service(synthetic_attributes) + resp = InternalData(auth_info=AuthenticationInformation()) + resp.attributes = { + "kaka": ["kaka1", "kaka2"], + "eppn": None, + } + ctx = Context() + ctx.state = dict() + authz_service.process(ctx, resp) + assert("kaka1#" in resp.attributes['a0']) + assert("kaka1" in resp.attributes['kaka']) + assert("kaka2" in resp.attributes['kaka']) From 6a4b992eed39c80135c2f40937904f61e599a3a4 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Mon, 15 Mar 2021 
14:35:21 +0100 Subject: [PATCH 106/288] Set Primary Identifer only if it exists This patch ensures that the primary identifier is only set, if it is present and not `None`. --- src/satosa/micro_services/primary_identifier.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 8b41b65c5..43b25bde6 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -250,13 +250,14 @@ def process(self, context, data): logger.debug(logline) data.attributes = {} - # Set the primary identifier attribute to the value found. - data.attributes[primary_identifier] = primary_identifier_val - msg = "{} Setting attribute {} to value {}".format( - logprefix, primary_identifier, primary_identifier_val - ) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + if primary_identifier: + # Set the primary identifier attribute to the value found. + data.attributes[primary_identifier] = primary_identifier_val + msg = "{} Setting attribute {} to value {}".format( + logprefix, primary_identifier, primary_identifier_val + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) msg = "{} returning data.attributes {}".format(logprefix, str(data.attributes)) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) From dd9afbc93e90c818480e0c5cd7fbfbb5bc1bc8ac Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Mon, 15 Mar 2021 16:52:47 +0100 Subject: [PATCH 107/288] Filter empty Claims This patch adds a filter to the claims to remove empty lists or `None` values. They are not needed and they break hard during the claim parsing. 
--- src/satosa/frontends/openid_connect.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 0f96c331e..9ee267581 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -129,6 +129,8 @@ def handle_authn_response(self, context, internal_resp, extra_id_token_claims=No auth_req = self._get_authn_request_from_state(context.state) claims = self.converter.from_internal("openid", internal_resp.attributes) + # Filter unset claims + claims = {k: v for k, v in claims.items() if v} self.user_db[internal_resp.subject_id] = dict(combine_claim_values(claims.items())) auth_resp = self.provider.authorize( auth_req, From 7f43c15492467b5b41063611645dba5b6518708a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 15 Mar 2021 21:01:19 +0200 Subject: [PATCH 108/288] Fix tests by setting client_db_uri Signed-off-by: Ivan Kanakarakis --- tests/flows/test_oidc-saml.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index e5888fb8f..aa61c151f 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -32,6 +32,7 @@ def oidc_frontend_config(signing_key_path, mongodb_instance): "issuer": "https://proxy-op.example.com", "signing_key_path": signing_key_path, "provider": {"response_types_supported": ["id_token"]}, + "client_db_uri": mongodb_instance.get_uri(), # use mongodb for integration testing "db_uri": mongodb_instance.get_uri() # use mongodb for integration testing } } From ba86be233cd5dc8bcf0b8e3db7da99709e955b6d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 6 Apr 2021 15:06:36 +0300 Subject: [PATCH 109/288] Fix references to undefined variables Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/saml2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 
72cbb84f2..c165e1027 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -397,7 +397,7 @@ def _handle_authn_response(self, context, internal_response, idp): try: args['sign_alg'] = getattr(xmldsig, sign_alg_attr) except AttributeError as e: - msg = "Unsupported sign algorithm {}".format(sign_alg) + msg = "Unsupported sign algorithm {}".format(sign_alg_attr) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) raise Exception(msg) from e @@ -411,7 +411,7 @@ def _handle_authn_response(self, context, internal_response, idp): try: args['digest_alg'] = getattr(xmldsig, digest_alg_attr) except AttributeError as e: - msg = "Unsupported digest algorithm {}".format(digest_alg) + msg = "Unsupported digest algorithm {}".format(digest_alg_attr) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) raise Exception(msg) from e From 755c05314b1128071c083aa6c2f5c65847a0e438 Mon Sep 17 00:00:00 2001 From: peppelinux Date: Sun, 11 Apr 2021 18:10:29 +0200 Subject: [PATCH 110/288] feat: added some useful http headers to context for future Micro Services --- src/satosa/proxy_server.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index a3c336145..1b41dabc7 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -65,6 +65,13 @@ def unpack_request(environ, content_length=0): return data +def unpack_http_headers(environ): + headers = ('REQUEST_METHOD', 'PATH_INFO', 'REQUEST_URI', + 'QUERY_STRING', 'SERVER_NAME', 'REMOTE_ADDR', + 'HTTP_HOST', 'HTTP_USER_AGENT', 'HTTP_ACCEPT_LANGUAGE') + return {k:v for k,v in environ.items() if k in headers} + + class ToBytesMiddleware(object): """Converts a message to bytes to be sent by WSGI server.""" @@ -109,6 +116,7 @@ def __call__(self, environ, start_response, debug=False): body = io.BytesIO(environ['wsgi.input'].read(content_length)) 
environ['wsgi.input'] = body context.request = unpack_request(environ, content_length) + context._http_headers = unpack_http_headers(environ) environ['wsgi.input'].seek(0) context.cookie = environ.get("HTTP_COOKIE", "") From fab68383ead9f3710cd54ce003867075e956e812 Mon Sep 17 00:00:00 2001 From: Thijs Kinkhorst Date: Tue, 4 May 2021 12:06:11 +0200 Subject: [PATCH 111/288] Update some URLs in the README --- doc/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/README.md b/doc/README.md index cb053975e..18aae0516 100644 --- a/doc/README.md +++ b/doc/README.md @@ -17,7 +17,7 @@ apt-get install libffi-dev libssl-dev xmlsec1 ```` ### Instructions -1. Download the SATOSA proxy project as a [compressed archive](https://github.com/SUNET/SATOSA/releases) +1. Download the SATOSA proxy project as a [compressed archive](https://github.com/IdentityPython/SATOSA/releases) and unpack it to ``. 1. Install the application: @@ -440,11 +440,11 @@ The configuration parameters available: * `client_registration_supported` (default: `No`): boolean whether [dynamic client registration is supported](https://openid.net/specs/openid-connect-registration-1_0.html). If dynamic client registration is not supported all clients must exist in the MongoDB instance configured by the `db_uri` in the `"clients"` collection of the `"satosa"` database. The registration info must be stored using the client id as a key, and use the parameter names of a [OIDC Registration Response](https://openid.net/specs/openid-connect-registration-1_0.html#RegistrationResponse). 
- * `authorization_code_lifetime`: how long authorization codes should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) - * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) - * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) - * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) - * `id_token_lifetime`: the lifetime of the ID token in seconds - the default is set to 1hr (3600 seconds) (see [default](https://github.com/SUNET/pyop#token-lifetimes)) + * `authorization_code_lifetime`: how long authorization codes should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) + * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) + * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/IdentityPython/pyop#token-lifetimes)) + * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/IdentityPython/pyop#token-lifetimes)) + * `id_token_lifetime`: the lifetime of the ID token in seconds - the default is set to 1hr (3600 seconds) (see [default](https://github.com/IdentityPython/pyop#token-lifetimes)) The other parameters should be left with their default values. 
From df4603e1ee3368cc6bdcbd1fc54bd1258bb9cc6a Mon Sep 17 00:00:00 2001 From: Thijs Kinkhorst Date: Fri, 7 May 2021 15:29:11 +0200 Subject: [PATCH 112/288] Fix table of contents from top-level README. Add some anchors to be able to fix links on top level. --- README.md | 9 +++++---- doc/README.md | 16 +++++++++------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index daefcd7e3..b790e0024 100644 --- a/README.md +++ b/README.md @@ -22,15 +22,16 @@ OpenID Connect and OAuth2. - [Plugins](doc/README.md#plugins) - [SAML2 plugins](doc/README.md#saml_plugin) - [Metadata](doc/README.md#metadata) - - [Frontend](doc/README.md#frontend) - - [Backend](doc/README.md#backend) + - [Frontend](doc/README.md#saml_frontend) + - [Backend](doc/README.md#saml_backend) - [Name ID Format](doc/README.md#name_id) - [OpenID Connect plugins](doc/README.md#openid_plugin) - - [Backend](doc/README.md#backend) + - [Backend](doc/README.md#openid_backend) + - [Frontend](doc/README.md#openid_frontend) - [Social login plugins](doc/README.md#social_plugins) - [Google](doc/README.md#google) - [Facebook](doc/README.md#facebook) -- [SAML metadata](doc/README.md#saml_metadata) +- [Generating proxy metadata](doc/README.md#saml_proxy_metadata) - [Running the proxy application](doc/README.md#run) diff --git a/doc/README.md b/doc/README.md index 18aae0516..572d995a7 100644 --- a/doc/README.md +++ b/doc/README.md @@ -181,6 +181,8 @@ Common configuration parameters: | `entityid_endpoint` | bool | `true` | whether `entityid` should be used as a URL that serves the metadata xml document | `acr_mapping` | dict | `None` | custom Authentication Context Class Reference +#### Metadata + The metadata could be loaded in multiple ways in the table above it's loaded from a static file by using the key "local". It's also possible to load read the metadata from a remote URL. 
@@ -203,7 +205,7 @@ see the [documentation of the underlying library pysaml2](https://github.com/rohe/pysaml2/blob/master/docs/howto/config.rst). -##### Providing `AuthnContextClassRef` +#### Providing `AuthnContextClassRef` SAML2 frontends and backends can provide a custom (configurable) *Authentication Context Class Reference*. For the frontend this is defined in the `AuthnStatement` of the authentication response, while, @@ -231,7 +233,7 @@ provider will be preserved, and when using a OAuth or OpenID Connect backend, th "https://accounts.google.com": LoA1 -#### Frontend +#### Frontend The SAML2 frontend act as a SAML Identity Provider (IdP), accepting authentication requests from SAML Service Providers (SP). The default @@ -299,7 +301,7 @@ config: exclude: ["givenName"] ``` -#### Policy +##### Policy Some settings related to how a SAML response is formed can be overriden on a per-instance or a per-SP basis. This example summarizes the most common settings (hopefully self-explanatory) with their defaults: @@ -322,7 +324,7 @@ in the yaml structure. The most specific key takes presedence. If no policy over the defaults above are used. -#### Backend +#### Backend The SAML2 backend act as a SAML Service Provider (SP), making authentication requests to SAML Identity Providers (IdP). The default configuration file can be found [here](../example/plugins/backends/saml2_backend.yaml.example). @@ -404,7 +406,7 @@ config: ### OpenID Connect plugins -#### Backend +#### Backend The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making authentication requests to OpenID Connect Provider (OP). The default configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). @@ -417,7 +419,7 @@ and make sure to provide the redirect URI, constructed as described in the section about Google configuration below, in the static registration. 
-#### Frontend +#### Frontend The OpenID Connect frontend acts as and OpenID Connect Provider (OP), accepting requests from OpenID Connect Relying Parties (RPs). The default configuration file can be found [here](../example/plugins/frontends/openid_connect_frontend.yaml.example). @@ -663,7 +665,7 @@ methods: * Request micro services must inherit `satosa.micro_services.base.RequestMicroService`. * Request micro services must inherit `satosa.micro_services.base.ResponseMicroService`. -# Generate proxy metadata +# Generate proxy metadata The proxy metadata is generated based on the front-/backend plugins listed in `proxy_conf.yaml` using the `satosa-saml-metadata` (installed globally by SATOSA installation). From d787736baa37b8e55a1c2d2b3225ea8c286e0c7b Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 14:47:11 +1200 Subject: [PATCH 113/288] fix: doc/README.md: fix typo (Request=>Response) --- doc/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index cb053975e..8846981f9 100644 --- a/doc/README.md +++ b/doc/README.md @@ -661,7 +661,7 @@ methods: * Frontends must inherit `satosa.frontends.base.FrontendModule`. * Backends must inherit `satosa.backends.base.BackendModule`. * Request micro services must inherit `satosa.micro_services.base.RequestMicroService`. -* Request micro services must inherit `satosa.micro_services.base.ResponseMicroService`. +* Response micro services must inherit `satosa.micro_services.base.ResponseMicroService`. 
# Generate proxy metadata From 4f26f91d834757c179fc76d5fa12a4b183278d31 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 14:47:51 +1200 Subject: [PATCH 114/288] fix: example/ldap_attribute_store: fix YAML syntax Quote where needed, add missing : --- .../plugins/microservices/ldap_attribute_store.yaml.example | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index a83873a9b..05dfa7355 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -7,7 +7,7 @@ config: # the authenticating IdP, or the entityID of the CO virtual IdP. # The key "default" specifies the default configuration default: - ldap_url: ldaps://ldap.example.org + ldap_url: "ldaps://ldap.example.org" bind_dn: cn=admin,dc=example,dc=org # Obtain bind password from environment variable LDAP_BIND_PASSWORD. bind_password: !ENV LDAP_BIND_PASSWORD @@ -114,7 +114,7 @@ config: user_id_from_attrs: - uid - https://federation-proxy.my.edu/satosa/idp/proxy/some_co + https://federation-proxy.my.edu/satosa/idp/proxy/some_co: search_base: ou=People,o=some_co,dc=example,dc=org # The microservice may be configured to ignore a particular entityID. 
From 4288ce29c5192898195ed8e86b53390e2cbcd3cd Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 15:00:54 +1200 Subject: [PATCH 115/288] fix: example: fix module name in example files --- example/plugins/microservices/ldap_attribute_store.yaml.example | 2 +- example/plugins/microservices/primary_identifier.yaml.example | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 05dfa7355..4efe85072 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -1,4 +1,4 @@ -module: LdapAttributeStore +module: satosa.micro_services.ldap_attribute_store.LdapAttributeStore name: LdapAttributeStore config: diff --git a/example/plugins/microservices/primary_identifier.yaml.example b/example/plugins/microservices/primary_identifier.yaml.example index dbc13dbf7..806b72f87 100644 --- a/example/plugins/microservices/primary_identifier.yaml.example +++ b/example/plugins/microservices/primary_identifier.yaml.example @@ -1,4 +1,4 @@ -module: PrimaryIdentifier +module: satosa.micro_services.primary_identifier.PrimaryIdentifier name: PrimaryIdentifier config: # The ordered identifier candidates are searched in order From cba6cd087178281c6eb42432cea11146f3292662 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 15:01:51 +1200 Subject: [PATCH 116/288] fix: PrimaryIdentifier: exclude name_id from attribute value search Attribute 'name_id' gets special handling in other parts of the code - and the way attribute values were fetched, name_id would always throw a None value in, which would later cause this candidate to be rejected with Candidate is missing value so skipping This makes name_id work with PrimaryIdentifier --- src/satosa/micro_services/primary_identifier.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 43b25bde6..1c41878a8 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -54,7 +54,7 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): # Get the values asserted by the IdP for the configured list of attribute names for this candidate # and substitute None if the IdP did not assert any value for a configured attribute. - values = [ attributes.get(attribute_name, [None])[0] for attribute_name in candidate['attribute_names'] ] + values = [ attributes.get(attribute_name, [None])[0] for attribute_name in candidate['attribute_names'] if attribute_name != 'name_id' ] msg = "{} Found candidate values {}".format(logprefix, values) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From 4d3020e205862cb0caf517548dd24947ae2610cb Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 15:21:57 +1200 Subject: [PATCH 117/288] new: PrimaryIdentifier: add replace_subject_id option Allow replacing subject_id with the constructed primary identifier (off by default) This would normally be accomplished with user_id_from_attrs in internal_attributes.yaml - but as _auth_resp_callback_func in satosa/base.py processes user_id_from_attrs BEFORE calling ResponseMicroServices, the PrimaryIdentifier MicroService comes in too late. This allows letting OpenIDConnect front-end use a stable identifier produced by the PrimaryIdentifier MicroService. 
--- .../microservices/primary_identifier.yaml.example | 2 ++ src/satosa/micro_services/primary_identifier.py | 15 +++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/example/plugins/microservices/primary_identifier.yaml.example b/example/plugins/microservices/primary_identifier.yaml.example index 806b72f87..0406f578e 100644 --- a/example/plugins/microservices/primary_identifier.yaml.example +++ b/example/plugins/microservices/primary_identifier.yaml.example @@ -34,6 +34,8 @@ config: # Whether or not to clear the input attributes after setting the # primary identifier value. clear_input_attributes: no + # Whether to replace subject_id with the constructed primary identifier + replace_subject_id: no # If defined redirect to this page if no primary identifier can # be found. on_error: https://my.org/errors/no_primary_identifier diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 1c41878a8..adf6fe4cf 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -191,6 +191,12 @@ def process(self, context, data): clear_input_attributes = self.config['clear_input_attributes'] else: clear_input_attributes = False + if 'replace_subject_id' in config: + replace_subject_id = config['replace_subject_id'] + elif 'clear_input_attributes' in self.config: + replace_subject_id = self.config['replace_subject_id'] + else: + replace_subject_id = False if 'ignore' in config: ignore = True else: @@ -259,6 +265,15 @@ def process(self, context, data): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) + # Replace subject_id with the constructed primary identifier if so configured. 
+ if replace_subject_id: + msg = "{} Setting subject_id to value {}".format( + logprefix, primary_identifier_val + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + data.subject_id = primary_identifier_val + msg = "{} returning data.attributes {}".format(logprefix, str(data.attributes)) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From 8ae46960a6c85b313a6d6988b8a81a4a5a00115a Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Tue, 18 May 2021 15:27:07 +1200 Subject: [PATCH 118/288] fix: PrimaryIdentifier: fix clear_input_attributes The clear_input_attributes functionality in the PrimaryIdentifier MicroService wasn't working - the original code: if clear_input_attributes: msg = "{} Clearing values for these input attributes: {}".format( logprefix, data.attribute_names ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) data.attributes = {} * would break on the use of data.attribute_names (KeyError) * would clear ALL attributes I assume the intended functionality was to clear only the attributes used to construct the values - so `candidate['attribute_names']` for the successful `candidate`. To have access to the candidate object, this code needs to be moved into `constructPrimaryIdentifier` - and in order to have access to the `clear_input_attributes` option value, this is added as an optional parameter to `constructPrimaryIdentifier`. And, as `'name_id'` would not be found among the attributes, exclude it from the list of attributes to clear. 
--- .../micro_services/primary_identifier.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index adf6fe4cf..db0460510 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -31,7 +31,7 @@ def __init__(self, config, *args, **kwargs): super().__init__(*args, **kwargs) self.config = config - def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): + def constructPrimaryIdentifier(self, data, ordered_identifier_candidates, clear_input_attributes=False): """ Construct and return a primary identifier value from the data asserted by the IdP using the ordered list of candidates @@ -120,6 +120,18 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): # Concatenate all values to create the primary identifier. value = ''.join(values) + + # Clear input attributes if so configured. 
+ if clear_input_attributes: + attributes_to_clear = [attribute_name for attribute_name in candidate['attribute_names'] if attribute_name != 'name_id'] + msg = "{} Clearing values for these input attributes: {}".format( + logprefix, attributes_to_clear + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + for attribute in attributes_to_clear: + del data.attributes[attribute] + break return value @@ -225,7 +237,7 @@ def process(self, context, data): msg = "{} Constructing primary identifier".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - primary_identifier_val = self.constructPrimaryIdentifier(data, ordered_identifier_candidates) + primary_identifier_val = self.constructPrimaryIdentifier(data, ordered_identifier_candidates, clear_input_attributes) if not primary_identifier_val: msg = "{} No primary identifier found".format(logprefix) @@ -247,15 +259,6 @@ def process(self, context, data): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.info(logline) - # Clear input attributes if so configured. - if clear_input_attributes: - msg = "{} Clearing values for these input attributes: {}".format( - logprefix, data.attribute_names - ) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - data.attributes = {} - if primary_identifier: # Set the primary identifier attribute to the value found. 
data.attributes[primary_identifier] = primary_identifier_val From 7a61afb5a2cca071b29d35341f3ec67fc598949b Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 26 May 2021 13:52:10 +1200 Subject: [PATCH 119/288] Update src/satosa/micro_services/primary_identifier.py Fix copy-editing typo Co-authored-by: Ivan Kanakarakis --- src/satosa/micro_services/primary_identifier.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index db0460510..262bd0d86 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -205,7 +205,7 @@ def process(self, context, data): clear_input_attributes = False if 'replace_subject_id' in config: replace_subject_id = config['replace_subject_id'] - elif 'clear_input_attributes' in self.config: + elif 'replace_subject_id' in self.config: replace_subject_id = self.config['replace_subject_id'] else: replace_subject_id = False From a0b48595048a7dfdbffc46635e3f509754d07928 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 26 May 2021 14:36:55 +1200 Subject: [PATCH 120/288] Revert "fix: PrimaryIdentifier: fix clear_input_attributes" This reverts commit 8ae46960a6c85b313a6d6988b8a81a4a5a00115a. 
Original intention was to remove all attribute values, as per discussion in #368 --- .../micro_services/primary_identifier.py | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 262bd0d86..b22e3bebb 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -31,7 +31,7 @@ def __init__(self, config, *args, **kwargs): super().__init__(*args, **kwargs) self.config = config - def constructPrimaryIdentifier(self, data, ordered_identifier_candidates, clear_input_attributes=False): + def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): """ Construct and return a primary identifier value from the data asserted by the IdP using the ordered list of candidates @@ -120,18 +120,6 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates, clear_ # Concatenate all values to create the primary identifier. value = ''.join(values) - - # Clear input attributes if so configured. 
- if clear_input_attributes: - attributes_to_clear = [attribute_name for attribute_name in candidate['attribute_names'] if attribute_name != 'name_id'] - msg = "{} Clearing values for these input attributes: {}".format( - logprefix, attributes_to_clear - ) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - for attribute in attributes_to_clear: - del data.attributes[attribute] - break return value @@ -237,7 +225,7 @@ def process(self, context, data): msg = "{} Constructing primary identifier".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - primary_identifier_val = self.constructPrimaryIdentifier(data, ordered_identifier_candidates, clear_input_attributes) + primary_identifier_val = self.constructPrimaryIdentifier(data, ordered_identifier_candidates) if not primary_identifier_val: msg = "{} No primary identifier found".format(logprefix) @@ -259,6 +247,15 @@ def process(self, context, data): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.info(logline) + # Clear input attributes if so configured. + if clear_input_attributes: + msg = "{} Clearing values for these input attributes: {}".format( + logprefix, data.attribute_names + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + data.attributes = {} + if primary_identifier: # Set the primary identifier attribute to the value found. 
data.attributes[primary_identifier] = primary_identifier_val From bd98c3d870ff2a9e60c451e882f60328c9b97930 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 26 May 2021 14:51:13 +1200 Subject: [PATCH 121/288] fix: PrimaryIdentifier: fix clear_input_attributes The clear_input_attributes functionality in the PrimaryIdentifier MicroService breaks with: AttributeError: '' object has no attribute 'attribute_names' on line logging the attributes being cleared: msg = "{} Clearing values for these input attributes: {}".format( logprefix, data.attribute_names ) As the actual attribute store (dict) being cleared is `data.attributes`, the best fix appears to be to log the keys in the dict - attribute names: logprefix, data.attributes.keys() --- src/satosa/micro_services/primary_identifier.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index b22e3bebb..9c892570d 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -250,7 +250,7 @@ def process(self, context, data): # Clear input attributes if so configured. 
if clear_input_attributes: msg = "{} Clearing values for these input attributes: {}".format( - logprefix, data.attribute_names + logprefix, data.attributes.keys() ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From dbe9dd6b7d5a39bcd2a90354a79fdd44d358271c Mon Sep 17 00:00:00 2001 From: Thijs Kinkhorst Date: Mon, 28 Jun 2021 22:19:14 +0200 Subject: [PATCH 122/288] Add isMemberOf to basic attribute map --- docker/attributemaps/basic.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/attributemaps/basic.py b/docker/attributemaps/basic.py index c05b6e98b..9d84b8236 100644 --- a/docker/attributemaps/basic.py +++ b/docker/attributemaps/basic.py @@ -84,6 +84,7 @@ DEF+'info': 'info', DEF+'initials': 'initials', DEF+'internationaliSDNNumber': 'internationaliSDNNumber', + DEF+'isMemberOf': 'isMemberOf', DEF+'janetMailbox': 'janetMailbox', DEF+'jpegPhoto': 'jpegPhoto', DEF+'knowledgeInformation': 'knowledgeInformation', From 84cf60fd38de06a85d6f9d39ef364a3d12571120 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 29 Jun 2021 15:50:48 +0300 Subject: [PATCH 123/288] Fix tests for new Werkzeug version Signed-off-by: Ivan Kanakarakis --- tests/flows/test_oidc-saml.py | 2 +- tests/flows/test_saml-saml.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index aa61c151f..90bbe1a31 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -92,7 +92,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ # make auth resp to proxy authn_resp_req = urlparse(url).path + "?" 
+ urlencode(authn_resp) - authn_resp = test_client.get("/" + authn_resp_req) + authn_resp = test_client.get(authn_resp_req) assert authn_resp.status == "303 See Other" # verify auth resp from proxy diff --git a/tests/flows/test_saml-saml.py b/tests/flows/test_saml-saml.py index ce6cd6960..efa1a8729 100644 --- a/tests/flows/test_saml-saml.py +++ b/tests/flows/test_saml-saml.py @@ -54,7 +54,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f # make auth resp to proxy authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp) - authn_resp = test_client.get("/" + authn_resp_req) + authn_resp = test_client.get(authn_resp_req) assert authn_resp.status == "303 See Other" # verify auth resp from proxy From ea951ce852f07a82f370e26178c9e613d0a67210 Mon Sep 17 00:00:00 2001 From: Martin van Es Date: Mon, 12 Jul 2021 21:01:33 +0200 Subject: [PATCH 124/288] Add reflector back-end, for easy front-end development. --- .../backends/reflector_backend.yaml.example | 3 + src/satosa/backends/reflector.py | 80 +++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 example/plugins/backends/reflector_backend.yaml.example create mode 100644 src/satosa/backends/reflector.py diff --git a/example/plugins/backends/reflector_backend.yaml.example b/example/plugins/backends/reflector_backend.yaml.example new file mode 100644 index 000000000..185a08035 --- /dev/null +++ b/example/plugins/backends/reflector_backend.yaml.example @@ -0,0 +1,3 @@ +module: satosa.backends.reflector.ReflectorBackend +name: Reflector +config: diff --git a/src/satosa/backends/reflector.py b/src/satosa/backends/reflector.py new file mode 100644 index 000000000..843cc58eb --- /dev/null +++ b/src/satosa/backends/reflector.py @@ -0,0 +1,80 @@ +""" +A reflector backend module for the satosa proxy +""" +import logging + +from satosa.internal import AuthenticationInformation +from satosa.internal import InternalData +from satosa.metadata_creation.description import 
MetadataDescription +from satosa.backends.base import BackendModule + +import time + +logger = logging.getLogger(__name__) + + +class ReflectorBackend(BackendModule): + """ + A reflector backend module + """ + + def __init__(self, outgoing, internal_attributes, config, base_url, name): + """ + :type outgoing: + (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response + :type internal_attributes: dict[str, dict[str, list[str] | str]] + :type config: dict[str, Any] + :type base_url: str + :type name: str + + :param outgoing: Callback should be called by the module after + the authorization in the backend is done. + :param internal_attributes: Internal attribute map + :param config: The module config + :param base_url: base url of the service + :param name: name of the plugin + """ + super().__init__(outgoing, internal_attributes, base_url, name) + + def start_auth(self, context, internal_req): + """ + See super class method satosa.backends.base.BackendModule#start_auth + + :type context: satosa.context.Context + :type internal_req: satosa.internal.InternalData + :rtype: satosa.response.Response + """ + + timestamp = int(time.time()) + auth_info = AuthenticationInformation( + 'reflector', timestamp, 'reflector', + ) + + internal_resp = InternalData( + auth_info=auth_info, + attributes={}, + subject_type=None, + subject_id='reflector', + ) + + return self.auth_callback_func(context, internal_resp) + + def register_endpoints(self): + """ + See super class method satosa.backends.base.BackendModule#register_endpoints + :rtype list[(str, ((satosa.context.Context, Any) -> Any, Any))] + """ + url_map = [] + return url_map + + def get_metadata_desc(self): + """ + See super class satosa.backends.backend_base.BackendModule#get_metadata_desc + :rtype: satosa.metadata_creation.description.MetadataDescription + """ + entity_descriptions = [] + description = MetadataDescription(urlsafe_b64encode('reflector'.encode("utf-8")).decode("utf-8")) + 
description.organization = 'reflector' + + entity_descriptions.append(description) + return entity_descriptions From cdef0974e3ec5d015fedb0b36c42a7b739cc48d6 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 01:59:49 +0300 Subject: [PATCH 125/288] Fix timestamp calculation Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/reflector.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/satosa/backends/reflector.py b/src/satosa/backends/reflector.py index 843cc58eb..3d77c556b 100644 --- a/src/satosa/backends/reflector.py +++ b/src/satosa/backends/reflector.py @@ -1,17 +1,13 @@ """ A reflector backend module for the satosa proxy """ -import logging +from datetime import datetime from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.metadata_creation.description import MetadataDescription from satosa.backends.base import BackendModule -import time - -logger = logging.getLogger(__name__) - class ReflectorBackend(BackendModule): """ @@ -45,7 +41,7 @@ def start_auth(self, context, internal_req): :rtype: satosa.response.Response """ - timestamp = int(time.time()) + timestamp = datetime.utcnow().timestamp() auth_info = AuthenticationInformation( 'reflector', timestamp, 'reflector', ) From 3616b4611a100aa50a7f3e66395ed5fa29d6f2ae Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 02:00:13 +0300 Subject: [PATCH 126/288] Format code and use constants Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/reflector.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/reflector.py b/src/satosa/backends/reflector.py index 3d77c556b..6702dc733 100644 --- a/src/satosa/backends/reflector.py +++ b/src/satosa/backends/reflector.py @@ -14,6 +14,8 @@ class ReflectorBackend(BackendModule): A reflector backend module """ + ENTITY_ID = ORG_NAME = AUTH_CLASS_REF = SUBJECT_ID = "reflector" + def __init__(self, 
outgoing, internal_attributes, config, base_url, name): """ :type outgoing: @@ -43,14 +45,16 @@ def start_auth(self, context, internal_req): timestamp = datetime.utcnow().timestamp() auth_info = AuthenticationInformation( - 'reflector', timestamp, 'reflector', + auth_class_ref=ReflectorBackend.AUTH_CLASS_REF, + timestamp=timestamp, + issuer=ReflectorBackend.ENTITY_ID, ) internal_resp = InternalData( auth_info=auth_info, attributes={}, subject_type=None, - subject_id='reflector', + subject_id=ReflectorBackend.SUBJECT_ID, ) return self.auth_callback_func(context, internal_resp) @@ -69,8 +73,12 @@ def get_metadata_desc(self): :rtype: satosa.metadata_creation.description.MetadataDescription """ entity_descriptions = [] - description = MetadataDescription(urlsafe_b64encode('reflector'.encode("utf-8")).decode("utf-8")) - description.organization = 'reflector' + description = MetadataDescription( + urlsafe_b64encode(ReflectorBackend.ENTITY_ID.encode("utf-8")).decode( + "utf-8" + ) + ) + description.organization = ReflectorBackend.ORG_NAME entity_descriptions.append(description) return entity_descriptions From 28c509a37bd4095bd0246d44a8e11733346ed523 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 13:20:24 +0300 Subject: [PATCH 127/288] Bump pyop and add appropriate extras Signed-off-by: Ivan Kanakarakis --- setup.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index ff12945e0..1f6adcaf9 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ packages=find_packages('src/'), package_dir={'': 'src'}, install_requires=[ - "pyop >= 3.0.1", + "pyop >= 3.2.0", "pysaml2 >= 6.5.1", "pycryptodomex", "requests", @@ -27,7 +27,9 @@ "cookies-samesite-compat", ], extras_require={ - "ldap": ["ldap3"] + "ldap": ["ldap3"], + "pyop_mongo": ["pyop[mongo]"], + "pyop_redis": ["pyop[redis]"], }, zip_safe=False, classifiers=[ From 67eb6071abf6c7002023ad44ddec5d4a3996bd7b Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: 
Tue, 13 Jul 2021 13:42:52 +0300 Subject: [PATCH 128/288] Replace whitelist_externals with allowlist_externals Signed-off-by: Ivan Kanakarakis --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 134af7e1f..7ab4cc495 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ envlist = [testenv] deps = -rtests/test_requirements.txt -whitelist_externals = +allowlist_externals = tox xmlsec1 commands = From 4c49145c18743778df5bf487573b4f4b2cb3d7e3 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 13:27:16 +0300 Subject: [PATCH 129/288] Add extras for tox Signed-off-by: Ivan Kanakarakis --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 7ab4cc495..4e3896034 100644 --- a/tox.ini +++ b/tox.ini @@ -5,11 +5,13 @@ envlist = pypy3 [testenv] +skip_install = true deps = -rtests/test_requirements.txt allowlist_externals = tox xmlsec1 commands = + pip install -U .[pyop_mongo] xmlsec1 --version python --version pytest --version From f1cf72016f21c86d701aeac68897a5dc5129a651 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 13:52:52 +0300 Subject: [PATCH 130/288] Always recreate the tox environment Signed-off-by: Ivan Kanakarakis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7e45d5d75..f63586ac0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,7 @@ install: - pip install tox-travis script: - - tox + - tox -r jobs: allow_failures: From 2b746b3fb7538f5aa35b8c288ccf82e75ad4b769 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 13:53:03 +0300 Subject: [PATCH 131/288] Test on py38 and py39 Signed-off-by: Ivan Kanakarakis --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 4e3896034..6534e1bdd 100644 --- a/tox.ini +++ b/tox.ini @@ -2,6 +2,8 @@ envlist = py36 py37 + py38 + py39 pypy3 [testenv] From 2eabbf1f144ea709846627fb0107cd7008b6e512 Mon 
Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 13:57:11 +0300 Subject: [PATCH 132/288] Ensure pip wheel and setuptools are up to date Signed-off-by: Ivan Kanakarakis --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 6534e1bdd..e26b9ef89 100644 --- a/tox.ini +++ b/tox.ini @@ -13,6 +13,7 @@ allowlist_externals = tox xmlsec1 commands = + pip install -U pip wheel setuptools pip install -U .[pyop_mongo] xmlsec1 --version python --version From 0c006030caf399409a01fa8e39a6a7890cffd88e Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 10 Jul 2021 22:41:53 +0300 Subject: [PATCH 133/288] Update example configs for the saml2 frontend and backend Signed-off-by: Ivan Kanakarakis --- .../backends/saml2_backend.yaml.example | 9 +++-- .../frontends/saml2_frontend.yaml.example | 34 ++++++++++--------- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 07b81eb14..8aca6b4e5 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -3,6 +3,13 @@ name: Saml2 config: idp_blacklist_file: /path/to/blacklist.json + acr_mapping: + "": default-LoA + "https://accounts.google.com": LoA1 + + # disco_srv must be defined if there is more than one IdP in the metadata specified above + disco_srv: http://disco.example.com + entityid_endpoint: true mirror_force_authn: no memorize_idp: no @@ -59,5 +66,3 @@ config: # include a Format attribute in the NameIDPolicy. 
# name_id_format: 'None' name_id_format_allow_create: true - # disco_srv must be defined if there is more than one IdP in the metadata specified above - disco_srv: http://disco.example.com diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index 40c9000f2..c0dffe6f6 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ b/example/plugins/frontends/saml2_frontend.yaml.example @@ -1,7 +1,25 @@ module: satosa.frontends.saml2.SAMLFrontend name: Saml2IDP config: + #acr_mapping: + # "": default-LoA + # "https://accounts.google.com": LoA1 + + endpoints: + single_sign_on_service: + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + + # If configured and not false or empty the common domain cookie _saml_idp will be set + # with or have appended the IdP used for authentication. The default is not to set the + # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie + # will be set to the value for the 'domain' key. If no 'domain' is set then the domain + # from the BASE defined for the proxy will be used. + #common_domain_cookie: + # domain: .example.com + entityid_endpoint: true + idp_config: organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: @@ -50,19 +68,3 @@ config: name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:uri encrypt_assertion: false encrypted_advice_attributes: false - acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 - - endpoints: - single_sign_on_service: - 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post - 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect - - # If configured and not false or empty the common domain cookie _saml_idp will be set - # with or have appended the IdP used for authentication. 
The default is not to set the - # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie - # will be set to the value for the 'domain' key. If no 'domain' is set then the domain - # from the BASE defined for the proxy will be used. - #common_domain_cookie: - # domain: .example.com From d4d4767812e7b01d263be19b725d0532e063d167 Mon Sep 17 00:00:00 2001 From: peppelinux Date: Sun, 11 Apr 2021 18:17:53 +0200 Subject: [PATCH 134/288] feat: Add IdPHinting micro-service for basic IdP-hinting support Signed-off-by: Ivan Kanakarakis --- .../microservices/idp_hinting.yaml.example | 6 ++ src/satosa/micro_services/idp_hinting.py | 59 +++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 example/plugins/microservices/idp_hinting.yaml.example create mode 100644 src/satosa/micro_services/idp_hinting.py diff --git a/example/plugins/microservices/idp_hinting.yaml.example b/example/plugins/microservices/idp_hinting.yaml.example new file mode 100644 index 000000000..9238f3c55 --- /dev/null +++ b/example/plugins/microservices/idp_hinting.yaml.example @@ -0,0 +1,6 @@ +module: satosa.micro_services.idp_hinting.IdpHinting +name: IdpHinting +config: + allowed_params: + - idp_hinting + - idp_hint diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py new file mode 100644 index 000000000..397852d09 --- /dev/null +++ b/src/satosa/micro_services/idp_hinting.py @@ -0,0 +1,59 @@ +import logging +from urllib.parse import parse_qs + +from .base import RequestMicroService +from ..exception import SATOSAConfigurationError +from ..exception import SATOSAError + + +logger = logging.getLogger(__name__) + + +class IdpHintingError(SATOSAError): + """ + SATOSA exception raised by IdpHinting microservice + """ + pass + + +class IdpHinting(RequestMicroService): + """ + Detect if an idp hinting feature have been requested + """ + + def __init__(self, config, *args, **kwargs): + """ + Constructor. 
+ :param config: microservice configuration + :type config: Dict[str, Dict[str, str]] + """ + super().__init__(*args, **kwargs) + try: + self.idp_hint_param_names = config['allowed_params'] + except KeyError: + raise SATOSAConfigurationError( + f"{self.__class__.__name__} can't find allowed_params" + ) + + def process(self, context, data): + """ + This intercepts if idp_hint parameter is in use + :param context: request context + :param data: the internal request + """ + target_entity_id = context.get_decoration(context.KEY_TARGET_ENTITYID) + qs_raw = context._http_headers['QUERY_STRING'] + if target_entity_id or not qs_raw: + return super().process(context, data) + + qs = parse_qs(qs_raw) + hints = ( + entity_id + for param in self.idp_hint_param_names + for entity_id in qs.get(param, [None]) + if entity_id + ) + hint = next(hints, None) + + context.decorate(context.KEY_TARGET_ENTITYID, hint) + return super().process(context, data) From dc46b6dbd4f9056caed291289a692d1e3ec658db Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 13 Jul 2021 15:41:44 +0300 Subject: [PATCH 135/288] Add section and pointers to external micro-services Signed-off-by: Ivan Kanakarakis --- README.md | 1 + doc/README.md | 26 ++++++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/README.md b/README.md index b790e0024..044091a86 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,7 @@ OpenID Connect and OAuth2. 
- [Manual installation](doc/README.md#manual_installation) - [Dependencies](doc/README.md#dependencies) - [Instructions](doc/README.md#install_instructions) + - [External micro-services](doc/README.md#install_external) - [Configuration](doc/README.md#configuration) - [SATOSA proxy configuration: proxy_conf.yaml.example](doc/README.md#proxy_conf) - [Additional services](doc/README.md#additional_service) diff --git a/doc/README.md b/doc/README.md index 62c5f97e4..b9934ce94 100644 --- a/doc/README.md +++ b/doc/README.md @@ -28,6 +28,32 @@ apt-get install libffi-dev libssl-dev xmlsec1 Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. + +### External micro-services + +Micro-services act like plugins and can be developed by anyone. Other people +that have been working with the SATOSA proxy, have built extensions mainly in +the form of additional micro-services that can be shared and used by anyone. + +DAASI International have been a long-time user of this software and have made +their extensions available, licensed under Apache2.0. You can find the +extensions using the following URL: + +- https://gitlab.daasi.de/didmos2/didmos2-auth/-/tree/master/src/didmos_oidc/satosa/micro_services + +The extensions include: + +- SCIM attribute store to fetch attributes via SCIM API (instead of LDAP) +- Authorization module for blocking services if necessary group memberships or + attributes are missing in the identity (for service providers that do not + evaluate attributes themselves) +- Backend chooser with Django UI for letting the user choose between any + existing SATOSA backend +- Integration of MFA via PrivacyIDEA + +and more. + + # Configuration SATOSA is configured using YAML. 
From d1784a7a24a35113a544ff2695a017e0d2f4ebc2 Mon Sep 17 00:00:00 2001 From: peppelinux Date: Wed, 7 Apr 2021 01:11:54 +0200 Subject: [PATCH 136/288] feat: DecideBackedByTarget microservice --- .../target_based_routing.yaml.example | 12 ++ src/satosa/micro_services/custom_routing.py | 109 ++++++++++++++++++ .../micro_services/test_custom_routing.py | 64 +++++++++- 3 files changed, 184 insertions(+), 1 deletion(-) create mode 100644 example/plugins/microservices/target_based_routing.yaml.example diff --git a/example/plugins/microservices/target_based_routing.yaml.example b/example/plugins/microservices/target_based_routing.yaml.example new file mode 100644 index 000000000..7b017dba8 --- /dev/null +++ b/example/plugins/microservices/target_based_routing.yaml.example @@ -0,0 +1,12 @@ +module: satosa.micro_services.custom_routing.DecideBackendByTargetIdP +name: TargetRouter +config: + default_backend: Saml2 + + # the regex that will intercept http requests to be handled with this microservice + endpoint_paths: + - ".*/disco" + + target_mapping: + "http://idpspid.testunical.it:8088": "spidSaml2" # map SAML entity with entity id 'target_id' to backend name + "http://eidas.testunical.it:8081/saml2/metadata": "eidasSaml2" diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index d903502be..1eaccea5d 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -2,14 +2,123 @@ from base64 import urlsafe_b64encode from satosa.context import Context +from satosa.internal import InternalData + from .base import RequestMicroService from ..exception import SATOSAConfigurationError from ..exception import SATOSAError +from ..exception import SATOSAStateError logger = logging.getLogger(__name__) +class CustomRoutingError(SATOSAError): + """ + SATOSA exception raised by CustomRouting rules + """ + pass + + +class DecideBackendByTargetIdP(RequestMicroService): + """ + Select which 
backend should be used based on who is the SAML IDP + """ + + def __init__(self, config:dict, *args, **kwargs): + """ + Constructor. + :param config: microservice configuration loaded from yaml file + :type config: Dict[str, Dict[str, str]] + """ + super().__init__(*args, **kwargs) + self.target_mapping = config['target_mapping'] + self.endpoint_paths = config['endpoint_paths'] + self.default_backend = config['default_backend'] + + if not isinstance(self.endpoint_paths, list): + raise SATOSAConfigurationError() + + def register_endpoints(self): + """ + URL mapping of additional endpoints this micro service needs to register for callbacks. + + Example of a mapping from the url path '/callback' to the callback() method of a micro service: + reg_endp = [ + ("^/callback1$", self.callback), + ] + + :rtype List[Tuple[str, Callable[[satosa.context.Context, Any], satosa.response.Response]]] + + :return: A list with functions and args bound to a specific endpoint url, + [(regexp, Callable[[satosa.context.Context], satosa.response.Response]), ...] + """ + + # this intercepts disco response + return [ + (path , self.backend_by_entityid) + for path in self.endpoint_paths + ] + + def _get_request_entity_id(self, context): + return ( + context.get_decoration(Context.KEY_TARGET_ENTITYID) or + context.request.get('entityID') + ) + + def _get_backend(self, context:Context, entity_id:str) -> str: + """ + returns the Target Backend to use + """ + return ( + self.target_mapping.get(entity_id) or + self.default_backend + ) + + def process(self, context:Context, data:dict): + """ + Will modify the context.target_backend attribute based on the target entityid. 
+ :param context: request context + :param data: the internal request + """ + entity_id = self._get_request_entity_id(context) + if entity_id: + self._rewrite_context(entity_id, context) + return super().process(context, data) + + def _rewrite_context(self, entity_id:str, context:Context) -> None: + tr_backend = self._get_backend(context, entity_id) + context.decorate(Context.KEY_TARGET_ENTITYID, entity_id) + context.target_frontend = context.target_frontend or context.state.get('ROUTER') + native_backend = context.target_backend + msg = (f'Found DecideBackendByTarget ({self.name} microservice) ' + f'redirecting {entity_id} from {native_backend} ' + f'backend to {tr_backend}') + logger.info(msg) + context.target_backend = tr_backend + + def backend_by_entityid(self, context:Context): + entity_id = self._get_request_entity_id(context) + + if entity_id: + self._rewrite_context(entity_id, context) + else: + raise CustomRoutingError( + f"{self.__class__.__name__} " + "can't find any valid entity_id in the context." + ) + + if not context.state.get('ROUTER'): + raise SATOSAStateError( + f"{self.__class__.__name__} " + "can't find any valid state in the context." + ) + + data_serialized = context.state.get(self.name, {}).get("internal", {}) + data = InternalData.from_dict(data_serialized) + return super().process(context, data) + + class DecideBackendByRequester(RequestMicroService): """ Select which backend should be used based on who the requester is. 
diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index 7a5227250..81425872d 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -3,9 +3,11 @@ import pytest from satosa.context import Context -from satosa.exception import SATOSAError, SATOSAConfigurationError +from satosa.exception import SATOSAError, SATOSAConfigurationError, SATOSAStateError from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed +from satosa.micro_services.custom_routing import DecideBackendByTargetIdP +from satosa.micro_services.custom_routing import CustomRoutingError TARGET_ENTITY = "entity1" @@ -156,3 +158,63 @@ def test_missing_target_entity_id_from_context(self, context): req = InternalData(requester="test_requester") with pytest.raises(SATOSAError): decide_service.process(context, req) + + +class TestDecideBackendByTargetIdP: + rules = { + 'default_backend': 'Saml2', + 'endpoint_paths': ['.*/disco'], + 'target_mapping': {'http://idpspid.testunical.it:8088': 'spidSaml2'} + } + + def create_decide_service(self, rules): + decide_service = DecideBackendByTargetIdP( + config=rules, + name="test_decide_service", + base_url="https://satosa.example.com" + ) + decide_service.next = lambda ctx, data: data + return decide_service + + + def test_missing_state(self, target_context): + decide_service = self.create_decide_service(self.rules) + target_context.request = { + 'entityID': 'http://idpspid.testunical.it:8088', + } + req = InternalData(requester="test_requester") + req.requester = "somebody else" + assert decide_service.process(target_context, req) + + with pytest.raises(SATOSAStateError): + decide_service.backend_by_entityid(target_context) + + + def test_unmatching_target(self, target_context): + """ + It would rely on the default backend + """ + decide_service = 
self.create_decide_service(self.rules) + target_context.request = { + 'entityID': 'unknow-entity-id', + } + target_context.state['ROUTER'] = 'Saml2' + + req = InternalData(requester="test_requester") + assert decide_service.process(target_context, req) + + res = decide_service.backend_by_entityid(target_context) + assert isinstance(res, InternalData) + + def test_matching_target(self, target_context): + decide_service = self.create_decide_service(self.rules) + target_context.request = { + 'entityID': 'http://idpspid.testunical.it:8088-entity-id' + } + target_context.state['ROUTER'] = 'Saml2' + + req = InternalData(requester="test_requester") + req.requester = "somebody else" + assert decide_service.process(target_context, req) + res = decide_service.backend_by_entityid(target_context) + assert isinstance(res, InternalData) From e7ad982cfa7318f66f9bb45fe48a104ae4589789 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 14 Jul 2021 01:14:47 +0300 Subject: [PATCH 137/288] Fix DecideBackendByTargetIdP and introduce DecideBackendByDiscoIdP Signed-off-by: Ivan Kanakarakis --- src/satosa/micro_services/custom_routing.py | 108 ++++++-------- .../micro_services/test_custom_routing.py | 133 +++++++++++------- 2 files changed, 130 insertions(+), 111 deletions(-) diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index 1eaccea5d..a276184c5 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -22,22 +22,55 @@ class CustomRoutingError(SATOSAError): class DecideBackendByTargetIdP(RequestMicroService): """ - Select which backend should be used based on who is the SAML IDP + Select target backend based on the target issuer. """ def __init__(self, config:dict, *args, **kwargs): """ Constructor. 
+ :param config: microservice configuration loaded from yaml file :type config: Dict[str, Dict[str, str]] """ super().__init__(*args, **kwargs) + self.target_mapping = config['target_mapping'] - self.endpoint_paths = config['endpoint_paths'] self.default_backend = config['default_backend'] - if not isinstance(self.endpoint_paths, list): - raise SATOSAConfigurationError() + def process(self, context:Context, data:InternalData): + """ + Set context.target_backend based on the target issuer (context.target_entity_id) + + :param context: request context + :param data: the internal request + """ + target_issuer = context.get_decoration(Context.KEY_TARGET_ENTITYID) + if not target_issuer: + return super().process(context, data) + + target_backend = ( + self.target_mapping.get(target_issuer) + or self.default_backend + ) + + report = { + 'msg': 'decided target backend by target issuer', + 'target_issuer': target_issuer, + 'target_backend': target_backend, + } + logger.info(report) + + context.target_backend = target_backend + return super().process(context, data) + + +class DecideBackendByDiscoIdP(DecideBackendByTargetIdP): + def __init__(self, config:dict, *args, **kwargs): + super().__init__(config, *args, **kwargs) + + self.disco_endpoints = config['disco_endpoints'] + if not isinstance(self.disco_endpoints, list): + raise CustomRoutingError('disco_endpoints must be a list of str') def register_endpoints(self): """ @@ -54,69 +87,20 @@ def register_endpoints(self): [(regexp, Callable[[satosa.context.Context], satosa.response.Response]), ...] 
""" - # this intercepts disco response return [ - (path , self.backend_by_entityid) - for path in self.endpoint_paths + (path , self._handle_disco_response) + for path in self.disco_endpoints ] - def _get_request_entity_id(self, context): - return ( - context.get_decoration(Context.KEY_TARGET_ENTITYID) or - context.request.get('entityID') - ) - - def _get_backend(self, context:Context, entity_id:str) -> str: - """ - returns the Target Backend to use - """ - return ( - self.target_mapping.get(entity_id) or - self.default_backend - ) - - def process(self, context:Context, data:dict): - """ - Will modify the context.target_backend attribute based on the target entityid. - :param context: request context - :param data: the internal request - """ - entity_id = self._get_request_entity_id(context) - if entity_id: - self._rewrite_context(entity_id, context) - return super().process(context, data) - - def _rewrite_context(self, entity_id:str, context:Context) -> None: - tr_backend = self._get_backend(context, entity_id) - context.decorate(Context.KEY_TARGET_ENTITYID, entity_id) - context.target_frontend = context.target_frontend or context.state.get('ROUTER') - native_backend = context.target_backend - msg = (f'Found DecideBackendByTarget ({self.name} microservice) ' - f'redirecting {entity_id} from {native_backend} ' - f'backend to {tr_backend}') - logger.info(msg) - context.target_backend = tr_backend - - def backend_by_entityid(self, context:Context): - entity_id = self._get_request_entity_id(context) - - if entity_id: - self._rewrite_context(entity_id, context) - else: - raise CustomRoutingError( - f"{self.__class__.__name__} " - "can't find any valid entity_id in the context." - ) - - if not context.state.get('ROUTER'): - raise SATOSAStateError( - f"{self.__class__.__name__} " - "can't find any valid state in the context." 
- ) + def _handle_disco_response(self, context:Context): + target_issuer_from_disco = context.request.get('entityID') + if not target_issuer_from_disco: + raise CustomRoutingError('no valid entity_id in the disco response') - data_serialized = context.state.get(self.name, {}).get("internal", {}) + context.decorate(Context.KEY_TARGET_ENTITYID, target_issuer_from_disco) + data_serialized = context.state.get(self.name, {}).get('internal', {}) data = InternalData.from_dict(data_serialized) - return super().process(context, data) + return self.process(context, data) class DecideBackendByRequester(RequestMicroService): diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index 81425872d..9cbe4eda4 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -1,14 +1,18 @@ from base64 import urlsafe_b64encode +from unittest import TestCase import pytest from satosa.context import Context +from satosa.state import State from satosa.exception import SATOSAError, SATOSAConfigurationError, SATOSAStateError from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed +from satosa.micro_services.custom_routing import DecideBackendByDiscoIdP from satosa.micro_services.custom_routing import DecideBackendByTargetIdP from satosa.micro_services.custom_routing import CustomRoutingError + TARGET_ENTITY = "entity1" @@ -160,61 +164,92 @@ def test_missing_target_entity_id_from_context(self, context): decide_service.process(context, req) -class TestDecideBackendByTargetIdP: - rules = { - 'default_backend': 'Saml2', - 'endpoint_paths': ['.*/disco'], - 'target_mapping': {'http://idpspid.testunical.it:8088': 'spidSaml2'} - } - - def create_decide_service(self, rules): - decide_service = DecideBackendByTargetIdP( - config=rules, - name="test_decide_service", - base_url="https://satosa.example.com" - ) - 
decide_service.next = lambda ctx, data: data - return decide_service +class TestDecideBackendByTargetIdP(TestCase): + def setUp(self): + context = Context() + context.state = State() - - def test_missing_state(self, target_context): - decide_service = self.create_decide_service(self.rules) - target_context.request = { - 'entityID': 'http://idpspid.testunical.it:8088', + config = { + 'default_backend': 'default_backend', + 'target_mapping': { + 'mapped_idp.example.org': 'mapped_backend', + }, + 'disco_endpoints': [ + '.*/disco', + ], } - req = InternalData(requester="test_requester") - req.requester = "somebody else" - assert decide_service.process(target_context, req) - - with pytest.raises(SATOSAStateError): - decide_service.backend_by_entityid(target_context) - - def test_unmatching_target(self, target_context): - """ - It would rely on the default backend - """ - decide_service = self.create_decide_service(self.rules) - target_context.request = { - 'entityID': 'unknow-entity-id', + plugin = DecideBackendByTargetIdP( + config=config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_target_is_not_set_do_skip(self): + data = InternalData(requester='test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert not newctx.target_backend + + def test_when_target_is_not_mapped_choose_default_backend(self): + self.context.decorate(Context.KEY_TARGET_ENTITYID, 'idp.example.org') + data = InternalData(requester='test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'default_backend' + + def test_when_target_is_mapped_choose_mapping_backend(self): + self.context.decorate(Context.KEY_TARGET_ENTITYID, 'mapped_idp.example.org') + data = InternalData(requester='test_requester') + data.requester = 'somebody else' + newctx, newdata = 
self.plugin.process(self.context, data) + assert newctx.target_backend == 'mapped_backend' + + +class TestDecideBackendByDiscoIdP(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'default_backend': 'default_backend', + 'target_mapping': { + 'mapped_idp.example.org': 'mapped_backend', + }, + 'disco_endpoints': [ + '.*/disco', + ], } - target_context.state['ROUTER'] = 'Saml2' - req = InternalData(requester="test_requester") - assert decide_service.process(target_context, req) + plugin = DecideBackendByDiscoIdP( + config=config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) - res = decide_service.backend_by_entityid(target_context) - assert isinstance(res, InternalData) + self.config = config + self.context = context + self.plugin = plugin - def test_matching_target(self, target_context): - decide_service = self.create_decide_service(self.rules) - target_context.request = { - 'entityID': 'http://idpspid.testunical.it:8088-entity-id' + def test_when_target_is_not_set_raise_error(self): + self.context.request = {} + with pytest.raises(CustomRoutingError): + self.plugin._handle_disco_response(self.context) + + def test_when_target_is_not_mapped_choose_default_backend(self): + self.context.request = { + 'entityID': 'idp.example.org', } - target_context.state['ROUTER'] = 'Saml2' + newctx, newdata = self.plugin._handle_disco_response(self.context) + assert newctx.target_backend == 'default_backend' - req = InternalData(requester="test_requester") - req.requester = "somebody else" - assert decide_service.process(target_context, req) - res = decide_service.backend_by_entityid(target_context) - assert isinstance(res, InternalData) + def test_when_target_is_mapped_choose_mapping_backend(self): + self.context.request = { + 'entityID': 'mapped_idp.example.org', + } + newctx, newdata = self.plugin._handle_disco_response(self.context) + assert 
newctx.target_backend == 'mapped_backend' From c0265f26b6b1aa8b0bb83d7f01f1becdf162f129 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 14 Jul 2021 02:03:56 +0300 Subject: [PATCH 138/288] Separate disco handling from backend decision Signed-off-by: Ivan Kanakarakis --- .../disco_to_target_issuer.yaml.example | 6 ++ .../target_based_routing.yaml.example | 6 +- src/satosa/micro_services/custom_routing.py | 53 ++---------------- src/satosa/micro_services/disco.py | 48 ++++++++++++++++ .../micro_services/test_custom_routing.py | 56 +------------------ tests/satosa/micro_services/test_disco.py | 44 +++++++++++++++ 6 files changed, 106 insertions(+), 107 deletions(-) create mode 100644 example/plugins/microservices/disco_to_target_issuer.yaml.example create mode 100644 src/satosa/micro_services/disco.py create mode 100644 tests/satosa/micro_services/test_disco.py diff --git a/example/plugins/microservices/disco_to_target_issuer.yaml.example b/example/plugins/microservices/disco_to_target_issuer.yaml.example new file mode 100644 index 000000000..5d5d0100c --- /dev/null +++ b/example/plugins/microservices/disco_to_target_issuer.yaml.example @@ -0,0 +1,6 @@ +module: satosa.micro_services.disco.DiscoToTargetIssuer +name: DiscoToTargetIssuer +config: + # the regex that will intercept http requests to be handled with this microservice + disco_endpoints: + - ".*/disco" diff --git a/example/plugins/microservices/target_based_routing.yaml.example b/example/plugins/microservices/target_based_routing.yaml.example index 7b017dba8..55e699c53 100644 --- a/example/plugins/microservices/target_based_routing.yaml.example +++ b/example/plugins/microservices/target_based_routing.yaml.example @@ -1,12 +1,8 @@ -module: satosa.micro_services.custom_routing.DecideBackendByTargetIdP +module: satosa.micro_services.custom_routing.DecideBackendByTargetIssuer name: TargetRouter config: default_backend: Saml2 - # the regex that will intercept http requests to be handled with this 
microservice - endpoint_paths: - - ".*/disco" - target_mapping: "http://idpspid.testunical.it:8088": "spidSaml2" # map SAML entity with entity id 'target_id' to backend name "http://eidas.testunical.it:8081/saml2/metadata": "eidasSaml2" diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index a276184c5..541b824f1 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -7,20 +7,17 @@ from .base import RequestMicroService from ..exception import SATOSAConfigurationError from ..exception import SATOSAError -from ..exception import SATOSAStateError logger = logging.getLogger(__name__) class CustomRoutingError(SATOSAError): - """ - SATOSA exception raised by CustomRouting rules - """ + """SATOSA exception raised by CustomRouting rules""" pass -class DecideBackendByTargetIdP(RequestMicroService): +class DecideBackendByTargetIssuer(RequestMicroService): """ Select target backend based on the target issuer. 
""" @@ -38,14 +35,11 @@ def __init__(self, config:dict, *args, **kwargs): self.default_backend = config['default_backend'] def process(self, context:Context, data:InternalData): - """ - Set context.target_backend based on the target issuer (context.target_entity_id) + """Set context.target_backend based on the target issuer""" - :param context: request context - :param data: the internal request - """ target_issuer = context.get_decoration(Context.KEY_TARGET_ENTITYID) if not target_issuer: + logger.info('skipping backend decision because no target_issuer was found') return super().process(context, data) target_backend = ( @@ -64,45 +58,6 @@ def process(self, context:Context, data:InternalData): return super().process(context, data) -class DecideBackendByDiscoIdP(DecideBackendByTargetIdP): - def __init__(self, config:dict, *args, **kwargs): - super().__init__(config, *args, **kwargs) - - self.disco_endpoints = config['disco_endpoints'] - if not isinstance(self.disco_endpoints, list): - raise CustomRoutingError('disco_endpoints must be a list of str') - - def register_endpoints(self): - """ - URL mapping of additional endpoints this micro service needs to register for callbacks. - - Example of a mapping from the url path '/callback' to the callback() method of a micro service: - reg_endp = [ - ("^/callback1$", self.callback), - ] - - :rtype List[Tuple[str, Callable[[satosa.context.Context, Any], satosa.response.Response]]] - - :return: A list with functions and args bound to a specific endpoint url, - [(regexp, Callable[[satosa.context.Context], satosa.response.Response]), ...] 
- """ - - return [ - (path , self._handle_disco_response) - for path in self.disco_endpoints - ] - - def _handle_disco_response(self, context:Context): - target_issuer_from_disco = context.request.get('entityID') - if not target_issuer_from_disco: - raise CustomRoutingError('no valid entity_id in the disco response') - - context.decorate(Context.KEY_TARGET_ENTITYID, target_issuer_from_disco) - data_serialized = context.state.get(self.name, {}).get('internal', {}) - data = InternalData.from_dict(data_serialized) - return self.process(context, data) - - class DecideBackendByRequester(RequestMicroService): """ Select which backend should be used based on who the requester is. diff --git a/src/satosa/micro_services/disco.py b/src/satosa/micro_services/disco.py new file mode 100644 index 000000000..7ea5bbe0a --- /dev/null +++ b/src/satosa/micro_services/disco.py @@ -0,0 +1,48 @@ +from satosa.context import Context +from satosa.internal import InternalData + +from .base import RequestMicroService +from ..exception import SATOSAError + + +class DiscoToTargetIssuerError(SATOSAError): + """SATOSA exception raised by CustomRouting rules""" + + +class DiscoToTargetIssuer(RequestMicroService): + def __init__(self, config:dict, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.disco_endpoints = config['disco_endpoints'] + if not isinstance(self.disco_endpoints, list) or not self.disco_endpoints: + raise DiscoToTargetIssuerError('disco_endpoints must be a list of str') + + def register_endpoints(self): + """ + URL mapping of additional endpoints this micro service needs to register for callbacks. 
+ + Example of a mapping from the url path '/callback' to the callback() method of a micro service: + reg_endp = [ + ('^/callback1$', self.callback), + ] + + :rtype List[Tuple[str, Callable[[satosa.context.Context, Any], satosa.response.Response]]] + + :return: A list with functions and args bound to a specific endpoint url, + [(regexp, Callable[[satosa.context.Context], satosa.response.Response]), ...] + """ + + return [ + (path , self._handle_disco_response) + for path in self.disco_endpoints + ] + + def _handle_disco_response(self, context:Context): + target_issuer = context.request.get('entityID') + if not target_issuer: + raise DiscoToTargetIssuerError('no valid entity_id in the disco response') + + data_serialized = context.state.get(self.name, {}).get('internal_data', {}) + data = InternalData.from_dict(data_serialized) + context.decorate(Context.KEY_TARGET_ENTITYID, target_issuer) + return super().process(context, data) diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index 9cbe4eda4..d2022bc3e 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -8,8 +8,7 @@ from satosa.exception import SATOSAError, SATOSAConfigurationError, SATOSAStateError from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed -from satosa.micro_services.custom_routing import DecideBackendByDiscoIdP -from satosa.micro_services.custom_routing import DecideBackendByTargetIdP +from satosa.micro_services.custom_routing import DecideBackendByTargetIssuer from satosa.micro_services.custom_routing import CustomRoutingError @@ -164,7 +163,7 @@ def test_missing_target_entity_id_from_context(self, context): decide_service.process(context, req) -class TestDecideBackendByTargetIdP(TestCase): +class TestDecideBackendByTargetIssuer(TestCase): def setUp(self): context = Context() context.state = State() 
@@ -174,12 +173,9 @@ def setUp(self): 'target_mapping': { 'mapped_idp.example.org': 'mapped_backend', }, - 'disco_endpoints': [ - '.*/disco', - ], } - plugin = DecideBackendByTargetIdP( + plugin = DecideBackendByTargetIssuer( config=config, name='test_decide_service', base_url='https://satosa.example.org', @@ -207,49 +203,3 @@ def test_when_target_is_mapped_choose_mapping_backend(self): data.requester = 'somebody else' newctx, newdata = self.plugin.process(self.context, data) assert newctx.target_backend == 'mapped_backend' - - -class TestDecideBackendByDiscoIdP(TestCase): - def setUp(self): - context = Context() - context.state = State() - - config = { - 'default_backend': 'default_backend', - 'target_mapping': { - 'mapped_idp.example.org': 'mapped_backend', - }, - 'disco_endpoints': [ - '.*/disco', - ], - } - - plugin = DecideBackendByDiscoIdP( - config=config, - name='test_decide_service', - base_url='https://satosa.example.org', - ) - plugin.next = lambda ctx, data: (ctx, data) - - self.config = config - self.context = context - self.plugin = plugin - - def test_when_target_is_not_set_raise_error(self): - self.context.request = {} - with pytest.raises(CustomRoutingError): - self.plugin._handle_disco_response(self.context) - - def test_when_target_is_not_mapped_choose_default_backend(self): - self.context.request = { - 'entityID': 'idp.example.org', - } - newctx, newdata = self.plugin._handle_disco_response(self.context) - assert newctx.target_backend == 'default_backend' - - def test_when_target_is_mapped_choose_mapping_backend(self): - self.context.request = { - 'entityID': 'mapped_idp.example.org', - } - newctx, newdata = self.plugin._handle_disco_response(self.context) - assert newctx.target_backend == 'mapped_backend' diff --git a/tests/satosa/micro_services/test_disco.py b/tests/satosa/micro_services/test_disco.py new file mode 100644 index 000000000..ac2c3c5c2 --- /dev/null +++ b/tests/satosa/micro_services/test_disco.py @@ -0,0 +1,44 @@ +from unittest 
import TestCase + +import pytest + +from satosa.context import Context +from satosa.state import State +from satosa.micro_services.disco import DiscoToTargetIssuer +from satosa.micro_services.disco import DiscoToTargetIssuerError + + +class TestDiscoToTargetIssuer(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'disco_endpoints': [ + '.*/disco', + ], + } + + plugin = DiscoToTargetIssuer( + config=config, + name='test_disco_to_target_issuer', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_entity_id_is_not_set_raise_error(self): + self.context.request = {} + with pytest.raises(DiscoToTargetIssuerError): + self.plugin._handle_disco_response(self.context) + + def test_when_entity_id_is_set_target_issuer_is_set(self): + entity_id = 'idp.example.org' + self.context.request = { + 'entityID': entity_id, + } + newctx, newdata = self.plugin._handle_disco_response(self.context) + assert newctx.get_decoration(Context.KEY_TARGET_ENTITYID) == entity_id From d7e45721d94ecc86501ab7eed46a20b41842501b Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 16 Jul 2021 22:04:53 +0300 Subject: [PATCH 139/288] Set target_frontend after handling the disco response When the processing of the request micro-services is finished, the context is switched from the frontend to the backend. At that point target_frontend is needed to set the state of the router. The router state will be used when the processing of the response by the response micro-services is finished, to find the appropriate frontend instance. --- The routing state is set at the point when the switch from the frontend (and request micro-service processing) is made towards the backend. 
If the discovery response was not intercepted by the DiscoToTargetIssuer micro-service, and instead was processed by the backend's disco-response handler, the target_frontend would not be needed, as the routing state would have already been set. When the DiscoToTargetIssuer micro-service intercepts the response, the point when the switch from the frontend to the backend happens will be executed again. Due to leaving the proxy, going to the discovery service and coming back to the proxy, context.target_frontend has been lost. Only the state stored within context.state persists (through the cookie). --- When the request micro-services finish processing the request, backend_routing is called, which sets the router state (context.state['ROUTER']) to target_frontend, and returns the appropriate backend instance based on target_backend. When the time comes to switch from the backend to the frontend, that state is looked up (see below). When the response micro-services finish processing the response, frontend_routing is called, which sets target_frontend from the router state (context.state['ROUTER']) and returns the appropriate frontend instance based on target_frontend. 
--- Signed-off-by: Ivan Kanakarakis --- src/satosa/micro_services/disco.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/satosa/micro_services/disco.py b/src/satosa/micro_services/disco.py index 7ea5bbe0a..274f18780 100644 --- a/src/satosa/micro_services/disco.py +++ b/src/satosa/micro_services/disco.py @@ -17,6 +17,13 @@ def __init__(self, config:dict, *args, **kwargs): if not isinstance(self.disco_endpoints, list) or not self.disco_endpoints: raise DiscoToTargetIssuerError('disco_endpoints must be a list of str') + def process(self, context:Context, data:InternalData): + context.state[self.name] = { + 'target_frontend': context.target_frontend, + 'internal_data': data.to_dict(), + } + return super().process(context, data) + def register_endpoints(self): """ URL mapping of additional endpoints this micro service needs to register for callbacks. @@ -42,7 +49,10 @@ def _handle_disco_response(self, context:Context): if not target_issuer: raise DiscoToTargetIssuerError('no valid entity_id in the disco response') + target_frontend = context.state.get(self.name, {}).get('target_frontend') data_serialized = context.state.get(self.name, {}).get('internal_data', {}) data = InternalData.from_dict(data_serialized) + + context.target_frontend = target_frontend context.decorate(Context.KEY_TARGET_ENTITYID, target_issuer) return super().process(context, data) From b78ab3ee828c4774b83a5bf1a521208c4c2af084 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 11 Jun 2021 11:51:50 +1200 Subject: [PATCH 140/288] new: SAML2 frontend+backend: support reloading metadata Using the reload_metadata method added into pysaml2 in IdentityPython/pysaml2#809, support reloading metadata when triggered via an externally exposed URL (as `/<module name>/reload-metadata`) This is off by default (URL not exposed) and needs to be explicitly enabled by setting the newly introduced config option `enable_metadata_reload` for the SAML modules to `true` (or `yes`).
The loaded config is already preserved in the modules, so can be easily used to provide a reference copy of the metadata configuration to the `reload_metadata` method. This is implemented separately for the SAML2 Backend and SAML2 Frontend (applying to all three SAML2 Frontend classes). This will complete the missing functionality identified in IdentityPython/pysaml2#808 --- .../backends/saml2_backend.yaml.example | 1 + .../frontends/saml2_frontend.yaml.example | 1 + .../saml2_virtualcofrontend.yaml.example | 2 ++ src/satosa/backends/saml2.py | 15 +++++++++++++++ src/satosa/base.py | 10 ++++++++++ src/satosa/frontends/saml2.py | 19 ++++++++++++++++++- 6 files changed, 47 insertions(+), 1 deletion(-) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 8aca6b4e5..c132e2345 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -15,6 +15,7 @@ config: memorize_idp: no use_memorized_idp_when_force_authn: no send_requester_id: no + enable_metadata_reload: no sp_config: key_file: backend.key diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index c0dffe6f6..058c7746e 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ b/example/plugins/frontends/saml2_frontend.yaml.example @@ -19,6 +19,7 @@ config: # domain: .example.com entityid_endpoint: true + enable_metadata_reload: no idp_config: organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} diff --git a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index 111dbf732..6d9a7b370 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -94,6 +94,8 @@ config: 
'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + enable_metadata_reload: no + # If configured and not false or empty the common domain cookie _saml_idp will be set # with or have appended the IdP used for authentication. The default is not to set the # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 2640fb9db..9ff5555fa 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -82,6 +82,7 @@ class SAMLBackend(BackendModule, SAMLBaseModule): KEY_SAML_DISCOVERY_SERVICE_URL = 'saml_discovery_service_url' KEY_SAML_DISCOVERY_SERVICE_POLICY = 'saml_discovery_service_policy' KEY_SP_CONFIG = 'sp_config' + KEY_METADATA = 'metadata' KEY_SEND_REQUESTER_ID = 'send_requester_id' KEY_MIRROR_FORCE_AUTHN = 'mirror_force_authn' KEY_MEMORIZE_IDP = 'memorize_idp' @@ -479,8 +480,22 @@ def register_endpoints(self): url_map.append(("^{0}".format(parsed_entity_id.path[1:]), self._metadata_endpoint)) + if self.enable_metadata_reload(): + url_map.append( + ("^%s/%s$" % (self.name, "reload-metadata"), self._reload_metadata)) + return url_map + def _reload_metadata(self, context): + """ + Reload SAML metadata + """ + logger.debug("Reloading metadata") + res = self.sp.reload_metadata(copy.deepcopy(self.config[SAMLBackend.KEY_SP_CONFIG][SAMLBackend.KEY_METADATA])) + message = "Metadata reload %s" % ("OK" if res else "failed") + status = "200 OK" if res else "500 FAILED" + return Response(message=message, status=status) + def get_metadata_desc(self): """ See super class satosa.backends.backend_base.BackendModule#get_metadata_desc diff --git a/src/satosa/base.py b/src/satosa/base.py index d458293e1..ab872654a 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -261,6 +261,7 @@ def run(self, context): class SAMLBaseModule(object): KEY_ENTITYID_ENDPOINT = 'entityid_endpoint' 
+ KEY_ENABLE_METADATA_RELOAD = 'enable_metadata_reload' KEY_ATTRIBUTE_PROFILE = 'attribute_profile' KEY_ACR_MAPPING = 'acr_mapping' VALUE_ATTRIBUTE_PROFILE_DEFAULT = 'saml' @@ -276,6 +277,15 @@ def expose_entityid_endpoint(self): value = self.config.get(self.KEY_ENTITYID_ENDPOINT, False) return bool(value) + def enable_metadata_reload(self): + """ + Check whether metadata reload has been enabled in config + + return: bool + """ + value = self.config.get(self.KEY_ENABLE_METADATA_RELOAD, False) + return bool(value) + class SAMLEIDASBaseModule(SAMLBaseModule): VALUE_ATTRIBUTE_PROFILE_DEFAULT = 'eidas' diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index c165e1027..4c1e4f313 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -64,6 +64,7 @@ class SAMLFrontend(FrontendModule, SAMLBaseModule): KEY_CUSTOM_ATTR_RELEASE = 'custom_attribute_release' KEY_ENDPOINTS = 'endpoints' KEY_IDP_CONFIG = 'idp_config' + KEY_METADATA = 'metadata' def __init__(self, auth_req_callback_func, internal_attributes, config, base_url, name): self._validate_config(config) @@ -113,12 +114,18 @@ def register_endpoints(self, backend_names): :type backend_names: list[str] :rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))] """ + url_map = [] + + if self.enable_metadata_reload(): + url_map.append( + ("^%s/%s$" % (self.name, "reload-metadata"), self._reload_metadata)) + self.idp_config = self._build_idp_config_endpoints( self.config[self.KEY_IDP_CONFIG], backend_names) # Create the idp idp_config = IdPConfig().load(copy.deepcopy(self.idp_config)) self.idp = Server(config=idp_config) - return self._register_endpoints(backend_names) + return self._register_endpoints(backend_names) + url_map def _create_state_data(self, context, resp_args, relay_state): """ @@ -484,6 +491,16 @@ def _metadata_endpoint(self, context): None).decode("utf-8") return Response(metadata_string, content="text/xml") + def 
_reload_metadata(self, context): + """ + Reload SAML metadata + """ + logger.debug("Reloading metadata") + res = self.idp.reload_metadata(copy.deepcopy(self.config[SAMLFrontend.KEY_IDP_CONFIG][SAMLFrontend.KEY_METADATA])) + message = "Metadata reload %s" % ("OK" if res else "failed") + status = "200 OK" if res else "500 FAILED" + return Response(message=message, status=status) + def _register_endpoints(self, providers): """ Register methods to endpoints From 206d55d9c951e0c3dc55f4517ae1320059fadead Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 26 Jul 2021 23:59:07 +0300 Subject: [PATCH 141/288] Remove the KEY_METADATA key SAMLBackend and SAMLFrontend KEY_* keys are reflecting top-level configuration options. KEY_METADATA is a configuration of pysaml2 objects and not controlled by the SAMLBackend and SAMLFrontend directly. This creates a nasty hardcoded dependency here. We should revamp the API of pysaml2 and cater for this need. Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 5 +++-- src/satosa/frontends/saml2.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 9ff5555fa..db8f12d50 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -82,7 +82,6 @@ class SAMLBackend(BackendModule, SAMLBaseModule): KEY_SAML_DISCOVERY_SERVICE_URL = 'saml_discovery_service_url' KEY_SAML_DISCOVERY_SERVICE_POLICY = 'saml_discovery_service_policy' KEY_SP_CONFIG = 'sp_config' - KEY_METADATA = 'metadata' KEY_SEND_REQUESTER_ID = 'send_requester_id' KEY_MIRROR_FORCE_AUTHN = 'mirror_force_authn' KEY_MEMORIZE_IDP = 'memorize_idp' @@ -491,7 +490,9 @@ def _reload_metadata(self, context): Reload SAML metadata """ logger.debug("Reloading metadata") - res = self.sp.reload_metadata(copy.deepcopy(self.config[SAMLBackend.KEY_SP_CONFIG][SAMLBackend.KEY_METADATA])) + res = self.sp.reload_metadata( + 
copy.deepcopy(self.config[SAMLBackend.KEY_SP_CONFIG]['metadata']) + ) message = "Metadata reload %s" % ("OK" if res else "failed") status = "200 OK" if res else "500 FAILED" return Response(message=message, status=status) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 4c1e4f313..cfd43af6c 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -64,7 +64,6 @@ class SAMLFrontend(FrontendModule, SAMLBaseModule): KEY_CUSTOM_ATTR_RELEASE = 'custom_attribute_release' KEY_ENDPOINTS = 'endpoints' KEY_IDP_CONFIG = 'idp_config' - KEY_METADATA = 'metadata' def __init__(self, auth_req_callback_func, internal_attributes, config, base_url, name): self._validate_config(config) @@ -496,7 +495,9 @@ def _reload_metadata(self, context): Reload SAML metadata """ logger.debug("Reloading metadata") - res = self.idp.reload_metadata(copy.deepcopy(self.config[SAMLFrontend.KEY_IDP_CONFIG][SAMLFrontend.KEY_METADATA])) + res = self.idp.reload_metadata( + copy.deepcopy(self.config[SAMLFrontend.KEY_IDP_CONFIG]['metadata']) + ) message = "Metadata reload %s" % ("OK" if res else "failed") status = "200 OK" if res else "500 FAILED" return Response(message=message, status=status) From 7e7241f7bbdccd026bab2cfd508935d11d183903 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 27 Jul 2021 00:08:05 +0300 Subject: [PATCH 142/288] Fix warnings from tests Signed-off-by: Ivan Kanakarakis --- tests/conftest.py | 2 +- tests/flows/test_account_linking.py | 4 ++-- tests/flows/test_consent.py | 4 ++-- tests/flows/test_oidc-saml.py | 4 ++-- tests/flows/test_saml-oidc.py | 4 ++-- tests/flows/test_saml-saml.py | 4 ++-- tests/flows/test_wsgi_flow.py | 6 +++--- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index bc04eb2b8..9e7a5e18f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -441,7 +441,7 @@ def get_uri(self): return 'mongodb://localhost:{port!s}'.format(port=self.port) 
-@pytest.yield_fixture +@pytest.fixture def mongodb_instance(): tmp_db = MongoTemporaryInstance() yield tmp_db diff --git a/tests/flows/test_account_linking.py b/tests/flows/test_account_linking.py index 80a87a874..94f53a431 100644 --- a/tests/flows/test_account_linking.py +++ b/tests/flows/test_account_linking.py @@ -1,6 +1,6 @@ import responses from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.satosa_config import SATOSAConfig @@ -15,7 +15,7 @@ def test_full_flow(self, satosa_config_dict, account_linking_module_config): satosa_config_dict["MICRO_SERVICES"].insert(0, account_linking_module_config) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # incoming auth req http_resp = test_client.get("/{}/{}/request".format(satosa_config_dict["BACKEND_MODULES"][0]["name"], diff --git a/tests/flows/test_consent.py b/tests/flows/test_consent.py index d2da94350..76dff496b 100644 --- a/tests/flows/test_consent.py +++ b/tests/flows/test_consent.py @@ -3,7 +3,7 @@ import responses from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.satosa_config import SATOSAConfig @@ -18,7 +18,7 @@ def test_full_flow(self, satosa_config_dict, consent_module_config): satosa_config_dict["MICRO_SERVICES"].append(consent_module_config) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # incoming auth req http_resp = test_client.get("/{}/{}/request".format(satosa_config_dict["BACKEND_MODULES"][0]["name"], diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index 90bbe1a31..c70ba5c8b 100644 --- 
a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -9,7 +9,7 @@ from saml2 import BINDING_HTTP_REDIRECT from saml2.config import IdPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -60,7 +60,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # get frontend OP config info provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) diff --git a/tests/flows/test_saml-oidc.py b/tests/flows/test_saml-oidc.py index e242ebb89..bc41acfe1 100644 --- a/tests/flows/test_saml-oidc.py +++ b/tests/flows/test_saml-oidc.py @@ -5,7 +5,7 @@ from saml2 import BINDING_HTTP_REDIRECT from saml2.config import SPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -27,7 +27,7 @@ def run_test(self, satosa_config_dict, sp_conf, oidc_backend_config, frontend_co frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # config test SP frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0]) diff --git a/tests/flows/test_saml-saml.py b/tests/flows/test_saml-saml.py index efa1a8729..91c350495 100644 --- a/tests/flows/test_saml-saml.py +++ 
b/tests/flows/test_saml-saml.py @@ -3,7 +3,7 @@ from saml2 import BINDING_HTTP_REDIRECT from saml2.config import SPConfig, IdPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -23,7 +23,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # config test SP frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0]) diff --git a/tests/flows/test_wsgi_flow.py b/tests/flows/test_wsgi_flow.py index 08d4d4a3d..fcae4ce21 100644 --- a/tests/flows/test_wsgi_flow.py +++ b/tests/flows/test_wsgi_flow.py @@ -4,7 +4,7 @@ import json from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.response import NotFound @@ -21,7 +21,7 @@ def test_flow(self, satosa_config_dict): """ Performs the test. 
""" - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # Make request to frontend resp = test_client.get('/{}/{}/request'.format("backend", "frontend")) @@ -35,7 +35,7 @@ def test_flow(self, satosa_config_dict): assert resp.data.decode('utf-8') == "Auth response received, passed to test frontend" def test_unknown_request_path(self, satosa_config_dict): - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) resp = test_client.get('/unknown') assert resp.status == NotFound._status From 8ecbcefce7dc763783febe2b83f5659627d03c0a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 8 Aug 2021 01:28:12 +0300 Subject: [PATCH 143/288] Attach request_uri, request_method and http_headers on the context - _http_headers is replaced by http_headers - _http_headers used to hold more than headers; this is now fixed - http_headers hold all headers that start with HTTP_ or REMOTE_ or SERVER_ - the query-string of a GET request is already available as context.request - IdpHinting micro-service is now using the new properties Signed-off-by: Ivan Kanakarakis --- src/satosa/micro_services/idp_hinting.py | 11 ++++++----- src/satosa/proxy_server.py | 20 ++++++++++++++------ 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py index 397852d09..54ff21190 100644 --- a/src/satosa/micro_services/idp_hinting.py +++ b/src/satosa/micro_services/idp_hinting.py @@ -1,5 +1,4 @@ import logging -from urllib.parse import parse_qs from .base import RequestMicroService from ..exception import SATOSAConfigurationError @@ -42,15 +41,17 @@ def process(self, context, data): :param data: the internal request """ target_entity_id = context.get_decoration(context.KEY_TARGET_ENTITYID) - qs_raw = 
context._http_headers['QUERY_STRING'] - if target_entity_id or not qs_raw: + query_string = context.request + + an_issuer_is_already_selected = bool(target_entity_id) + query_string_is_missing = not query_string + if an_issuer_is_already_selected or query_string_is_missing: return super().process(context, data) - qs = parse_qs(qs_raw) hints = ( entity_id for param in self.idp_hint_param_names - for entity_id in qs.get(param, [None]) + for entity_id in query_string.get(param, []) if entity_id ) hint = next(hints, None) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 1b41dabc7..1c3c2ed1a 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -65,11 +65,17 @@ def unpack_request(environ, content_length=0): return data -def unpack_http_headers(environ): - headers = ('REQUEST_METHOD', 'PATH_INFO', 'REQUEST_URI', - 'QUERY_STRING', 'SERVER_NAME', 'REMOTE_ADDR', - 'HTTP_HOST', 'HTTP_USER_AGENT', 'HTTP_ACCEPT_LANGUAGE') - return {k:v for k,v in environ.items() if k in headers} +def collect_http_headers(environ): + headers = { + header_name: header_value + for header_name, header_value in environ.items() + if ( + header_name.startswith("HTTP_") + or header_name.startswith("REMOTE_") + or header_name.startswith("SERVER_") + ) + } + return headers class ToBytesMiddleware(object): @@ -116,7 +122,9 @@ def __call__(self, environ, start_response, debug=False): body = io.BytesIO(environ['wsgi.input'].read(content_length)) environ['wsgi.input'] = body context.request = unpack_request(environ, content_length) - context._http_headers = unpack_http_headers(environ) + context.request_uri = environ.get("REQUEST_URI") + context.request_method = environ.get("REQUEST_METHOD") + context.http_headers = collect_http_headers(environ) environ['wsgi.input'].seek(0) context.cookie = environ.get("HTTP_COOKIE", "") From e2df9ba50e7731d903c7c740f7430c5579debaba Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 8 Aug 2021 01:31:41 +0300 Subject: 
[PATCH 144/288] Abstract parsing of the query string as dictionary Signed-off-by: Ivan Kanakarakis --- src/satosa/proxy_server.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 1c3c2ed1a..a902977e4 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -3,7 +3,7 @@ import logging import logging.config import sys -from urllib.parse import parse_qsl +from urllib.parse import parse_qsl as _parse_query_string from cookies_samesite_compat import CookiesSameSiteCompatMiddleware @@ -18,16 +18,19 @@ logger = logging.getLogger(__name__) +def parse_query_string(data): + query_param_pairs = _parse_query_string(data) + query_param_dict = dict(query_param_pairs) + return query_param_dict + + def unpack_get(environ): """ Unpacks a redirect request query string. :param environ: whiskey application environment. :return: A dictionary with parameters. """ - if "QUERY_STRING" in environ: - return dict(parse_qsl(environ["QUERY_STRING"])) - - return None + return parse_query_string(environ.get("QUERY_STRING")) def unpack_post(environ, content_length): @@ -39,7 +42,7 @@ def unpack_post(environ, content_length): post_body = environ['wsgi.input'].read(content_length).decode("utf-8") data = None if "application/x-www-form-urlencoded" in environ["CONTENT_TYPE"]: - data = dict(parse_qsl(post_body)) + data = parse_query_string(post_body) elif "application/json" in environ["CONTENT_TYPE"]: data = json.loads(post_body) From 031cd818329092e34ba5abef4eb97e2526061116 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 9 Aug 2021 09:02:17 +0300 Subject: [PATCH 145/288] Fix documentation heading Signed-off-by: Ivan Kanakarakis --- doc/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index b9934ce94..4a39965cc 100644 --- a/doc/README.md +++ b/doc/README.md @@ -29,7 +29,7 @@ apt-get install libffi-dev libssl-dev xmlsec1 
Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. -### External micro-services +### External micro-services Micro-services act like plugins and can be developed by anyone. Other people that have been working with the SaToSa proxy, have built extentions mainly in From 7c82d89041cedc4a4676573d9ab8e8ad8ab6c077 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Aug 2021 20:43:08 +0300 Subject: [PATCH 146/288] Pass proper encryption keys when retrieving the subject NameID This requires the latest pysaml2 to work properly, as older versions of get_subject do not accept the optional keys argument. To have this working without this changeset, one should define the pysaml2 configuration option `encryption_keypairs`. Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index db8f12d50..44f94fc8e 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -404,7 +404,7 @@ def _translate_response(self, response, state): ) # The SAML response may not include a NameID. - subject = response.get_subject() + subject = response.get_subject(keys=self.encryption_keys) name_id = subject.text if subject else None name_id_format = subject.format if subject else None From 62ac974be3f1abb3205c64d04a024633af2a3b60 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Aug 2021 00:16:06 +0300 Subject: [PATCH 147/288] Introduce explicit context property to hold the query params A POST request can have a query string. In that case context.request will hold the data from the POST request body, and thus there is no place to hold the query params. With this changeset, a new property is introduced to hold the query string, parsed as query params. The query params is a list of tuples. 
Each tuple holds two elements, the query param name and the query param value. Params with no value are dropped. ?param_w_value=123¶m_w_no_value Signed-off-by: Ivan Kanakarakis --- src/satosa/micro_services/idp_hinting.py | 14 +++++++------- src/satosa/proxy_server.py | 1 + 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py index 54ff21190..04f003ef1 100644 --- a/src/satosa/micro_services/idp_hinting.py +++ b/src/satosa/micro_services/idp_hinting.py @@ -41,18 +41,18 @@ def process(self, context, data): :param data: the internal request """ target_entity_id = context.get_decoration(context.KEY_TARGET_ENTITYID) - query_string = context.request + qs_params = context.qs_params - an_issuer_is_already_selected = bool(target_entity_id) - query_string_is_missing = not query_string - if an_issuer_is_already_selected or query_string_is_missing: + issuer_is_already_selected = bool(target_entity_id) + query_string_is_missing = not qs_params + if issuer_is_already_selected or query_string_is_missing: return super().process(context, data) hints = ( entity_id - for param in self.idp_hint_param_names - for entity_id in query_string.get(param, []) - if entity_id + for param_name in self.idp_hint_param_names + for qs_param_name, entity_id in qs_params + if param_name == qs_param_name ) hint = next(hints, None) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index a902977e4..8c2d1795c 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -128,6 +128,7 @@ def __call__(self, environ, start_response, debug=False): context.request_uri = environ.get("REQUEST_URI") context.request_method = environ.get("REQUEST_METHOD") context.http_headers = collect_http_headers(environ) + context.qs_params = parse_query_string(environ.get("QUERY_STRING")) environ['wsgi.input'].seek(0) context.cookie = environ.get("HTTP_COOKIE", "") From 
5b110d65e4f20b32046d8ca0dfbcaf805c61d2ad Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Aug 2021 20:42:14 +0300 Subject: [PATCH 148/288] Initialize Context with all its properties Signed-off-by: Ivan Kanakarakis --- src/satosa/context.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/satosa/context.py b/src/satosa/context.py index a30f67c3d..ddfdd468a 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -23,12 +23,17 @@ class Context(object): def __init__(self): self._path = None self.request = None + self.request_uri = None + self.request_method = None + self.qs_params = None + self.http_headers = None + self.cookie = None + self.request_authorization = None self.target_backend = None self.target_frontend = None self.target_micro_service = None # This dict is a data carrier between frontend and backend modules. self.internal_data = {} - self.cookie = None self.state = None @property From b50f70b45589b7f385db68f90364f9382b7ff8b8 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Aug 2021 21:52:50 +0300 Subject: [PATCH 149/288] Separate server headers from http headers Signed-off-by: Ivan Kanakarakis --- src/satosa/context.py | 1 + src/satosa/proxy_server.py | 28 +++++++++++++++++++--------- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/src/satosa/context.py b/src/satosa/context.py index ddfdd468a..60f35942b 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -26,6 +26,7 @@ def __init__(self): self.request_uri = None self.request_method = None self.qs_params = None + self.server = None self.http_headers = None self.cookie = None self.request_authorization = None diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 8c2d1795c..ce7fd1459 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -1,14 +1,15 @@ -import io import json import logging import logging.config import sys +from io import BytesIO from urllib.parse import 
parse_qsl as _parse_query_string from cookies_samesite_compat import CookiesSameSiteCompatMiddleware import satosa import satosa.logging_util as lu + from .base import SATOSABase from .context import Context from .response import ServiceError, NotFound @@ -68,6 +69,15 @@ def unpack_request(environ, content_length=0): return data +def collect_server_headers(environ): + headers = { + header_name: header_value + for header_name, header_value in environ.items() + if header_name.startswith("SERVER_") + } + return headers + + def collect_http_headers(environ): headers = { header_name: header_value @@ -75,7 +85,6 @@ def collect_http_headers(environ): if ( header_name.startswith("HTTP_") or header_name.startswith("REMOTE_") - or header_name.startswith("SERVER_") ) } return headers @@ -119,20 +128,21 @@ def __call__(self, environ, start_response, debug=False): context.path = path # copy wsgi.input stream to allow it to be re-read later by satosa plugins - # see: http://stackoverflow.com/ - # questions/1783383/how-do-i-copy-wsgi-input-if-i-want-to-process-post-data-more-than-once + # see: http://stackoverflow.com/questions/1783383/how-do-i-copy-wsgi-input-if-i-want-to-process-post-data-more-than-once content_length = int(environ.get('CONTENT_LENGTH', '0') or '0') - body = io.BytesIO(environ['wsgi.input'].read(content_length)) + body = BytesIO(environ['wsgi.input'].read(content_length)) environ['wsgi.input'] = body + context.request = unpack_request(environ, content_length) context.request_uri = environ.get("REQUEST_URI") context.request_method = environ.get("REQUEST_METHOD") - context.http_headers = collect_http_headers(environ) context.qs_params = parse_query_string(environ.get("QUERY_STRING")) - environ['wsgi.input'].seek(0) + context.server = collect_server_headers(environ) + context.http_headers = collect_http_headers(environ) + context.cookie = context.http_headers.get("HTTP_COOKIE", "") + context.request_authorization = context.http_headers.get("HTTP_AUTHORIZATION", 
"") - context.cookie = environ.get("HTTP_COOKIE", "") - context.request_authorization = environ.get("HTTP_AUTHORIZATION", "") + environ['wsgi.input'].seek(0) try: resp = self.run(context) From 7ed0774aed259c336596a34a999fa09c469cf2c3 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 27 Aug 2021 00:15:02 +0300 Subject: [PATCH 150/288] Use higher-level function to create a saml request on the saml2 backend Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 32 ++++++++++++-------------------- 1 file changed, 12 insertions(+), 20 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 44f94fc8e..770211245 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -10,7 +10,7 @@ from urllib.parse import urlparse from saml2 import BINDING_HTTP_REDIRECT -from saml2.client_base import Base +from saml2.client import Saml2Client from saml2.config import SPConfig from saml2.extension.mdui import NAMESPACE as UI_NAMESPACE from saml2.metadata import create_metadata_string @@ -109,7 +109,7 @@ def __init__(self, outgoing, internal_attributes, config, base_url, name): self.config = self.init_config(config) sp_config = SPConfig().load(copy.deepcopy(config[SAMLBackend.KEY_SP_CONFIG])) - self.sp = Base(sp_config) + self.sp = Saml2Client(sp_config) self.discosrv = config.get(SAMLBackend.KEY_DISCO_SRV) self.encryption_keys = [] @@ -272,27 +272,19 @@ def authn_request(self, context, entity_id): kwargs["scoping"] = Scoping(requester_id=[RequesterID(text=requester)]) try: - binding, destination = self.sp.pick_binding( - "single_sign_on_service", None, "idpsso", entity_id=entity_id - ) - msg = "binding: {}, destination: {}".format(binding, destination) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - acs_endp, response_binding = self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][0] - req_id, req = self.sp.create_authn_request( - 
destination, binding=response_binding, **kwargs - ) relay_state = util.rndstr() - ht_args = self.sp.apply_binding(binding, "%s" % req, destination, relay_state=relay_state) - msg = "ht_args: {}".format(ht_args) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - except Exception as exc: + req_id, binding, http_info = self.sp.prepare_for_negotiated_authenticate( + entityid=entity_id, + response_binding=response_binding, + relay_state=relay_state, + **kwargs, + ) + except Exception as e: msg = "Failed to construct the AuthnRequest for state" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline, exc_info=True) - raise SATOSAAuthenticationError(context.state, "Failed to construct the AuthnRequest") from exc + raise SATOSAAuthenticationError(context.state, "Failed to construct the AuthnRequest") from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: if req_id in self.outstanding_queries: @@ -300,10 +292,10 @@ def authn_request(self, context, entity_id): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) raise SATOSAAuthenticationError(context.state, msg) - self.outstanding_queries[req_id] = req + self.outstanding_queries[req_id] = req_id context.state[self.name] = {"relay_state": relay_state} - return make_saml_response(binding, ht_args) + return make_saml_response(binding, http_info) def authn_response(self, context, binding): """ From 199025a4e7f2a2c32e6f985281f47a1b58a2acda Mon Sep 17 00:00:00 2001 From: Giuseppe Date: Sat, 14 Aug 2021 00:31:44 +0200 Subject: [PATCH 151/288] Documentation for recently added micro-services --- doc/README.md | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index 4a39965cc..834967e7b 100644 --- a/doc/README.md +++ b/doc/README.md @@ -614,6 +614,14 @@ To choose which backend (essentially choosing target 
provider) to use based on t `DecideBackendByRequester` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/requester_based_routing.yaml.example). +#### Route to a specific backend based on the target entity id +Use the `DecideBackendByTargetIssuer` class which implements that special routing behavior. See the +[example configuration](../example/plugins/microservices/target_based_routing.yaml.example). + +If a Discovery Service have been used and the target entity id is selected by users, + also use `DiscoToTargetIssuer` together with `DecideBackendByTargetIssuer` + to get the expected result. See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). + #### Filter authentication requests to target SAML entities If using the `SAMLMirrorFrontend` module and some of the target providers should support some additional SP's, the `DecideIfRequesterIsAllowed` micro service can be used. It provides a rules mechanism to describe which SP's are @@ -679,6 +687,15 @@ persistent NameID may also be obtained from attributes returned from the LDAP di LDAP microservice install the extra necessary dependencies with `pip install satosa[ldap]` and then see the [example config](../example/plugins/microservices/ldap_attribute_store.yaml.example). +#### Minimal support for IdP Hinting + +It's possible to hint an IdP to SaToSa using `IdpHinting` microservice. See the + [example configuration](../example/plugins/microservices/idp_hinting.yaml.example). + +With this feature an SP can send an hint about the IdP to avoid the discovery + service page to the users, using a url parameter. Example + `https://[...]?[...]&idphint=https://that.idp.entity.id"` + ### Custom plugins It's possible to write custom plugins which can be loaded by SATOSA. 
They have to be contained in a Python module, @@ -734,4 +751,3 @@ set SATOSA_CONFIG=/home/user/proxy_conf.yaml ## Using Apache HTTP Server and mod\_wsgi See the [auxiliary documentation for running using mod\_wsgi](mod_wsgi.md). - From fe2100c972d93af168cc3cdabbf7659f9a214abb Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 28 Aug 2021 00:13:42 +0300 Subject: [PATCH 152/288] Normalize document formatting Signed-off-by: Ivan Kanakarakis --- doc/README.md | 74 ++++++++++++++++++++++++++++++++++----------------- 1 file changed, 49 insertions(+), 25 deletions(-) diff --git a/doc/README.md b/doc/README.md index 834967e7b..6f22af7d5 100644 --- a/doc/README.md +++ b/doc/README.md @@ -5,22 +5,26 @@ This document describes how to install and configure the SATOSA proxy. # Installation ## Docker + A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/r/satosa/), and is the recommended ways of running the proxy. ## Manual installation ### Dependencies + SATOSA requires Python 3.4 (or above), and the following packages on Ubuntu: -``` + +```bash apt-get install libffi-dev libssl-dev xmlsec1 ```` ### Instructions + 1. Download the SATOSA proxy project as a [compressed archive](https://github.com/IdentityPython/SATOSA/releases) and unpack it to ``. -1. Install the application: +2. Install the application: ```bash pip install @@ -28,7 +32,6 @@ apt-get install libffi-dev libssl-dev xmlsec1 Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. - ### External micro-services Micro-services act like plugins and can be developed by anyone. Other people @@ -53,8 +56,8 @@ The extentions include: and more. - # Configuration + SATOSA is configured using YAML. All default configuration files, as well as an example WSGI application for the proxy, can be found @@ -74,7 +77,7 @@ the value from the process environment variable of the same name. 
If the process environment has been set with `LDAP_BIND_PASSWORD=secret_password` then the configuration value for `bind_password` will be `secret_password`. -``` +```yaml bind_password: !ENV LDAP_BIND_PASSWORD ``` @@ -90,12 +93,12 @@ process environment has been set with `LDAP_BIND_PASSWORD_FILE=/etc/satosa/secrets/ldap.txt` then the configuration value for `bind_password` will be `secret_password`. -``` +```yaml bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE ``` - ## SATOSA proxy configuration: `proxy_conf.yaml.example` + | Parameter name | Data type | Example value | Description | | -------------- | --------- | ------------- | ----------- | | `BASE` | string | `https://proxy.example.com` | base url of the proxy | @@ -109,10 +112,10 @@ bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE | `MICRO_SERVICES` | string[] | `[statistics_service.yaml]` | list of plugin configuration file paths, describing enabled microservices | | `LOGGING` | dict | see [Python logging.conf](https://docs.python.org/3/library/logging.config.html) | optional configuration of application logging | - ## Attribute mapping configuration: `internal_attributes.yaml` ### attributes + The values directly under the `attributes` key are the internal attribute names. Every internal attribute has a map of profiles, which in turn has a list of external attributes names which should be mapped to the internal attributes. @@ -124,6 +127,7 @@ internal attribute. Sometimes the external attributes are nested/complex structures. 
One example is the [address claim in OpenID connect](http://openid.net/specs/openid-connect-core-1_0.html#AddressClaim) which consists of multiple sub-fields, e.g.: + ```json "address": { "formatted": "100 Universal City Plaza, Hollywood CA 91608, USA", @@ -131,7 +135,7 @@ which consists of multiple sub-fields, e.g.: "locality": "Hollywood", "region": "CA", "postal_code": "91608", - "country": "USA", + "country": "USA" } ``` @@ -166,28 +170,29 @@ attributes (in the proxy backend) <-> internal <-> returned attributes (from the * Any plugin using the `saml` profile will use the attribute value from `postaladdress` delivered from the target provider as the value for `address`. - ### user_id_from_attrs + The subject identifier generated by the backend module can be overridden by specifying a list of internal attribute names under the `user_id_from_attrs` key. The attribute values of the attributes specified in this list will be concatenated and used as the subject identifier. - ### user_id_to_attr + To store the subject identifier in a specific internal attribute, the internal attribute name can be specified in `user_id_to_attr`. When the [ALService](https://github.com/its-dirg/ALservice) is used for account linking, the `user_id_to_attr` configuration parameter should be set, since that service will overwrite the subject identifier generated by the proxy. - ## Plugins + The authentication protocol specific communication is handled by different plugins, divided into frontends (receiving requests from clients) and backends (sending requests to target providers). ### Common plugin configuration parameters + Both `name` and `module` must be specified in all plugin configurations (frontends, backends, and micro services). The `name` must be unique to ensure correct functionality, and the `module` must be the fully qualified name of an importable Python module. 
@@ -230,7 +235,6 @@ For more detailed information on how you could customize the SAML entities, see the [documentation of the underlying library pysaml2](https://github.com/rohe/pysaml2/blob/master/docs/howto/config.rst). - #### Providing `AuthnContextClassRef` SAML2 frontends and backends can provide a custom (configurable) *Authentication Context Class Reference*. @@ -252,12 +256,13 @@ provider will be preserved, and when using a OAuth or OpenID Connect backend, th **Example** - config: - [...] - acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 - +```yaml +config: + [...] + acr_mapping: + "": default-LoA + "https://accounts.google.com": LoA1 +``` #### Frontend @@ -296,8 +301,8 @@ An example configuration can be found [here](../example/plugins/frontends/saml2_ `SP -> Virtual CO SAMLFrontend -> SAMLBackend -> optional discovery service -> target IdP` - ##### Custom attribute release + In addition to respecting for example entity categories from the SAML metadata, the SAML frontend can also further restrict the attribute release with the `custom_attribute_release` configuration parameter based on the SP entity id. @@ -349,13 +354,14 @@ Overrides per SP entityID is possible by using the entityID as a key instead of in the yaml structure. The most specific key takes presedence. If no policy overrides are provided the defaults above are used. - #### Backend + The SAML2 backend act as a SAML Service Provider (SP), making authentication requests to SAML Identity Providers (IdP). The default configuration file can be found [here](../example/plugins/backends/saml2_backend.yaml.example). 
##### Name ID Format + The SAML backend can indicate which *Name ID* format it wants by specifying the key `name_id_format` in the SP entity configuration in the backend plugin configuration: @@ -368,6 +374,7 @@ The SAML backend can indicate which *Name ID* format it wants by specifying the ``` ##### Use a discovery service + To allow the user to choose which target provider they want to authenticate with, the configuration parameter `disco_srv`, must be specified if the metadata given to the backend module contains more than one IdP: @@ -433,6 +440,7 @@ config: ### OpenID Connect plugins #### Backend + The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making authentication requests to OpenID Connect Provider (OP). The default configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). @@ -444,8 +452,8 @@ When using an OP that only supports statically registered clients, see the and make sure to provide the redirect URI, constructed as described in the section about Google configuration below, in the static registration. - #### Frontend + The OpenID Connect frontend acts as and OpenID Connect Provider (OP), accepting requests from OpenID Connect Relying Parties (RPs). The default configuration file can be found [here](../example/plugins/frontends/openid_connect_frontend.yaml.example). @@ -477,10 +485,12 @@ The configuration parameters available: The other parameters should be left with their default values. ### Social login plugins + The social login plugins can be used as backends for the proxy, allowing the proxy to act as a client to the social login services. #### Google + The default configuration file can be found [here](../example/plugins/backends/google_backend.yaml.example). @@ -495,7 +505,7 @@ It should use the available variables, `` and ``, where: 1. `` is the base url of the proxy as specified in the `BASE` configuration parameter in `proxy_conf.yaml`, e.g. "https://proxy.example.com". -1. 
`` is the plugin name specified in the `name` configuration parameter defined in the plugin configuration file. +2. `` is the plugin name specified in the `name` configuration parameter defined in the plugin configuration file. The example config in `google_backend.yaml.example`: @@ -507,14 +517,15 @@ config: redirect_uris: [/] [...] ``` + together with `BASE: "https://proxy.example.com"` in `proxy_conf.yaml` would yield the redirect URI `https://proxy.example.com/google` to register with Google. A list of all claims possibly released by Google can be found [here](https://developers.google.com/identity/protocols/OpenIDConnect#obtainuserinfo), which should be used when configuring the attribute mapping (see above). - #### Facebook + The default configuration file can be found [here](../example/plugins/backends/facebook_backend.yaml.example). @@ -549,7 +560,7 @@ pre-configured (static) attributes, see the The static attributes are described as key-value pairs in the YAML file, e.g: -``` +```yaml organisation: Example Org. country: Sweden ``` @@ -574,8 +585,10 @@ The filters are applied such that all attribute values matched by the regular ex non-matching attribute values will be discarded. 
##### Examples + Filter attributes from the target provider `https://provider.example.com`, to only preserve values starting with the string `"foo:bar"`: + ```yaml "https://provider.example.com": "": @@ -583,6 +596,7 @@ string `"foo:bar"`: ``` Filter the attribute `attr1` to only preserve values ending with the string `"foo:bar"`: + ```yaml "": "": @@ -591,6 +605,7 @@ Filter the attribute `attr1` to only preserve values ending with the string `"fo Filter the attribute `attr1` to the requester `https://provider.example.com`, to only preserver values containing the string `"foo:bar"`: + ```yaml "": "https://client.example.com": @@ -601,6 +616,7 @@ the string `"foo:bar"`: Attributes delivered from the target provider can be filtered based on a list of allowed attributes per requester using the `AttributePolicy` class: + ```yaml attribute_policy: : @@ -610,11 +626,13 @@ attribute_policy: ``` #### Route to a specific backend based on the requester + To choose which backend (essentially choosing target provider) to use based on the requester, use the `DecideBackendByRequester` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/requester_based_routing.yaml.example). #### Route to a specific backend based on the target entity id + Use the `DecideBackendByTargetIssuer` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/target_based_routing.yaml.example). @@ -623,6 +641,7 @@ If a Discovery Service have been used and the target entity id is selected by us to get the expected result. See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). #### Filter authentication requests to target SAML entities + If using the `SAMLMirrorFrontend` module and some of the target providers should support some additional SP's, the `DecideIfRequesterIsAllowed` micro service can be used. 
It provides a rules mechanism to describe which SP's are allowed to send requests to which IdP's. See the [example configuration](../example/plugins/microservices/allowed_requesters.yaml.example). @@ -633,6 +652,7 @@ Metadata containing all SP's (any SP that might be allowed by a target IdP) must The rules are described using `allow` and `deny` directives under the `rules` configuration parameter. In the following example, the target IdP `target_entity_id1` only allows requests from `requester1` and `requester2`. + ```yaml rules: target_entity_id1: @@ -643,6 +663,7 @@ SP's are by default denied if the IdP has any rules associated with it (i.e, the However, if the IdP does not have any rules associated with its entity id, all SP's are by default allowed. Deny all but one SP: + ```yaml rules: target_entity_id1: @@ -651,6 +672,7 @@ rules: ``` Allow all but one SP: + ```yaml rules: target_entity_id1: @@ -733,9 +755,11 @@ full featured general purpose web server (in a reverse proxy architecture) such Apache HTTP Server to help buffer slow clients and enable more sophisticated error page rendering. Start the proxy server with the following command: + ```bash gunicorn -b satosa.wsgi:app --keyfile= --certfile= ``` + where * `socket address` is the socket address that `gunicorn` should bind to for incoming requests, e.g. `0.0.0.0:8080` * `https key` is the path to the private key to use for HTTPS, e.g. 
`pki/key.pem` From 104aa87cb535533cfa3252fccdfb272f6b2bb5fb Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 28 Aug 2021 00:25:47 +0300 Subject: [PATCH 153/288] Amend docs Signed-off-by: Ivan Kanakarakis --- doc/README.md | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/doc/README.md b/doc/README.md index 6f22af7d5..d058d6ad7 100644 --- a/doc/README.md +++ b/doc/README.md @@ -636,9 +636,12 @@ To choose which backend (essentially choosing target provider) to use based on t Use the `DecideBackendByTargetIssuer` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/target_based_routing.yaml.example). -If a Discovery Service have been used and the target entity id is selected by users, - also use `DiscoToTargetIssuer` together with `DecideBackendByTargetIssuer` - to get the expected result. See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). +#### Route to a specific backend based on the discovery service response + +If a Discovery Service is in use and a target entity id is selected by users, you may want to use the +`DiscoToTargetIssuer` class together with `DecideBackendByTargetIssuer` to be able to select a +backend (essentially choosing target provider) based on the response from the discovery service. +See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). #### Filter authentication requests to target SAML entities @@ -709,14 +712,15 @@ persistent NameID may also be obtained from attributes returned from the LDAP di LDAP microservice install the extra necessary dependencies with `pip install satosa[ldap]` and then see the [example config](../example/plugins/microservices/ldap_attribute_store.yaml.example). -#### Minimal support for IdP Hinting +#### Support for IdP Hinting -It's possible to hint an IdP to SaToSa using `IdpHinting` microservice. 
See the - [example configuration](../example/plugins/microservices/idp_hinting.yaml.example). +It's possible to hint an IdP to SaToSa using the `IdpHinting` micro-service. -With this feature an SP can send an hint about the IdP to avoid the discovery - service page to the users, using a url parameter. Example - `https://[...]?[...]&idphint=https://that.idp.entity.id"` +With this feature an SP can send a hint about the IdP that should be used, in order to skip the discovery service. +The hint as a parameter in the query string of the request. +The hint query parameter value must be the entityID of the IdP. +The hint query parameter name is specified in the micro-service configuation. +See the [example configuration](../example/plugins/microservices/idp_hinting.yaml.example). ### Custom plugins From 1de6a360d52369e01a40d7d0400af20735d7e899 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 28 Aug 2021 22:02:34 +0300 Subject: [PATCH 154/288] Add authenticating authority as part of the internal AuthenticationInformation object Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 15 +++++++++------ src/satosa/internal.py | 9 ++++++++- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 770211245..7118ea007 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -384,15 +384,18 @@ def _translate_response(self, response, state): # The response may have been encrypted by the IdP so if we have an # encryption key, try it. 
if self.encryption_keys: - response.parse_assertion(self.encryption_keys) + response.parse_assertion(keys=self.encryption_keys) - authn_info = response.authn_info()[0] - auth_class_ref = authn_info[0] - timestamp = response.assertion.authn_statement[0].authn_instant issuer = response.response.issuer.text - + authn_context_ref, authenticating_authorities, authn_instant = next( + iter(response.authn_info()), [None, None, None] + ) + authenticating_authority = next(iter(authenticating_authorities), None) auth_info = AuthenticationInformation( - auth_class_ref, timestamp, issuer, + auth_class_ref=authn_context_ref, + timestamp=authn_instant, + authority=authenticating_authority, + issuer=issuer, ) # The SAML response may not include a NameID. diff --git a/src/satosa/internal.py b/src/satosa/internal.py index 38b82acfb..24de31890 100644 --- a/src/satosa/internal.py +++ b/src/satosa/internal.py @@ -85,7 +85,13 @@ class AuthenticationInformation(_Datafy): """ def __init__( - self, auth_class_ref=None, timestamp=None, issuer=None, *args, **kwargs + self, + auth_class_ref=None, + timestamp=None, + issuer=None, + authority=None, + *args, + **kwargs, ): """ Initiate the data carrier @@ -102,6 +108,7 @@ def __init__( self.auth_class_ref = auth_class_ref self.timestamp = timestamp self.issuer = issuer + self.authority = authority class InternalData(_Datafy): From 5d0601453e20e8145902116862780327d6950c2e Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 28 Aug 2021 22:45:02 +0300 Subject: [PATCH 155/288] Revert "Pass proper encryption keys when retrieving the subject NameID" This reverts commit 7c82d89041cedc4a4676573d9ab8e8ad8ab6c077. 
--- src/satosa/backends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 7118ea007..bd4733275 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -399,7 +399,7 @@ def _translate_response(self, response, state): ) # The SAML response may not include a NameID. - subject = response.get_subject(keys=self.encryption_keys) + subject = response.get_subject() name_id = subject.text if subject else None name_id_format = subject.format if subject else None From b1ea01d8f6e18eb53fb66438e6058fa2bf3d9ced Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sat, 28 Aug 2021 22:44:19 +0300 Subject: [PATCH 156/288] Reflect the encryption_keypairs in the saml client configuration See also commit 7c82d89041cedc4a4676573d9ab8e8ad8ab6c077 which was reverted for backwards compatibility reasons by commit 5d0601453e20e8145902116862780327d6950c2e The original goal was: > Pass proper encryption keys when retrieving the subject NameID > > This requires the latest pysaml2 to work properly, as older versions of > get_subject do not accept the optional keys argument. > > To have this working without this changeset, one should define the > pysaml2 configuration option `encryption_keypairs`. We are now opting the solution without the above changeset (it was reverted) to keep backwards compatibility. 
Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index bd4733275..8b726edf9 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -108,27 +108,38 @@ def __init__(self, outgoing, internal_attributes, config, base_url, name): super().__init__(outgoing, internal_attributes, base_url, name) self.config = self.init_config(config) - sp_config = SPConfig().load(copy.deepcopy(config[SAMLBackend.KEY_SP_CONFIG])) - self.sp = Saml2Client(sp_config) - self.discosrv = config.get(SAMLBackend.KEY_DISCO_SRV) self.encryption_keys = [] self.outstanding_queries = {} self.idp_blacklist_file = config.get('idp_blacklist_file', None) - sp_keypairs = sp_config.getattr('encryption_keypairs', '') - sp_key_file = sp_config.getattr('key_file', '') - if sp_keypairs: - key_file_paths = [pair['key_file'] for pair in sp_keypairs] - elif sp_key_file: - key_file_paths = [sp_key_file] - else: - key_file_paths = [] + sp_config = SPConfig().load(copy.deepcopy(config[SAMLBackend.KEY_SP_CONFIG])) + + # if encryption_keypairs is defined, use those keys for decryption + # else, if key_file and cert_file are defined, use them for decryption + # otherwise, do not use any decryption key. + # ensure the choice is reflected back in the configuration. 
+ sp_conf_encryption_keypairs = sp_config.getattr('encryption_keypairs', '') + sp_conf_key_file = sp_config.getattr('key_file', '') + sp_conf_cert_file = sp_config.getattr('cert_file', '') + sp_keypairs = ( + sp_conf_encryption_keypairs + if sp_conf_encryption_keypairs + else [{'key_file': sp_conf_key_file, 'cert_file': sp_conf_cert_file}] + if sp_conf_key_file and sp_conf_cert_file + else [] + ) + sp_config.setattr('', 'encryption_keypairs', sp_keypairs) + # load the encryption keys + key_file_paths = [pair['key_file'] for pair in sp_keypairs] for p in key_file_paths: with open(p) as key_file: self.encryption_keys.append(key_file.read()) + # finally, initialize the client object + self.sp = Saml2Client(sp_config) + def get_idp_entity_id(self, context): """ :type context: satosa.context.Context From d8cc20817c971fa0d25ddbde99d833948fdd02ff Mon Sep 17 00:00:00 2001 From: Ali Haider Date: Mon, 30 Aug 2021 10:16:50 +0200 Subject: [PATCH 157/288] OpenIDConnectFrontend jwks endpoint should also expose "kid" if configured via the new "signing_key_id" configuration parameter in the openid_connect_frontend.yaml. 
--- .../plugins/frontends/openid_connect_frontend.yaml.example | 1 + src/satosa/frontends/openid_connect.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index bc941bd1c..6c74b2d4c 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -2,6 +2,7 @@ module: satosa.frontends.openid_connect.OpenIDConnectFrontend name: OIDC config: signing_key_path: frontend.key + signing_key_id: frontend.key1 db_uri: mongodb://db.example.com # optional: only support MongoDB, will default to in-memory storage if not specified client_db_path: /path/to/your/cdb.json sub_hash_salt: randomSALTvalue # if not specified, it is randomly generated on every startup diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 1acc80583..8bd1319f7 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -44,7 +44,8 @@ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, super().__init__(auth_req_callback_func, internal_attributes, base_url, name) self.config = conf - self.signing_key = RSAKey(key=rsa_load(conf["signing_key_path"]), use="sig", alg="RS256") + self.signing_key = RSAKey(key=rsa_load(conf["signing_key_path"]), use="sig", alg="RS256", + kid=conf.get("signing_key_id", "")) def _create_provider(self, endpoint_baseurl): response_types_supported = self.config["provider"].get("response_types_supported", ["id_token"]) @@ -240,6 +241,10 @@ def _validate_config(self, config): if k not in config: raise ValueError("Missing configuration parameter '{}' for OpenID Connect frontend.".format(k)) + if "signing_key_id" in config and type(config["signing_key_id"]) is not str: + raise ValueError( + "The configuration parameter 'signing_key_id' is not 
defined as a string for OpenID Connect frontend.") + def _get_authn_request_from_state(self, state): """ Extract the clietns request stoed in the SATOSA state. From 89741a8b349df69f6974c64d1a8ebb0561fde12c Mon Sep 17 00:00:00 2001 From: Giuseppe Date: Tue, 31 Aug 2021 14:10:31 +0200 Subject: [PATCH 158/288] IdP hinting improvements - fix: exception accessing to qs_params - chore: improved exacmple configuration - feat: added unit tests --- .../microservices/idp_hinting.yaml.example | 1 + src/satosa/micro_services/idp_hinting.py | 3 +- .../satosa/micro_services/test_idp_hinting.py | 40 +++++++++++++++++++ 3 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 tests/satosa/micro_services/test_idp_hinting.py diff --git a/example/plugins/microservices/idp_hinting.yaml.example b/example/plugins/microservices/idp_hinting.yaml.example index 9238f3c55..8dbc26932 100644 --- a/example/plugins/microservices/idp_hinting.yaml.example +++ b/example/plugins/microservices/idp_hinting.yaml.example @@ -4,3 +4,4 @@ config: allowed_params: - idp_hinting - idp_hint + - idphint diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py index 04f003ef1..3d3694cfc 100644 --- a/src/satosa/micro_services/idp_hinting.py +++ b/src/satosa/micro_services/idp_hinting.py @@ -51,10 +51,9 @@ def process(self, context, data): hints = ( entity_id for param_name in self.idp_hint_param_names - for qs_param_name, entity_id in qs_params + for qs_param_name, entity_id in qs_params.items() if param_name == qs_param_name ) hint = next(hints, None) - context.decorate(context.KEY_TARGET_ENTITYID, hint) return super().process(context, data) diff --git a/tests/satosa/micro_services/test_idp_hinting.py b/tests/satosa/micro_services/test_idp_hinting.py new file mode 100644 index 000000000..06b96bd69 --- /dev/null +++ b/tests/satosa/micro_services/test_idp_hinting.py @@ -0,0 +1,40 @@ +from unittest import TestCase + +import pytest + +from satosa.context import 
Context +from satosa.state import State +from satosa.micro_services.idp_hinting import IdpHinting + + +class TestIdpHinting(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'allowed_params': ["idp_hinting", "idp_hint", "idphint"] + } + + plugin = IdpHinting( + config=config, + name='test_idphinting', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_idp_hinting(self): + self.context.request = {} + _target = 'https://localhost:8080' + self.context.qs_params = {'idphint': _target} + res = self.plugin.process(self.context, data={}) + assert res[0].internal_data.get('target_entity_id') == _target + + def test_no_idp_hinting(self): + self.context.request = {} + res = self.plugin.process(self.context, data={}) + assert not res[0].internal_data.get('target_entity_id') From fdc5bfd29b31c4ee3b97bb3c55c2ad606085df76 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 31 Aug 2021 16:33:10 +0300 Subject: [PATCH 159/288] Add more tests for the idp hinting micro-service Signed-off-by: Ivan Kanakarakis --- src/satosa/micro_services/idp_hinting.py | 1 + .../satosa/micro_services/test_idp_hinting.py | 33 ++++++++++++++----- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py index 3d3694cfc..90569d706 100644 --- a/src/satosa/micro_services/idp_hinting.py +++ b/src/satosa/micro_services/idp_hinting.py @@ -55,5 +55,6 @@ def process(self, context, data): if param_name == qs_param_name ) hint = next(hints, None) + context.decorate(context.KEY_TARGET_ENTITYID, hint) return super().process(context, data) diff --git a/tests/satosa/micro_services/test_idp_hinting.py b/tests/satosa/micro_services/test_idp_hinting.py index 06b96bd69..a13d3d7a3 100644 --- a/tests/satosa/micro_services/test_idp_hinting.py +++ 
b/tests/satosa/micro_services/test_idp_hinting.py @@ -3,6 +3,7 @@ import pytest from satosa.context import Context +from satosa.internal import InternalData from satosa.state import State from satosa.micro_services.idp_hinting import IdpHinting @@ -11,6 +12,7 @@ class TestIdpHinting(TestCase): def setUp(self): context = Context() context.state = State() + internal_data = InternalData() config = { 'allowed_params': ["idp_hinting", "idp_hint", "idphint"] @@ -25,16 +27,31 @@ def setUp(self): self.config = config self.context = context + self.data = internal_data self.plugin = plugin - def test_idp_hinting(self): - self.context.request = {} + def test_no_query_params(self): + self.context.qs_params = {} + new_context, new_data = self.plugin.process(self.context, self.data) + assert not new_context.get_decoration(Context.KEY_TARGET_ENTITYID) + + def test_hint_in_params(self): _target = 'https://localhost:8080' self.context.qs_params = {'idphint': _target} - res = self.plugin.process(self.context, data={}) - assert res[0].internal_data.get('target_entity_id') == _target + new_context, new_data = self.plugin.process(self.context, self.data) + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) == _target + + def test_no_hint_in_params(self): + _target = 'https://localhost:8080' + self.context.qs_params = {'param_not_in_allowed_params': _target} + new_context, new_data = self.plugin.process(self.context, self.data) + assert not new_context.get_decoration(Context.KEY_TARGET_ENTITYID) - def test_no_idp_hinting(self): - self.context.request = {} - res = self.plugin.process(self.context, data={}) - assert not res[0].internal_data.get('target_entity_id') + def test_issuer_already_set(self): + _pre_selected_target = 'https://local.localhost:8080' + self.context.decorate(Context.KEY_TARGET_ENTITYID, _pre_selected_target) + _target = 'https://localhost:8080' + self.context.qs_params = {'idphint': _target} + new_context, new_data = self.plugin.process(self.context, 
self.data) + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) == _pre_selected_target + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) != _target From 027a4219d7ba8143b9de62fe1efc736b845622e6 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 6 Sep 2021 15:19:00 +0300 Subject: [PATCH 160/288] Switch from pystache to chevron as the dep to render mustache templates Signed-off-by: Ivan Kanakarakis --- setup.py | 2 +- src/satosa/micro_services/attribute_generation.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 1f6adcaf9..7b2be2673 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ "gunicorn", "Werkzeug", "click", - "pystache", + "chevron", "cookies-samesite-compat", ], extras_require={ diff --git a/src/satosa/micro_services/attribute_generation.py b/src/satosa/micro_services/attribute_generation.py index 7c99a8fa7..d96d8e1e1 100644 --- a/src/satosa/micro_services/attribute_generation.py +++ b/src/satosa/micro_services/attribute_generation.py @@ -1,5 +1,5 @@ import re -import pystache +from chevron import render as render_mustache from .base import ResponseMicroService from ..util import get_dict_defaults @@ -136,7 +136,7 @@ def _synthesize(self, attributes, requester, provider): for attr_name, fmt in recipes.items(): syn_attributes[attr_name] = [ value - for token in re.split("[;\n]+", pystache.render(fmt, context)) + for token in re.split("[;\n]+", render_mustache(fmt, context)) for value in [token.strip().strip(';')] if value ] From 003881baffcd26ae2e4e3d89ba74184a7175fa4b Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 7 Sep 2021 14:58:36 +0300 Subject: [PATCH 161/288] Release version 8.0.0 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 95 ++++++++++++++++++++++++++++++++++++++++++++++++ setup.py | 2 +- 3 files changed, 97 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index ebebf4aed..5b192d1fa 100644 
--- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 7.0.3 +current_version = 8.0.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 812e5a303..47a12d345 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,100 @@ # Changelog +## 8.0.0 (2021-08-08) + +This is a breaking release, if you were using the openid_connect frontend. To +keep compatibility: + +1. Install the proxy with `pip install satosa[pyop_mongo]` in order to fetch + the right dependencies. +2. If you were not using the `client_db_path` option then set the new option + `client_db_uri` to the value of `db_uri`. + +- The internal data now hold the authenticating authority as part of the + AuthenticationInformation object + (`satosa.internal::AuthenticationInformation::authority`). +- The Context object now holds a dictionary of query string params + (`context.qs_params`). +- The Context object now holds a dictionary of http headers + (`context.http_headers`). +- The Context object now holds a dictionary of server headers + (`context.server_headers`). +- The Context object now holds the request method (`context.request_method`). +- The Context object now holds the request uri (`context.request_uri`). +- The Context object now holds a dictionary of http headers. +- frontends: the openid_connect frontend has a new configuration option + `signing_key_id` to set the `kid` field on the jwks endpoint. +- frontends: the openid_connect frontend dependency `pyop` has been updated + to work with both Redis and MongoDB. This changed how its dependencies are + set. This is reflected in this package's new extras that can be set to + `pyop_mongo` (to preserve the previous behaviour), or `pyop_redis`. +- frontends: the openid_connect frontend filters out unset claims. 
+- frontends: the openid_connect frontend has a new option + `extra_id_token_claims` to define in the config per client which extra claims + should be added to the ID Token to also work with those clients. +- frontends: the openid_connect frontend has a new option `client_db_uri` to + specify a database connection string for the client database. If unset, + `client_db_path` will be used to load the clients from a file. + Previously, the option `db_uri` was used to set the client database string. + If you were relying on this behaviour, add the `client_db_uri` option with + the same value as `db_uri`. +- frontends: document the `client_db_path` option for openid_connect +- frontends: the openid_connect frontend has a new configuration option + `id_token_lifetime` to set the lifetime of the ID token in seconds. +- frontends: the saml2 frontend has a new option `enable_metadata_reload` to + expose an endpoint (`//reload-metadata`) that allows external + triggers to reload the frontend's metadata. This setting is disabled by + default. It is up to the user to protect the endpoint if enabled. This + feature requires pysaml2 > 7.0.1 +- backends: the saml2 backend derives the encryption keys based on the + `encryption_keypairs` configuration option, otherwise falling back to + the `key_file` and `cert_file` pair. This is now reflected in the internal + pysaml2 configuration. +- backends: the saml2 backend `sp` property is now of type + `saml2.client::Saml2Client` instead of `saml2.client_base::Base`. This allows + us to call the higer level method + `saml2.client::Saml2Client::prepare_for_negotiated_authenticate` instead of + `saml2.client_base::Base::create_authn_request` to properly behave when + needing to sign the AuthnRequest using the Redirect binding. +- backends: the saml2 backend has a new option `enable_metadata_reload` to + expose an endpoint (`//reload-metadata`) that allows external + triggers to reload the backend's metadata. 
This setting is disabled by + default. It is up to the user to protect the endpoint if enabled. This + feature requires pysaml2 > 7.0.1 +- backends: new ReflectorBackend to help with frontend debugging easier and + developing quicker. +- backends: the saml2 backend has a new configuration option + `send_requester_id` to specify whether Scoping/RequesterID element should be + part of the AuthnRequest. +- micro-services: new DecideBackendByTargetIssuer micro-service, to select + a target backend based on the target issuer. +- micro-services: new DiscoToTargetIssuer micro-service, to set the discovery + protocol response to be the target issuer. +- micro-services: new IdpHinting micro-service, to detect if an idp-hinting + feature has been requested and set the target entityID. Enabling this + micro-service will result in skipping the discovery service and using the + specified entityID as the IdP to be used. The IdP entityID is expected to be + specified as a query-param value on the authentication request. +- micro-services: new AttributePolicy micro-service, which is able to force + attribute policies for requester by limiting results to a predefined set of + allowed attributes. +- micro-services: the PrimaryIdentifier micro-service has a new option + `replace_subject_id` to specify whether to replace the `subject_id` with the + constructed primary identifier. +- micro-services: PrimaryIdentifier is set only if there is a value. +- micro-services: AddSyntheticAttributes has various small fixes. +- micro-services: ScopeExtractorProcessor can handle string values. +- dependencies: the `pystache` package has been replaced by `chevron`, as + `pystache` seems to be abandoned and will not work with python v3.10 and + `setuptools` v58 or newer. This package is a dependency of the + `satosa.micro_services.attribute_generation.AddSyntheticAttributes` + micro-service. +- tests: MongoDB flags have been updated to cater for deprecated flags. 
+- docs: updated with information about the newly added micro-services. +- docs: various typo fixes. +- docs: various example configuration fixes. + + ## 7.0.3 (2021-01-21) - dependencies: Set minimum pysaml2 version to v6.5.1 to fix internal XML diff --git a/setup.py b/setup.py index 7b2be2673..44ab5ee74 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='7.0.3', + version='8.0.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 3ef09280ce2ff2efa3b3b13dc538b3b1b4a1917c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 7 Sep 2021 15:11:21 +0300 Subject: [PATCH 162/288] Fix changlog entry date Signed-off-by: Ivan Kanakarakis --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 47a12d345..acea1c0dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## 8.0.0 (2021-08-08) +## 8.0.0 (2021-09-07) This is a breaking release, if you were using the openid_connect frontend. 
To keep compatibility: From e5bd7b6b98316ea896515735a9ce49da8beddaf8 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 8 Sep 2021 12:10:28 +1200 Subject: [PATCH 163/288] new: examples: add attribute_policy.yaml.example --- .../microservices/attribute_policy.yaml.example | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 example/plugins/microservices/attribute_policy.yaml.example diff --git a/example/plugins/microservices/attribute_policy.yaml.example b/example/plugins/microservices/attribute_policy.yaml.example new file mode 100644 index 000000000..3a32c78df --- /dev/null +++ b/example/plugins/microservices/attribute_policy.yaml.example @@ -0,0 +1,12 @@ +module: satosa.micro_services.attribute_policy.AttributePolicy +name: AttributePolicy +config: + attribute_policy: + : + allowed: + - mail + - name + - givenname + - surname + + From bf39e0688fa26bd3f6fb52a72ff49311d8c6407f Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 10 Sep 2021 14:06:50 +1200 Subject: [PATCH 164/288] fix: do not pass extra arg to logging.error SATOSA formats all log messages explicitly before passing them to the logger. Python logging formats messages if it receives extra args in the call, otherwise pass them straight through. This call to logger.error in _run_bound_endpoint was (accidentally) passing an extra argument error.state, causing logging to do another round of formatting on an already formatted message. This is dangerous, as the text of the (already formatted) message may contain externally supplied data - such as the redirect URI with URI-encoded data like %3A#2F (which in best part just throw another exception - "Unknown formatting character A") State is already included in the explicit message formatting, so the extra argument here should be safe to remove. 
--- src/satosa/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index ab872654a..7468a4ca0 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -185,7 +185,7 @@ def _run_bound_endpoint(self, context, spec): err_id=error.error_id, state=state ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, error.state, exc_info=True) + logger.error(logline, exc_info=True) return self._handle_satosa_authentication_error(error) def _load_state(self, context): From e7f281c2418902f3a00bed88b311a670cd938136 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 14 Sep 2021 18:14:58 +0300 Subject: [PATCH 165/288] Allow request micro-services to affect the authn-context-class-ref that the backend will generate Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 17 ++++++++++------- src/satosa/context.py | 1 + 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 8b726edf9..a3aa210d3 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -225,11 +225,11 @@ def disco_query(self, context): ) return SeeOther(loc) - def construct_requested_authn_context(self, entity_id): - if not self.acr_mapping: - return None - - acr_entry = util.get_dict_defaults(self.acr_mapping, entity_id) + def construct_requested_authn_context(self, entity_id, *, target_accr=None): + acr_entry = ( + target_accr + or util.get_dict_defaults(self.acr_mapping or {}, entity_id) + ) if not acr_entry: return None @@ -241,7 +241,9 @@ def construct_requested_authn_context(self, entity_id): authn_context = requested_authn_context( acr_entry['class_ref'], comparison=acr_entry.get( - 'comparison', self.VALUE_ACR_COMPARISON_DEFAULT)) + 'comparison', self.VALUE_ACR_COMPARISON_DEFAULT + ) + ) return authn_context @@ -271,7 +273,8 @@ def authn_request(self, context, entity_id): raise 
SATOSAAuthenticationError(context.state, "Selected IdP is blacklisted for this backend") kwargs = {} - authn_context = self.construct_requested_authn_context(entity_id) + target_accr = context.state.get(Context.KEY_TARGET_AUTHN_CONTEXT_CLASS_REF) + authn_context = self.construct_requested_authn_context(entity_id, target_accr=target_accr) if authn_context: kwargs["requested_authn_context"] = authn_context if self.config.get(SAMLBackend.KEY_MIRROR_FORCE_AUTHN): diff --git a/src/satosa/context.py b/src/satosa/context.py index 60f35942b..1cf140586 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -19,6 +19,7 @@ class Context(object): KEY_FORCE_AUTHN = 'force_authn' KEY_MEMORIZED_IDP = 'memorized_idp' KEY_AUTHN_CONTEXT_CLASS_REF = 'authn_context_class_ref' + KEY_TARGET_AUTHN_CONTEXT_CLASS_REF = 'target_authn_context_class_ref' def __init__(self): self._path = None From d409448433fcdf74f6ac0326f67b4484e0382cd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Fri, 1 Oct 2021 10:36:35 +0200 Subject: [PATCH 166/288] docs: fix display_name in Apple backend example --- example/plugins/backends/apple_backend.yaml.example | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/example/plugins/backends/apple_backend.yaml.example b/example/plugins/backends/apple_backend.yaml.example index 4426c8cc4..bae8e5673 100644 --- a/example/plugins/backends/apple_backend.yaml.example +++ b/example/plugins/backends/apple_backend.yaml.example @@ -25,5 +25,4 @@ config: - ['Apple Inc.', 'en'] ui_info: display_name: - - lang: en - text: 'Sign in with Apple' + - ['Sign in with Apple', 'en'] From 5a9447e1c4f7bafd0473b029c9860ab1fcaae83b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Wed, 6 Oct 2021 11:53:59 +0200 Subject: [PATCH 167/288] fix: correct return variable in github backend convert GitHub id into string to avoid TypeError --- src/satosa/backends/github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/src/satosa/backends/github.py b/src/satosa/backends/github.py index 1da9dadbe..b04906f56 100644 --- a/src/satosa/backends/github.py +++ b/src/satosa/backends/github.py @@ -108,4 +108,4 @@ def user_information(self, access_token): r = requests.get(url, headers=headers) ret = r.json() ret['id'] = str(ret['id']) - return r.json() + return ret From 731dd496f4de7ce8f8a0f2647c8f74be883cbab5 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 18 Oct 2021 15:17:39 +0300 Subject: [PATCH 168/288] Keep the last authority from the authenticating authority list Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index a3aa210d3..f138648a9 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -404,7 +404,11 @@ def _translate_response(self, response, state): authn_context_ref, authenticating_authorities, authn_instant = next( iter(response.authn_info()), [None, None, None] ) - authenticating_authority = next(iter(authenticating_authorities), None) + authenticating_authority = ( + authenticating_authorities[-1] + if authenticating_authorities + else None + ) auth_info = AuthenticationInformation( auth_class_ref=authn_context_ref, timestamp=authn_instant, From db4c551c6376a64a9d8b6c0ad6032726e1040163 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 1 Nov 2021 23:10:21 +0200 Subject: [PATCH 169/288] Bump minimum pyop version to handle invalid redirect-uris correctly Signed-off-by: Ivan Kanakarakis --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 44ab5ee74..12ffacf72 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ packages=find_packages('src/'), package_dir={'': 'src'}, install_requires=[ - "pyop >= 3.2.0", + "pyop >= 3.3.1", "pysaml2 >= 6.5.1", "pycryptodomex", "requests", From 8a096d52fc146a2cd0d8d2ef70c46d999389ce81 Mon 
Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 2 Nov 2021 00:31:05 +0200 Subject: [PATCH 170/288] Reinitialize state if error occurs while loading state Signed-off-by: Ivan Kanakarakis --- src/satosa/state.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/src/satosa/state.py b/src/satosa/state.py index 6aaa5154b..7feba1a9e 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -13,6 +13,7 @@ from lzma import LZMACompressor from lzma import LZMADecompressor +from lzma import LZMAError from Cryptodome import Random from Cryptodome.Cipher import AES @@ -186,15 +187,27 @@ def __init__(self, urlstate_data=None, encryption_key=None): raise ValueError("If an 'urlstate_data' is supplied 'encrypt_key' must be specified.") if urlstate_data: - urlstate_data = urlstate_data.encode("utf-8") - urlstate_data = base64.urlsafe_b64decode(urlstate_data) - lzma = LZMADecompressor() - urlstate_data = lzma.decompress(urlstate_data) - urlstate_data = _AESCipher(encryption_key).decrypt(urlstate_data) - lzma = LZMADecompressor() - urlstate_data = lzma.decompress(urlstate_data) - urlstate_data = urlstate_data.decode("UTF-8") - urlstate_data = json.loads(urlstate_data) + try: + urlstate_data_bytes = urlstate_data.encode("utf-8") + urlstate_data_b64decoded = base64.urlsafe_b64decode(urlstate_data_bytes) + lzma = LZMADecompressor() + urlstate_data_decompressed = lzma.decompress(urlstate_data_b64decoded) + urlstate_data_decrypted = _AESCipher(encryption_key).decrypt( + urlstate_data_decompressed + ) + lzma = LZMADecompressor() + urlstate_data_decrypted_decompressed = lzma.decompress(urlstate_data_decrypted) + urlstate_data_obj = json.loads(urlstate_data_decrypted_decompressed) + except Exception as e: + error_context = { + "message": "Failed to load state data. 
Reinitializing empty state.", + "reason": str(e), + "urlstate_data": urlstate_data, + } + logger.warning(error_context) + urlstate_data = {} + else: + urlstate_data = urlstate_data_obj session_id = ( urlstate_data[_SESSION_ID_KEY] From 87f40822707dd0e7f4fc695b6291ea15ee995ee9 Mon Sep 17 00:00:00 2001 From: Vishal Kadam Date: Fri, 16 Oct 2020 17:53:27 -0400 Subject: [PATCH 171/288] Support for exposing co entity's metadata endpoint --- .gitignore | 1 + example/internal_attributes.yaml.example | 2 +- src/satosa/backends/saml2.py | 3 +- src/satosa/frontends/saml2.py | 52 ++++++++++++++++--- src/satosa/metadata_creation/saml_metadata.py | 2 +- 5 files changed, 49 insertions(+), 11 deletions(-) diff --git a/.gitignore b/.gitignore index 6c67df01d..9d8244255 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ **/.DS_Store _build .idea +*.iml *.pyc *.log* diff --git a/example/internal_attributes.yaml.example b/example/internal_attributes.yaml.example index dc8b5fe1f..02e1a131e 100644 --- a/example/internal_attributes.yaml.example +++ b/example/internal_attributes.yaml.example @@ -27,7 +27,7 @@ attributes: orcid: [emails.str] github: [email] openid: [email] - saml: [email, emailAdress, mail] + saml: [email, emailAddress, mail] name: facebook: [name] orcid: [name.credit-name] diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index f138648a9..d50a93fb7 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -448,7 +448,7 @@ def _metadata_endpoint(self, context): :param context: The current context :return: response with metadata """ - msg = "Sending metadata response" + msg = "Sending metadata response for entityId = {}".format(self.sp.config.entityid) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) @@ -488,6 +488,7 @@ def register_endpoints(self): ("^%s$" % parsed_endp.path[1:], self.disco_response)) if self.expose_entityid_endpoint(): + logger.debug("Exposing backend 
entity endpoint = {}".format(self.sp.config.entityid)) parsed_entity_id = urlparse(self.sp.config.entityid) url_map.append(("^{0}".format(parsed_entity_id.path[1:]), self._metadata_endpoint)) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index cfd43af6c..ee9083b05 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -483,7 +483,7 @@ def _metadata_endpoint(self, context): :param context: The current context :return: response with metadata """ - msg = "Sending metadata response" + msg = "Sending metadata response for entityId = {}".format(self.idp.config.entityid) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) metadata_string = create_metadata_string(None, self.idp.config, 4, None, None, None, None, @@ -523,6 +523,7 @@ def _register_endpoints(self, providers): functools.partial(self.handle_authn_request, binding_in=binding))) if self.expose_entityid_endpoint(): + logger.debug("Exposing frontend entity endpoint = {}".format(self.idp.config.entityid)) parsed_entity_id = urlparse(self.idp.config.entityid) url_map.append(("^{0}".format(parsed_entity_id.path[1:]), self._metadata_endpoint)) @@ -959,7 +960,7 @@ def _add_endpoints_to_config(self, config, co_name, backend_name): return config - def _add_entity_id(self, config, co_name): + def _add_entity_id(self, config, co_name, backend_name): """ Use the CO name to construct the entity ID for the virtual IdP for the CO and add it to the config. 
Also add it to the @@ -967,22 +968,31 @@ def _add_entity_id(self, config, co_name): The entity ID has the form - {base_entity_id}/{co_name} + {base_entity_id}/{backend_name}/{co_name} :type context: The current context :type config: satosa.satosa_config.SATOSAConfig :type co_name: str + :type backend_name: str :rtype: satosa.satosa_config.SATOSAConfig :param context: :param config: satosa proxy config :param co_name: CO name + :param backend_name: Backend name :return: config with updated entity ID """ base_entity_id = config['entityid'] - co_entity_id = "{}/{}".format(base_entity_id, quote_plus(co_name)) - config['entityid'] = co_entity_id + + replace = [ + ("", quote_plus(backend_name)), + ("", quote_plus(co_name)) + ] + for _replace in replace: + base_entity_id = base_entity_id.replace(_replace[0], _replace[1]) + + config['entityid'] = base_entity_id return config @@ -1035,7 +1045,7 @@ def _co_names_from_config(self): return co_names - def _create_co_virtual_idp(self, context): + def _create_co_virtual_idp(self, context, co_name=None): """ Create a virtual IdP to represent the CO. @@ -1045,7 +1055,7 @@ def _create_co_virtual_idp(self, context): :param context: :return: An idp server """ - co_name = self._get_co_name(context) + co_name = co_name or self._get_co_name(context) context.decorate(self.KEY_CO_NAME, co_name) # Verify that we are configured for this CO. 
If the CO was not @@ -1068,7 +1078,7 @@ def _create_co_virtual_idp(self, context): idp_config = self._add_endpoints_to_config( idp_config, co_name, backend_name ) - idp_config = self._add_entity_id(idp_config, co_name) + idp_config = self._add_entity_id(idp_config, co_name, backend_name) context.decorate(self.KEY_CO_ENTITY_ID, idp_config['entityid']) # Use the overwritten IdP config to generate a pysaml2 config object @@ -1155,4 +1165,30 @@ def _register_endpoints(self, backend_names): logline = "Adding mapping {}".format(mapping) logger.debug(logline) + if self.expose_entityid_endpoint(): + for backend_name in backend_names: + for co_name in co_names: + idp_config = self._add_entity_id(copy.deepcopy(self.idp_config), co_name, backend_name) + entity_id = idp_config['entityid'] + logger.debug("Exposing frontend entity endpoint = {}".format(entity_id)) + parsed_entity_id = urlparse(entity_id) + metadata_endpoint = "^{0}".format(parsed_entity_id.path[1:]) + the_callable = functools.partial(self._metadata_endpoint, co_name=co_name) + url_to_callable_mappings.append((metadata_endpoint, the_callable)) + return url_to_callable_mappings + + def _metadata_endpoint(self, context, co_name): + """ + Endpoint for retrieving the virtual frontend metadata + :type context: satosa.context.Context + :rtype: satosa.response.Response + + :param context: The current context + :return: response with metadata + """ + # Using the context of the current request and saved state from the + # authentication request dynamically create an IdP instance. 
+ self.idp = self._create_co_virtual_idp(context, co_name=co_name) + return super()._metadata_endpoint(context=context); + diff --git a/src/satosa/metadata_creation/saml_metadata.py b/src/satosa/metadata_creation/saml_metadata.py index f1b294759..895de4b98 100644 --- a/src/satosa/metadata_creation/saml_metadata.py +++ b/src/satosa/metadata_creation/saml_metadata.py @@ -80,7 +80,7 @@ def _create_frontend_metadata(frontend_modules, backend_modules): logger.info(logline) idp_config = copy.deepcopy(frontend.config["idp_config"]) idp_config = frontend._add_endpoints_to_config(idp_config, co_name, backend.name) - idp_config = frontend._add_entity_id(idp_config, co_name) + idp_config = frontend._add_entity_id(idp_config, co_name, backend.name) idp_config = frontend._overlay_for_saml_metadata(idp_config, co_name) entity_desc = _create_entity_descriptor(idp_config) frontend_metadata[frontend.name].append(entity_desc) From 0fc3ef328a600ccf6d6c35bbfb9d2ae947eaea51 Mon Sep 17 00:00:00 2001 From: Vishal Kadam Date: Thu, 3 Jun 2021 12:53:09 -0400 Subject: [PATCH 172/288] Added test cases for expose co-frontend entity endpoints and duplicate entity id fix --- .gitignore | 1 + .../saml2_virtualcofrontend.yaml.example | 7 +- src/satosa/frontends/saml2.py | 21 +++- tests/conftest.py | 2 +- tests/satosa/frontends/test_saml2.py | 116 ++++++++++++------ 5 files changed, 104 insertions(+), 43 deletions(-) diff --git a/.gitignore b/.gitignore index 9d8244255..bb142cef6 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ _build build/ dist/ .coverage +venv/ \ No newline at end of file diff --git a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index 6d9a7b370..e7415c55e 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -49,7 +49,12 @@ config: metadata: local: [sp.xml] - entityid: //proxy.xml + # Available 
placeholders to use while constructing entityid, + # : Backend name + # : collaborative_organizations encodeable_name + # : Base url of installation + # : Name of this virtual co-frontend + entityid: //idp/ accepted_time_diff: 60 service: idp: diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index ee9083b05..c744610ed 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -788,6 +788,10 @@ class SAMLVirtualCoFrontend(SAMLFrontend): KEY_ORGANIZATION = 'organization' KEY_ORGANIZATION_KEYS = ['display_name', 'name', 'url'] + def __init__(self, auth_req_callback_func, internal_attributes, config, base_url, name): + self.has_multiple_backends = False + super().__init__(auth_req_callback_func, internal_attributes, config, base_url, name) + def handle_authn_request(self, context, binding_in): """ See super class @@ -984,6 +988,9 @@ def _add_entity_id(self, config, co_name, backend_name): :return: config with updated entity ID """ base_entity_id = config['entityid'] + # If not using template for entityId and does not has multiple backends, then for backward compatibility append co_name at end + if "" not in base_entity_id and not self.has_multiple_backends: + base_entity_id = "{}/{}".format(base_entity_id, "") replace = [ ("", quote_plus(backend_name)), @@ -1110,10 +1117,22 @@ def _register_endpoints(self, backend_names): :param backend_names: A list of backend names :return: A list of url and endpoint function pairs """ + + # Throw exception if there is possibility of duplicate entity ids when using co_names with multiple backends + self.has_multiple_backends = len(backend_names) > 1 + co_names = self._co_names_from_config() + all_entity_ids = [] + for backend_name in backend_names: + for co_name in co_names: + all_entity_ids.append(self._add_entity_id(copy.deepcopy(self.idp_config), co_name, backend_name)['entityid']) + + if len(all_entity_ids) != len(set(all_entity_ids)): + raise ValueError("Duplicate entities ids 
would be created for co-frontends, please make sure to make entity ids unique. " + "You can use and to achieve it. See example yaml file.") + # Create a regex pattern that will match any of the CO names. We # escape special characters like '+' and '.' that are valid # characters in an URL encoded string. - co_names = self._co_names_from_config() url_encoded_co_names = [re.escape(quote_plus(name)) for name in co_names] co_name_pattern = "|".join(url_encoded_co_names) diff --git a/tests/conftest.py b/tests/conftest.py index 9e7a5e18f..65b3602b1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -64,7 +64,7 @@ def sp_conf(cert_and_key): @pytest.fixture def idp_conf(cert_and_key): - idp_base = "http://idp.example.com" + idp_base = BASE_URL idpconfig = { "entityid": "{}/{}/proxy.xml".format(idp_base, "Saml2IDP"), diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index 8396a5945..d5eb2af98 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -1,6 +1,7 @@ """ Tests for the SAML frontend module src/frontends/saml2.py. 
""" +import copy import itertools import re from collections import Counter @@ -28,7 +29,6 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.state import State -from satosa.context import Context from tests.users import USERS from tests.util import FakeSP, create_metadata_from_config_dict @@ -298,14 +298,14 @@ def test_acr_mapping_per_idp_in_authn_response(self, context, idp_conf, sp_conf, ] ) def test_respect_sp_entity_categories( - self, - context, - entity_category, - entity_category_module, - expected_attributes, - idp_conf, - sp_conf, - internal_response + self, + context, + entity_category, + entity_category_module, + expected_attributes, + idp_conf, + sp_conf, + internal_response ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = [entity_category_module] @@ -365,7 +365,7 @@ def test_metadata_endpoint(self, context, idp_conf): assert idp_conf["entityid"] in resp.message def test_custom_attribute_release_with_less_attributes_than_entity_category( - self, context, idp_conf, sp_conf, internal_response + self, context, idp_conf, sp_conf, internal_response ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = ["swamid"] @@ -387,8 +387,8 @@ def test_custom_attribute_release_with_less_attributes_than_entity_category( internal_response.requester = sp_conf["entityid"] resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf, idp_metadata_str) assert len(resp.ava.keys()) == ( - len(expected_attributes) - - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) + len(expected_attributes) + - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) ) @@ -431,6 +431,7 @@ def test_load_idp_dynamic_entity_id(self, idp_conf): class 
TestSAMLVirtualCoFrontend(TestSAMLFrontend): BACKEND = "test_backend" + BACKEND_1 = "test_backend_1" CO = "MESS" CO_O = "organization" CO_C = "countryname" @@ -442,7 +443,7 @@ class TestSAMLVirtualCoFrontend(TestSAMLFrontend): CO_C: ["US"], CO_CO: ["United States"], CO_NOREDUORGACRONYM: ["MESS"] - } + } KEY_SSO = "single_sign_on_service" @pytest.fixture @@ -471,10 +472,10 @@ def frontend(self, idp_conf, sp_conf): # endpoints, and the collaborative organization configuration to # create the configuration for the frontend. conf = { - "idp_config": idp_conf, - "endpoints": ENDPOINTS, - "collaborative_organizations": [collab_org] - } + "idp_config": idp_conf, + "endpoints": ENDPOINTS, + "collaborative_organizations": [collab_org] + } # Use a richer set of internal attributes than what is provided # for the parent class so that we can test for the static SAML @@ -504,10 +505,13 @@ def context(self, context): that would be available during a SAML flow and that would include a path and target_backend that indicates the CO. 
""" - context.path = "{}/{}/sso/redirect".format(self.BACKEND, self.CO) - context.target_backend = self.BACKEND + return self._make_context(context, self.BACKEND, self.CO) - return context + def _make_context(self, context, backend, co_name): + _context = copy.deepcopy(context) + _context.path = "{}/{}/sso/redirect".format(backend, co_name) + _context.target_backend = backend + return _context def test_create_state_data(self, frontend, context, idp_conf): frontend._create_co_virtual_idp(context) @@ -542,6 +546,17 @@ def test_create_co_virtual_idp(self, frontend, context, idp_conf): assert idp_server.config.entityid == expected_entityid assert all(sso in sso_endpoints for sso in expected_endpoints) + def test_create_co_virtual_idp_with_entity_id_templates(self, frontend, context): + frontend.idp_config['entityid'] = "{}/Saml2IDP/proxy.xml".format(BASE_URL) + expected_entity_id = "{}/Saml2IDP/proxy.xml/{}".format(BASE_URL, self.CO) + idp_server = frontend._create_co_virtual_idp(context) + assert idp_server.config.entityid == expected_entity_id + + frontend.idp_config['entityid'] = "{}//idp/".format(BASE_URL) + expected_entity_id = "{}/{}/idp/{}".format(BASE_URL, context.target_backend, self.CO) + idp_server = frontend._create_co_virtual_idp(context) + assert idp_server.config.entityid == expected_entity_id + def test_register_endpoints(self, frontend, context): idp_server = frontend._create_co_virtual_idp(context) url_map = frontend.register_endpoints([self.BACKEND]) @@ -553,6 +568,28 @@ def test_register_endpoints(self, frontend, context): for endpoint in all_idp_endpoints: assert any(pat.match(endpoint) for pat in compiled_regex) + def test_register_endpoints_throws_error_in_case_duplicate_entity_ids(self, frontend): + with pytest.raises(ValueError): + frontend.register_endpoints([self.BACKEND, self.BACKEND_1]) + + def test_register_endpoints_with_metadata_endpoints(self, frontend, context): + frontend.idp_config['entityid'] = "{}//idp/".format(BASE_URL) + 
frontend.config['entityid_endpoint'] = True + idp_server_1 = frontend._create_co_virtual_idp(context) + context_2 = self._make_context(context, self.BACKEND_1, self.CO) + idp_server_2 = frontend._create_co_virtual_idp(context_2) + + url_map = frontend.register_endpoints([self.BACKEND, self.BACKEND_1]) + expected_idp_endpoints = [urlparse(endpoint[0]).path[1:] for server in [idp_server_1, idp_server_2] + for endpoint in server.config._idp_endpoints[self.KEY_SSO]] + for server in [idp_server_1, idp_server_2]: + expected_idp_endpoints.append(urlparse(server.config.entityid).path[1:]) + + compiled_regex = [re.compile(regex) for regex, _ in url_map] + + for endpoint in expected_idp_endpoints: + assert any(pat.match(endpoint) for pat in compiled_regex) + def test_co_static_attributes(self, frontend, context, internal_response, idp_conf, sp_conf): # Use the frontend and context fixtures to dynamically create the @@ -563,9 +600,8 @@ def test_co_static_attributes(self, frontend, context, internal_response, # and then use those to dynamically update the ipd_conf fixture. 
co_name = frontend._get_co_name(context) backend_name = context.target_backend - idp_conf = frontend._add_endpoints_to_config(idp_conf, co_name, - backend_name) - idp_conf = frontend._add_entity_id(idp_conf, co_name) + idp_conf = frontend._add_endpoints_to_config(idp_conf, co_name, backend_name) + idp_conf = frontend._add_entity_id(idp_conf, co_name, backend_name) # Use a utility function to serialize the idp_conf IdP configuration # fixture to a string and then dynamically update the sp_conf @@ -597,9 +633,9 @@ def test_co_static_attributes(self, frontend, context, internal_response, "name_id_policy": NameIDPolicy(format=NAMEID_FORMAT_TRANSIENT), "in_response_to": None, "destination": sp_config.endpoint( - "assertion_consumer_service", - binding=BINDING_HTTP_REDIRECT - )[0], + "assertion_consumer_service", + binding=BINDING_HTTP_REDIRECT + )[0], "sp_entity_id": sp_conf["entityid"], "binding": BINDING_HTTP_REDIRECT } @@ -616,42 +652,42 @@ def test_co_static_attributes(self, frontend, context, internal_response, class TestSubjectTypeToSamlNameIdFormat: def test_should_default_to_persistent(self): assert ( - subject_type_to_saml_nameid_format("unmatched") - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format("unmatched") + == NAMEID_FORMAT_PERSISTENT ) def test_should_map_persistent(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_PERSISTENT) - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format(NAMEID_FORMAT_PERSISTENT) + == NAMEID_FORMAT_PERSISTENT ) def test_should_map_transient(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_TRANSIENT) - == NAMEID_FORMAT_TRANSIENT + subject_type_to_saml_nameid_format(NAMEID_FORMAT_TRANSIENT) + == NAMEID_FORMAT_TRANSIENT ) def test_should_map_emailaddress(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_EMAILADDRESS) - == NAMEID_FORMAT_EMAILADDRESS + subject_type_to_saml_nameid_format(NAMEID_FORMAT_EMAILADDRESS) + == NAMEID_FORMAT_EMAILADDRESS ) def 
test_should_map_unspecified(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_UNSPECIFIED) - == NAMEID_FORMAT_UNSPECIFIED + subject_type_to_saml_nameid_format(NAMEID_FORMAT_UNSPECIFIED) + == NAMEID_FORMAT_UNSPECIFIED ) def test_should_map_public(self): assert ( - subject_type_to_saml_nameid_format("public") - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format("public") + == NAMEID_FORMAT_PERSISTENT ) def test_should_map_pairwise(self): assert ( - subject_type_to_saml_nameid_format("pairwise") - == NAMEID_FORMAT_TRANSIENT + subject_type_to_saml_nameid_format("pairwise") + == NAMEID_FORMAT_TRANSIENT ) From c35a20bc21e58c2dfe1bcbab41c9af8dcb39017a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 2 Nov 2021 01:25:13 +0200 Subject: [PATCH 173/288] Fix formatting Signed-off-by: Ivan Kanakarakis --- .gitignore | 2 +- src/satosa/frontends/saml2.py | 1 - tests/conftest.py | 2 +- tests/satosa/frontends/test_saml2.py | 52 +++++++++++++--------------- 4 files changed, 27 insertions(+), 30 deletions(-) diff --git a/.gitignore b/.gitignore index bb142cef6..17b270187 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,4 @@ _build build/ dist/ .coverage -venv/ \ No newline at end of file +venv/ diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index c744610ed..b481b5d25 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -1210,4 +1210,3 @@ def _metadata_endpoint(self, context, co_name): # authentication request dynamically create an IdP instance. 
self.idp = self._create_co_virtual_idp(context, co_name=co_name) return super()._metadata_endpoint(context=context); - diff --git a/tests/conftest.py b/tests/conftest.py index 65b3602b1..9e7a5e18f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -64,7 +64,7 @@ def sp_conf(cert_and_key): @pytest.fixture def idp_conf(cert_and_key): - idp_base = BASE_URL + idp_base = "http://idp.example.com" idpconfig = { "entityid": "{}/{}/proxy.xml".format(idp_base, "Saml2IDP"), diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index d5eb2af98..978489429 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -298,14 +298,14 @@ def test_acr_mapping_per_idp_in_authn_response(self, context, idp_conf, sp_conf, ] ) def test_respect_sp_entity_categories( - self, - context, - entity_category, - entity_category_module, - expected_attributes, - idp_conf, - sp_conf, - internal_response + self, + context, + entity_category, + entity_category_module, + expected_attributes, + idp_conf, + sp_conf, + internal_response ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = [entity_category_module] @@ -365,7 +365,7 @@ def test_metadata_endpoint(self, context, idp_conf): assert idp_conf["entityid"] in resp.message def test_custom_attribute_release_with_less_attributes_than_entity_category( - self, context, idp_conf, sp_conf, internal_response + self, context, idp_conf, sp_conf, internal_response ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = ["swamid"] @@ -387,8 +387,8 @@ def test_custom_attribute_release_with_less_attributes_than_entity_category( internal_response.requester = sp_conf["entityid"] resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf, idp_metadata_str) assert len(resp.ava.keys()) == ( - 
len(expected_attributes) - - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) + len(expected_attributes) + - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) ) @@ -442,7 +442,7 @@ class TestSAMLVirtualCoFrontend(TestSAMLFrontend): CO_O: ["Medium Energy Synchrotron Source"], CO_C: ["US"], CO_CO: ["United States"], - CO_NOREDUORGACRONYM: ["MESS"] + CO_NOREDUORGACRONYM: ["MESS"], } KEY_SSO = "single_sign_on_service" @@ -474,7 +474,7 @@ def frontend(self, idp_conf, sp_conf): conf = { "idp_config": idp_conf, "endpoints": ENDPOINTS, - "collaborative_organizations": [collab_org] + "collaborative_organizations": [collab_org], } # Use a richer set of internal attributes than what is provided @@ -652,42 +652,40 @@ def test_co_static_attributes(self, frontend, context, internal_response, class TestSubjectTypeToSamlNameIdFormat: def test_should_default_to_persistent(self): assert ( - subject_type_to_saml_nameid_format("unmatched") - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format("unmatched") + == NAMEID_FORMAT_PERSISTENT ) def test_should_map_persistent(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_PERSISTENT) - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format(NAMEID_FORMAT_PERSISTENT) + == NAMEID_FORMAT_PERSISTENT ) def test_should_map_transient(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_TRANSIENT) - == NAMEID_FORMAT_TRANSIENT + subject_type_to_saml_nameid_format(NAMEID_FORMAT_TRANSIENT) + == NAMEID_FORMAT_TRANSIENT ) def test_should_map_emailaddress(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_EMAILADDRESS) - == NAMEID_FORMAT_EMAILADDRESS + subject_type_to_saml_nameid_format(NAMEID_FORMAT_EMAILADDRESS) + == NAMEID_FORMAT_EMAILADDRESS ) def test_should_map_unspecified(self): assert ( - subject_type_to_saml_nameid_format(NAMEID_FORMAT_UNSPECIFIED) - == NAMEID_FORMAT_UNSPECIFIED + 
subject_type_to_saml_nameid_format(NAMEID_FORMAT_UNSPECIFIED) + == NAMEID_FORMAT_UNSPECIFIED ) def test_should_map_public(self): assert ( - subject_type_to_saml_nameid_format("public") - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format("public") == NAMEID_FORMAT_PERSISTENT ) def test_should_map_pairwise(self): assert ( - subject_type_to_saml_nameid_format("pairwise") - == NAMEID_FORMAT_TRANSIENT + subject_type_to_saml_nameid_format("pairwise") == NAMEID_FORMAT_TRANSIENT ) From 065e5da1c669d5d2a3e2be3d4806947ab1b32a00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 16 Nov 2021 14:37:20 +0100 Subject: [PATCH 174/288] docs: add name and description to example SAML2 backend these attributes are required to generate valid metadata in combination with required_attributes and/or optional_attributes --- example/plugins/backends/saml2_backend.yaml.example | 2 ++ 1 file changed, 2 insertions(+) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index c132e2345..335da8117 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -18,6 +18,8 @@ config: enable_metadata_reload: no sp_config: + name: "SP Name" + description: "SP Description" key_file: backend.key cert_file: backend.crt organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} From b361005302c2fd5b9cd1c88d0b2fd447df401dd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Fri, 19 Nov 2021 12:21:29 +0100 Subject: [PATCH 175/288] docs: fix disco_srv nesting in README disco_srv needs to be in the top-level config, not under sp_config --- doc/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index d058d6ad7..047cda51c 100644 --- a/doc/README.md +++ b/doc/README.md @@ -380,8 +380,8 @@ parameter `disco_srv`, 
must be specified if the metadata given to the backend mo ```yaml config: + disco_srv: http://disco.example.com sp_config: [...] - disco_srv: http://disco.example.com ``` ##### Mirror the SAML ForceAuthn option From 2fb3d5ad7200a1094998c60e5c3e108a2a25361d Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Sat, 11 Dec 2021 15:56:53 +0100 Subject: [PATCH 176/288] Add option search_filter to ldap This patch adds the option to override the search_filter in ldap with an own complex search_filter, because sometimes a single simple argument is not sufficient. --- .../microservices/ldap_attribute_store.yaml.example | 5 +++++ src/satosa/micro_services/ldap_attribute_store.py | 8 ++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 4efe85072..77be74e44 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -84,6 +84,11 @@ config: ldap_identifier_attribute: uid + # Override the contructed search_filter with ldap_identifier_attribute + # with an own filter. This allows more komplex queries. + # {0} will be injected with the ordered_identifier_candidates. + search_filter: None + # Whether to clear values for attributes incoming # to this microservice. Default is no or false. 
clear_input_attributes: no diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 6d61559b1..d5c1f05eb 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -46,6 +46,7 @@ class LdapAttributeStore(ResponseMicroService): "clear_input_attributes": False, "ignore": False, "ldap_identifier_attribute": None, + "search_filter": None, "ldap_url": None, "ldap_to_internal_map": None, "on_ldap_search_result_empty": None, @@ -473,8 +474,11 @@ def process(self, context, data): logger.debug(logline) for filter_val in filter_values: - ldap_ident_attr = config["ldap_identifier_attribute"] - search_filter = "({0}={1})".format(ldap_ident_attr, filter_val) + if config["search_filter"]: + search_filter = config["search_filter"].format(filter_val) + else: + ldap_ident_attr = config["ldap_identifier_attribute"] + search_filter = "({0}={1})".format(ldap_ident_attr, filter_val) msg = { "message": "LDAP query with constructed search filter", "search filter": search_filter, From f0c57fd81f8e2fcfa5ff386fa139460273214264 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Sat, 11 Dec 2021 16:26:25 +0100 Subject: [PATCH 177/288] Select LDAP config by extracted attribute This patch introduces a new global config variable `provider_attribute` to make it possible to select the config not only by entity but also select the config variable by a previous set attribute. This way it is possible to use a single point of authentication, but enrich the information from different ldap server based on e.g. the domain attribute extracted in previous steps. 
--- .../ldap_attribute_store.yaml.example | 19 +++++++++++++++---- .../micro_services/ldap_attribute_store.py | 16 +++++++++++++++- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 4efe85072..e1085f7bc 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -97,13 +97,23 @@ config: # from LDAP. The default is not to redirect. on_ldap_search_result_empty: https://my.vo.org/please/go/enroll - # The microservice may be configured per entityID. + # The microservice may be configured per entityID or per extracted attribute. # The configuration key is the entityID of the requesting SP, - # the authenticating IdP, or the entityID of the CO virtual IdP. - # When more than one configured entityID matches during a flow - # the priority ordering is requesting SP, then authenticating IdP, then + # the authenticating IdP, the entityID of the CO virtual IdP, or the + # extracted attribute defined by `global.provider_attribute`. + # When more than one configured key matches during a flow + # the priority ordering is provider attribute, requesting SP, then authenticating IdP, then # CO virtual IdP. Any missing parameters are taken from the # default configuration. + global: + provider_attribute: domain + + # domain attribute is extracted in a previous microservice and used as a key + # here. + company.com: + ldap_url: ldaps://ldap.company.com + search_base: ou=group,dc=identity,dc=company,dc=com + https://sp.myserver.edu/shibboleth-sp: search_base: ou=People,o=MyVO,dc=example,dc=org search_return_attributes: @@ -120,3 +130,4 @@ config: # The microservice may be configured to ignore a particular entityID. 
https://another.sp.myserver.edu: ignore: true + diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 6d61559b1..89145955a 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -81,9 +81,15 @@ def __init__(self, config, *args, **kwargs): self.config = {} + # Get provider attribute + self.provider_attribute = None + if "global" in config: + if "provider_attribute" in config["global"]: + self.provider_attribute = config["global"]["provider_attribute"] + # Process the default configuration first then any per-SP overrides. sp_list = ["default"] - sp_list.extend([key for key in config.keys() if key != "default"]) + sp_list.extend([key for key in config.keys() if key != "default" and key != "global"]) connections = {} @@ -412,6 +418,14 @@ def process(self, context, data): co_entity_id = state.get(frontend_name, {}).get(co_entity_id_key) entity_ids = [requester, issuer, co_entity_id, "default"] + if self.provider_attribute: + try: + entity_ids.insert( + 0, + data.attributes[self.provider_attribute][0] + ) + except (KeyError, IndexError): + pass config, entity_id = next((self.config.get(e), e) for e in entity_ids if self.config.get(e)) From af4820ce6e81cb5a47cef244c4f3000a1db35555 Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Sat, 11 Dec 2021 16:49:45 +0100 Subject: [PATCH 178/288] Add option to process all ldap results This patch adds an option to not only process the first ldap result, but all of them. This can be useful while trying to enrich the data e.g. with multiple group information. 
--- .../ldap_attribute_store.yaml.example | 4 + .../micro_services/ldap_attribute_store.py | 135 +++++++++--------- 2 files changed, 73 insertions(+), 66 deletions(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 4efe85072..d5d4d7885 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -93,6 +93,10 @@ config: user_id_from_attrs: - employeeNumber + # If true, do not only process the first ldap result, but iterate over + # the result and process all of them. + use_all_results: false + # Where to redirect the browser if no record is returned # from LDAP. The default is not to redirect. on_ldap_search_result_empty: https://my.vo.org/please/go/enroll diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 6d61559b1..2a4284746 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -526,13 +526,13 @@ def process(self, context, data): # For now consider only the first record found (if any). if len(responses) > 0: - if len(responses) > 1: + if len(responses) > 1 and not config.get("use_all_results", False): msg = "LDAP server returned {} records using search filter" msg = msg + " value {}" msg = msg.format(len(responses), filter_val) logline = lu.LOG_FMT.format(id=session_id, message=msg) logger.warning(logline) - record = responses[0] + responses = responses[0:1] break # Before using a found record, if any, to populate attributes @@ -544,73 +544,76 @@ def process(self, context, data): logger.debug(logline) data.attributes = {} - # This adapts records with different search and connection strategy - # (sync without pool), it should be tested with anonimous bind with - # message_id. 
- if isinstance(results, bool) and record: - record = { - "dn": record.entry_dn if hasattr(record, "entry_dn") else "", - "attributes": ( - record.entry_attributes_as_dict - if hasattr(record, "entry_attributes_as_dict") - else {} - ), - } - - # Use a found record, if any, to populate attributes and input for - # NameID - if record: - msg = { - "message": "Using record with DN and attributes", - "DN": record["dn"], - "attributes": record["attributes"], - } - logline = lu.LOG_FMT.format(id=session_id, message=msg) - logger.debug(logline) + for record in responses: + # This adapts records with different search and connection strategy + # (sync without pool), it should be tested with anonimous bind with + # message_id. + if isinstance(results, bool) and record: + record = { + "dn": record.entry_dn if hasattr(record, "entry_dn") else "", + "attributes": ( + record.entry_attributes_as_dict + if hasattr(record, "entry_attributes_as_dict") + else {} + ), + } + + # Use a found record, if any, to populate attributes and input for + # NameID + if record: + msg = { + "message": "Using record with DN and attributes", + "DN": record["dn"], + "attributes": record["attributes"], + } + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) - # Populate attributes as configured. - new_attrs = self._populate_attributes(config, record) - - overwrite = config["overwrite_existing_attributes"] - for attr, values in new_attrs.items(): - if not overwrite: - values = list(set(data.attributes.get(attr, []) + values)) - data.attributes[attr] = values - - # Populate input for NameID if configured. SATOSA core does the - # hashing of input to create a persistent NameID. - user_ids = self._populate_input_for_name_id(config, record, data) - if user_ids: - data.subject_id = "".join(user_ids) - msg = "NameID value is {}".format(data.subject_id) - logger.debug(msg) + # Populate attributes as configured. 
+ new_attrs = self._populate_attributes(config, record) + + overwrite = config["overwrite_existing_attributes"] + for attr, values in new_attrs.items(): + if not overwrite: + values = list(map(str, set(data.attributes.get(attr, []) + values))) + else: + values = list(map(str, set(values))) + data.attributes[attr] = values + + # Populate input for NameID if configured. SATOSA core does the + # hashing of input to create a persistent NameID. + user_ids = self._populate_input_for_name_id(config, record, data) + if user_ids: + data.subject_id = "".join(user_ids) + msg = "NameID value is {}".format(data.subject_id) + logger.debug(msg) - # Add the record to the context so that later microservices - # may use it if required. - context.decorate(KEY_FOUND_LDAP_RECORD, record) - msg = "Added record {} to context".format(record) - logline = lu.LOG_FMT.format(id=session_id, message=msg) - logger.debug(logline) - else: - msg = "No record found in LDAP so no attributes will be added" - logline = lu.LOG_FMT.format(id=session_id, message=msg) - logger.warning(logline) - on_ldap_search_result_empty = config["on_ldap_search_result_empty"] - if on_ldap_search_result_empty: - # Redirect to the configured URL with - # the entityIDs for the target SP and IdP used by the user - # as query string parameters (URL encoded). - encoded_sp_entity_id = urllib.parse.quote_plus(requester) - encoded_idp_entity_id = urllib.parse.quote_plus(issuer) - url = "{}?sp={}&idp={}".format( - on_ldap_search_result_empty, - encoded_sp_entity_id, - encoded_idp_entity_id, - ) - msg = "Redirecting to {}".format(url) + # Add the record to the context so that later microservices + # may use it if required. 
+ context.decorate(KEY_FOUND_LDAP_RECORD, record) + msg = "Added record {} to context".format(record) logline = lu.LOG_FMT.format(id=session_id, message=msg) - logger.info(logline) - return Redirect(url) + logger.debug(logline) + else: + msg = "No record found in LDAP so no attributes will be added" + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.warning(logline) + on_ldap_search_result_empty = config["on_ldap_search_result_empty"] + if on_ldap_search_result_empty: + # Redirect to the configured URL with + # the entityIDs for the target SP and IdP used by the user + # as query string parameters (URL encoded). + encoded_sp_entity_id = urllib.parse.quote_plus(requester) + encoded_idp_entity_id = urllib.parse.quote_plus(issuer) + url = "{}?sp={}&idp={}".format( + on_ldap_search_result_empty, + encoded_sp_entity_id, + encoded_idp_entity_id, + ) + msg = "Redirecting to {}".format(url) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.info(logline) + return Redirect(url) msg = "Returning data.attributes {}".format(data.attributes) logline = lu.LOG_FMT.format(id=session_id, message=msg) From 1a408439a6b8855346e5ca2c645dee6ab1ce8c0a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 22 Feb 2022 15:40:33 +0200 Subject: [PATCH 179/288] Release version 8.0.1 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 12 ++++++++++++ setup.py | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 5b192d1fa..621e3e9a0 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.0.0 +current_version = 8.0.1 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index acea1c0dd..bbdade9d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## 8.0.1 (2022-02-22) + +- Reinitialize state if error occurs while loading state +- VirtualCoFrontend: Expose metadata endpoint and fix duplicate entity 
ids with multiple backends +- saml-backend: Allow request micro-services to affect the authn-context-class-ref +- saml-backend: Keep the last authority from the authenticating authority list +- minor fixes to the Apple and GitHub backends +- micro_services: example config for attribute_policy +- deps: bump minimum pyop version to 3.3.1 +- docs: fixes for example files and config options + + ## 8.0.0 (2021-09-07) This is a breaking release, if you were using the openid_connect frontend. To diff --git a/setup.py b/setup.py index 12ffacf72..175b97b29 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.0.0', + version='8.0.1', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 52dce96454fd83f359ad8524c524e5d261e44dca Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Tue, 22 Feb 2022 16:14:14 +0100 Subject: [PATCH 180/288] Update example like suggested in the Pull Request --- .../plugins/microservices/ldap_attribute_store.yaml.example | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 77be74e44..033737924 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -85,8 +85,10 @@ config: ldap_identifier_attribute: uid # Override the contructed search_filter with ldap_identifier_attribute - # with an own filter. This allows more komplex queries. + # with an own filter. This allows more complex queries. # {0} will be injected with the ordered_identifier_candidates. 
+ # For example: + # search_filter: "(&(uid={0})(isMemberOf=authorized))" search_filter: None # Whether to clear values for attributes incoming From c96990027c12268cb5e2b82e8c4204f65c0ccc0f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 12 Apr 2022 21:46:35 +0300 Subject: [PATCH 181/288] Orcid family-name is optional Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/orcid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/backends/orcid.py b/src/satosa/backends/orcid.py index aaa18b7e5..6ad69fe96 100644 --- a/src/satosa/backends/orcid.py +++ b/src/satosa/backends/orcid.py @@ -99,6 +99,6 @@ def user_information(self, access_token, orcid, name): mail=' '.join([e['email'] for e in emails]), name=name, givenname=r['name']['given-names']['value'], - surname=r['name']['family-name']['value'], + surname=(r['name']['family-name'] or {}).get('value'), ) return ret From 250a6e72be2ee6aa593f29f2794f313b1ea2346d Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 13 May 2021 14:00:57 +0200 Subject: [PATCH 182/288] OIDC frontend: support Redis and session expiration Support all storage backends from recent pyop. Add automatic expiration TTL for the different collections so that the session databases does not grow without bounds. The default TTL values were copied from pyop's current defaults. TODO: add pyop version requirement once there is an official release. Signed-off-by: Ivan Kanakarakis --- doc/README.md | 8 ++--- src/satosa/frontends/openid_connect.py | 41 +++++++++++++++++++++----- 2 files changed, 38 insertions(+), 11 deletions(-) diff --git a/doc/README.md b/doc/README.md index 047cda51c..f4b907ec7 100644 --- a/doc/README.md +++ b/doc/README.md @@ -459,14 +459,14 @@ Connect Relying Parties (RPs). The default configuration file can be found [here](../example/plugins/frontends/openid_connect_frontend.yaml.example). 
As opposed to the other plugins, this plugin is NOT stateless (due to the nature of OpenID Connect using any other -flow than "Implicit Flow"). However, the frontend supports using a MongoDB instance as its backend storage, so as long +flow than "Implicit Flow"). However, the frontend supports using a MongoDB or Redis instance as its backend storage, so as long that's reachable from all machines it should not be a problem. The configuration parameters available: * `signing_key_path`: path to a RSA Private Key file (PKCS#1). MUST be configured. -* `db_uri`: connection URI to MongoDB instance where the data will be persisted, if it's not specified all data will only +* `db_uri`: connection URI to MongoDB or Redis instance where the data will be persisted, if it's not specified all data will only be stored in-memory (not suitable for production use). -* `client_db_uri`: connection URI to MongoDB instance where the client data will be persistent, if it's not specified the clients list will be received from the `client_db_path`. +* `client_db_uri`: connection URI to MongoDB or Redis instance where the client data will be persistent, if it's not specified the clients list will be received from the `client_db_path`. * `client_db_path`: path to a file containing the client database in json format. It will only be used if `client_db_uri` is not set. If `client_db_uri` and `client_db_path` are not set, clients will only be stored in-memory (not suitable for production use). * `sub_hash_salt`: salt which is hashed into the `sub` claim. If it's not specified, SATOSA will generate a random salt on each startup, which means that users will get new `sub` value after every restart. * `provider`: provider configuration information. 
MUST be configured, the following configuration are supported: @@ -474,7 +474,7 @@ The configuration parameters available: * `subject_types_supported` (default: `[pairwise]`): list of all supported subject identifier types, see [Section 8 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes) * `scopes_supported` (default: `[openid]`): list of all supported scopes, see [Section 5.4 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims) * `client_registration_supported` (default: `No`): boolean whether [dynamic client registration is supported](https://openid.net/specs/openid-connect-registration-1_0.html). - If dynamic client registration is not supported all clients must exist in the MongoDB instance configured by the `db_uri` in the `"clients"` collection of the `"satosa"` database. + If dynamic client registration is not supported all clients must exist in the MongoDB or Redis instance configured by the `db_uri` in the `"clients"` collection of the `"satosa"` database. The registration info must be stored using the client id as a key, and use the parameter names of a [OIDC Registration Response](https://openid.net/specs/openid-connect-registration-1_0.html#RegistrationResponse). 
* `authorization_code_lifetime`: how long authorization codes should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 8bd1319f7..af96a8215 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -16,7 +16,7 @@ from pyop.exceptions import (InvalidAuthenticationRequest, InvalidClientRegistrationRequest, InvalidClientAuthentication, OAuthError, BearerTokenError, InvalidAccessToken) from pyop.provider import Provider -from pyop.storage import MongoWrapper +from pyop.storage import StorageBase from pyop.subject_identifier import HashBasedSubjectIdentifierFactory from pyop.userinfo import Userinfo from pyop.util import should_fragment_encode @@ -81,13 +81,22 @@ def _create_provider(self, endpoint_baseurl): client_db_uri = self.config.get("client_db_uri") cdb_file = self.config.get("client_db_path") if client_db_uri: - cdb = MongoWrapper(client_db_uri, "satosa", "clients") + cdb = StorageBase.from_uri( + client_db_uri, db_name="satosa", collection="clients" + ) elif cdb_file: with open(cdb_file) as f: cdb = json.loads(f.read()) else: cdb = {} - self.user_db = MongoWrapper(db_uri, "satosa", "authz_codes") if db_uri else {} + + #XXX What is the correct ttl for user_db? Is it the same as authz_code_db? 
+ self.user_db = ( + StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") + if db_uri + else {} + ) + self.provider = Provider( self.signing_key, capabilities, @@ -102,10 +111,28 @@ def _init_authorization_state(self): sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) db_uri = self.config.get("db_uri") if db_uri: - authz_code_db = MongoWrapper(db_uri, "satosa", "authz_codes") - access_token_db = MongoWrapper(db_uri, "satosa", "access_tokens") - refresh_token_db = MongoWrapper(db_uri, "satosa", "refresh_tokens") - sub_db = MongoWrapper(db_uri, "satosa", "subject_identifiers") + authz_code_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="authz_codes", + ttl=self.config["provider"].get("authorization_code_lifetime", 600), + ) + access_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="access_tokens", + ttl=self.config["provider"].get("access_token_lifetime", 3600), + ) + refresh_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="refresh_tokens", + ttl=self.config["provider"].get("refresh_token_lifetime", None), + ) + #XXX what is the correct TTL for sub_db? 
+ sub_db = StorageBase.from_uri( + db_uri, db_name="satosa", collection="subject_identifiers" + ) else: authz_code_db = None access_token_db = None From 3c8d95f9c21949008a9a4e93fa1cb324f813a3a6 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 18 Apr 2022 16:31:48 +0300 Subject: [PATCH 183/288] Restructure initialization Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/openid_connect.py | 281 +++++++++++++++---------- 1 file changed, 167 insertions(+), 114 deletions(-) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index af96a8215..f144b43e2 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -40,49 +40,35 @@ class OpenIDConnectFrontend(FrontendModule): """ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, name): - self._validate_config(conf) + _validate_config(conf) super().__init__(auth_req_callback_func, internal_attributes, base_url, name) self.config = conf - self.signing_key = RSAKey(key=rsa_load(conf["signing_key_path"]), use="sig", alg="RS256", - kid=conf.get("signing_key_id", "")) - - def _create_provider(self, endpoint_baseurl): - response_types_supported = self.config["provider"].get("response_types_supported", ["id_token"]) - subject_types_supported = self.config["provider"].get("subject_types_supported", ["pairwise"]) - scopes_supported = self.config["provider"].get("scopes_supported", ["openid"]) - extra_scopes = self.config["provider"].get("extra_scopes") - capabilities = { - "issuer": self.base_url, - "authorization_endpoint": "{}/{}".format(endpoint_baseurl, AuthorizationEndpoint.url), - "jwks_uri": "{}/jwks".format(endpoint_baseurl), - "response_types_supported": response_types_supported, - "id_token_signing_alg_values_supported": [self.signing_key.alg], - "response_modes_supported": ["fragment", "query"], - "subject_types_supported": subject_types_supported, - "claim_types_supported": ["normal"], - 
"claims_parameter_supported": True, - "claims_supported": [attribute_map["openid"][0] - for attribute_map in self.internal_attributes["attributes"].values() - if "openid" in attribute_map], - "request_parameter_supported": False, - "request_uri_parameter_supported": False, - "scopes_supported": scopes_supported - } - - if 'code' in response_types_supported: - capabilities["token_endpoint"] = "{}/{}".format(endpoint_baseurl, TokenEndpoint.url) - - if self.config["provider"].get("client_registration_supported", False): - capabilities["registration_endpoint"] = "{}/{}".format(endpoint_baseurl, RegistrationEndpoint.url) - - authz_state = self._init_authorization_state() + provider_config = self.config["provider"] + provider_config["issuer"] = base_url + + self.signing_key = RSAKey( + key=rsa_load(self.config["signing_key_path"]), + use="sig", + alg="RS256", + kid=self.config.get("signing_key_id", ""), + ) + db_uri = self.config.get("db_uri") + self.user_db = ( + StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") + if db_uri + else {} + ) + + sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) + authz_state = _init_authorization_state(provider_config, db_uri, sub_hash_salt) + client_db_uri = self.config.get("client_db_uri") cdb_file = self.config.get("client_db_path") if client_db_uri: cdb = StorageBase.from_uri( - client_db_uri, db_name="satosa", collection="clients" + client_db_uri, db_name="satosa", collection="clients", ttl=None ) elif cdb_file: with open(cdb_file) as f: @@ -90,63 +76,17 @@ def _create_provider(self, endpoint_baseurl): else: cdb = {} - #XXX What is the correct ttl for user_db? Is it the same as authz_code_db? 
- self.user_db = ( - StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") - if db_uri - else {} - ) - - self.provider = Provider( + self.endpoint_baseurl = "{}/{}".format(self.base_url, self.name) + self.provider = _create_provider( + provider_config, + self.endpoint_baseurl, + self.internal_attributes, self.signing_key, - capabilities, authz_state, + self.user_db, cdb, - Userinfo(self.user_db), - extra_scopes=extra_scopes, - id_token_lifetime=self.config["provider"].get("id_token_lifetime", 3600), ) - def _init_authorization_state(self): - sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) - db_uri = self.config.get("db_uri") - if db_uri: - authz_code_db = StorageBase.from_uri( - db_uri, - db_name="satosa", - collection="authz_codes", - ttl=self.config["provider"].get("authorization_code_lifetime", 600), - ) - access_token_db = StorageBase.from_uri( - db_uri, - db_name="satosa", - collection="access_tokens", - ttl=self.config["provider"].get("access_token_lifetime", 3600), - ) - refresh_token_db = StorageBase.from_uri( - db_uri, - db_name="satosa", - collection="refresh_tokens", - ttl=self.config["provider"].get("refresh_token_lifetime", None), - ) - #XXX what is the correct TTL for sub_db? 
- sub_db = StorageBase.from_uri( - db_uri, db_name="satosa", collection="subject_identifiers" - ) - else: - authz_code_db = None - access_token_db = None - refresh_token_db = None - sub_db = None - - token_lifetimes = {k: self.config["provider"][k] for k in ["authorization_code_lifetime", - "access_token_lifetime", - "refresh_token_lifetime", - "refresh_token_threshold"] - if k in self.config["provider"]} - return AuthorizationState(HashBasedSubjectIdentifierFactory(sub_hash_salt), authz_code_db, access_token_db, - refresh_token_db, sub_db, **token_lifetimes) - def _get_extra_id_token_claims(self, user_id, client_id): if "extra_id_token_claims" in self.config["provider"]: config = self.config["provider"]["extra_id_token_claims"].get(client_id, []) @@ -223,9 +163,6 @@ def register_endpoints(self, backend_names): else: backend_name = backend_names[0] - endpoint_baseurl = "{}/{}".format(self.base_url, self.name) - self._create_provider(endpoint_baseurl) - provider_config = ("^.well-known/openid-configuration$", self.provider_config) jwks_uri = ("^{}/jwks$".format(self.name), self.jwks) @@ -236,42 +173,36 @@ def register_endpoints(self, backend_names): auth_path = urlparse(auth_endpoint).path.lstrip("/") else: auth_path = "{}/{}".format(self.name, AuthorizationEndpoint.url) + authentication = ("^{}$".format(auth_path), self.handle_authn_request) url_map = [provider_config, jwks_uri, authentication] if any("code" in v for v in self.provider.configuration_information["response_types_supported"]): - self.provider.configuration_information["token_endpoint"] = "{}/{}".format(endpoint_baseurl, - TokenEndpoint.url) - token_endpoint = ("^{}/{}".format(self.name, TokenEndpoint.url), self.token_endpoint) + self.provider.configuration_information["token_endpoint"] = "{}/{}".format( + self.endpoint_baseurl, TokenEndpoint.url + ) + token_endpoint = ( + "^{}/{}".format(self.name, TokenEndpoint.url), self.token_endpoint + ) url_map.append(token_endpoint) - 
self.provider.configuration_information["userinfo_endpoint"] = "{}/{}".format(endpoint_baseurl, - UserinfoEndpoint.url) - userinfo_endpoint = ("^{}/{}".format(self.name, UserinfoEndpoint.url), self.userinfo_endpoint) + self.provider.configuration_information["userinfo_endpoint"] = ( + "{}/{}".format(self.endpoint_baseurl, UserinfoEndpoint.url) + ) + userinfo_endpoint = ( + "^{}/{}".format(self.name, UserinfoEndpoint.url), self.userinfo_endpoint + ) url_map.append(userinfo_endpoint) + if "registration_endpoint" in self.provider.configuration_information: - client_registration = ("^{}/{}".format(self.name, RegistrationEndpoint.url), self.client_registration) + client_registration = ( + "^{}/{}".format(self.name, RegistrationEndpoint.url), + self.client_registration, + ) url_map.append(client_registration) return url_map - def _validate_config(self, config): - """ - Validates that all necessary config parameters are specified. - :type config: dict[str, dict[str, Any] | str] - :param config: the module config - """ - if config is None: - raise ValueError("OIDCFrontend conf can't be 'None'.") - - for k in {"signing_key_path", "provider"}: - if k not in config: - raise ValueError("Missing configuration parameter '{}' for OpenID Connect frontend.".format(k)) - - if "signing_key_id" in config and type(config["signing_key_id"]) is not str: - raise ValueError( - "The configuration parameter 'signing_key_id' is not defined as a string for OpenID Connect frontend.") - def _get_authn_request_from_state(self, state): """ Extract the clietns request stoed in the SATOSA state. @@ -438,6 +369,128 @@ def userinfo_endpoint(self, context): return response +def _validate_config(config): + """ + Validates that all necessary config parameters are specified. 
+ :type config: dict[str, dict[str, Any] | str] + :param config: the module config + """ + if config is None: + raise ValueError("OIDCFrontend configuration can't be 'None'.") + + for k in {"signing_key_path", "provider"}: + if k not in config: + raise ValueError("Missing configuration parameter '{}' for OpenID Connect frontend.".format(k)) + + if "signing_key_id" in config and type(config["signing_key_id"]) is not str: + raise ValueError( + "The configuration parameter 'signing_key_id' is not defined as a string for OpenID Connect frontend.") + + +def _create_provider( + provider_config, + endpoint_baseurl, + internal_attributes, + signing_key, + authz_state, + user_db, + cdb, +): + response_types_supported = provider_config.get("response_types_supported", ["id_token"]) + subject_types_supported = provider_config.get("subject_types_supported", ["pairwise"]) + scopes_supported = provider_config.get("scopes_supported", ["openid"]) + extra_scopes = provider_config.get("extra_scopes") + capabilities = { + "issuer": provider_config["issuer"], + "authorization_endpoint": "{}/{}".format(endpoint_baseurl, AuthorizationEndpoint.url), + "jwks_uri": "{}/jwks".format(endpoint_baseurl), + "response_types_supported": response_types_supported, + "id_token_signing_alg_values_supported": [signing_key.alg], + "response_modes_supported": ["fragment", "query"], + "subject_types_supported": subject_types_supported, + "claim_types_supported": ["normal"], + "claims_parameter_supported": True, + "claims_supported": [ + attribute_map["openid"][0] + for attribute_map in internal_attributes["attributes"].values() + if "openid" in attribute_map + ], + "request_parameter_supported": False, + "request_uri_parameter_supported": False, + "scopes_supported": scopes_supported + } + + if 'code' in response_types_supported: + capabilities["token_endpoint"] = "{}/{}".format( + endpoint_baseurl, TokenEndpoint.url + ) + + if provider_config.get("client_registration_supported", False): + 
capabilities["registration_endpoint"] = "{}/{}".format( + endpoint_baseurl, RegistrationEndpoint.url + ) + + provider = Provider( + signing_key, + capabilities, + authz_state, + cdb, + Userinfo(user_db), + extra_scopes=extra_scopes, + id_token_lifetime=provider_config.get("id_token_lifetime", 3600), + ) + return provider + + +def _init_authorization_state(provider_config, db_uri, sub_hash_salt): + if db_uri: + authz_code_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="authz_codes", + ttl=provider_config.get("authorization_code_lifetime", 600), + ) + access_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="access_tokens", + ttl=provider_config.get("access_token_lifetime", 3600), + ) + refresh_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="refresh_tokens", + ttl=provider_config.get("refresh_token_lifetime", None), + ) + sub_db = StorageBase.from_uri( + db_uri, db_name="satosa", collection="subject_identifiers", ttl=None + ) + else: + authz_code_db = None + access_token_db = None + refresh_token_db = None + sub_db = None + + token_lifetimes = { + k: provider_config[k] + for k in [ + "authorization_code_lifetime", + "access_token_lifetime", + "refresh_token_lifetime", + "refresh_token_threshold", + ] + if k in provider_config + } + return AuthorizationState( + HashBasedSubjectIdentifierFactory(sub_hash_salt), + authz_code_db, + access_token_db, + refresh_token_db, + sub_db, + **token_lifetimes, + ) + + def combine_return_input(values): return values From bece3e259eaee34b9cd926bfd73f0f62d1718753 Mon Sep 17 00:00:00 2001 From: Ali Haider Date: Wed, 22 Dec 2021 07:45:21 +0100 Subject: [PATCH 184/288] Support stateless code flow Signed-off-by: Ivan Kanakarakis --- .../openid_connect_frontend.yaml.example | 30 ++- src/satosa/frontends/openid_connect.py | 28 ++- tests/flows/test_oidc-saml.py | 171 ++++++++++++++++++ 3 files changed, 220 insertions(+), 9 deletions(-) diff --git 
a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 6c74b2d4c..d7a5584d8 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -3,9 +3,35 @@ name: OIDC config: signing_key_path: frontend.key signing_key_id: frontend.key1 - db_uri: mongodb://db.example.com # optional: only support MongoDB, will default to in-memory storage if not specified + + # Defines the database connection URI for the databases: + # - authz_code_db + # - access_token_db + # - refresh_token_db + # - sub_db + # - user_db + # + # supported storage backends: + # - In-memory dictionary + # - MongoDB (e.g. mongodb://db.example.com) + # - Redis (e.g. redis://example/0) + # - Stateless (eg. stateless://user:encryptionkey?alg=aes256) + # + # This configuration is optional. + # By default, the in-memory storage is used. + db_uri: mongodb://db.example.com + + # Where to store clients. + # + # If client_db_uri is set, the database connection is used. + # Otherwise, if client_db_path is set, the JSON file is used. + # By default, an in-memory dictionary is used. 
+ client_db_uri: mongodb://db.example.com client_db_path: /path/to/your/cdb.json - sub_hash_salt: randomSALTvalue # if not specified, it is randomly generated on every startup + + # if not specified, it is randomly generated on every startup + sub_hash_salt: randomSALTvalue + provider: client_registration_supported: Yes response_types_supported: ["code", "id_token token"] diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index f144b43e2..d4069aec0 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -1,20 +1,32 @@ """ A OpenID Connect frontend module for the satosa proxy """ + import json import logging from collections import defaultdict from urllib.parse import urlencode, urlparse from jwkest.jwk import rsa_load, RSAKey + from oic.oic import scope2claims -from oic.oic.message import (AuthorizationRequest, AuthorizationErrorResponse, TokenErrorResponse, - UserInfoErrorResponse) -from oic.oic.provider import RegistrationEndpoint, AuthorizationEndpoint, TokenEndpoint, UserinfoEndpoint +from oic.oic.message import AuthorizationRequest +from oic.oic.message import AuthorizationErrorResponse +from oic.oic.message import TokenErrorResponse +from oic.oic.message import UserInfoErrorResponse +from oic.oic.provider import RegistrationEndpoint +from oic.oic.provider import AuthorizationEndpoint +from oic.oic.provider import TokenEndpoint +from oic.oic.provider import UserinfoEndpoint + from pyop.access_token import AccessToken from pyop.authz_state import AuthorizationState -from pyop.exceptions import (InvalidAuthenticationRequest, InvalidClientRegistrationRequest, - InvalidClientAuthentication, OAuthError, BearerTokenError, InvalidAccessToken) +from pyop.exceptions import InvalidAuthenticationRequest +from pyop.exceptions import InvalidClientRegistrationRequest +from pyop.exceptions import InvalidClientAuthentication +from pyop.exceptions import OAuthError +from pyop.exceptions 
import BearerTokenError +from pyop.exceptions import InvalidAccessToken from pyop.provider import Provider from pyop.storage import StorageBase from pyop.subject_identifier import HashBasedSubjectIdentifierFactory @@ -57,7 +69,7 @@ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, db_uri = self.config.get("db_uri") self.user_db = ( StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") - if db_uri + if db_uri and not StorageBase.type(db_uri) == "stateless" else {} ) @@ -108,7 +120,9 @@ def handle_authn_response(self, context, internal_resp): claims = self.converter.from_internal("openid", internal_resp.attributes) # Filter unset claims claims = {k: v for k, v in claims.items() if v} - self.user_db[internal_resp.subject_id] = dict(combine_claim_values(claims.items())) + self.user_db[internal_resp.subject_id] = dict( + combine_claim_values(claims.items()) + ) auth_resp = self.provider.authorize( auth_req, internal_resp.subject_id, diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index c70ba5c8b..257a8f7c9 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -1,4 +1,6 @@ +import os import json +import base64 from urllib.parse import urlparse, urlencode, parse_qsl import pytest @@ -20,8 +22,27 @@ CLIENT_ID = "client1" +CLIENT_SECRET = "secret" +CLIENT_REDIRECT_URI = "https://client.example.com/cb" REDIRECT_URI = "https://client.example.com/cb" +@pytest.fixture(scope="session") +def client_db_path(tmpdir_factory): + tmpdir = str(tmpdir_factory.getbasetemp()) + path = os.path.join(tmpdir, "cdb.json") + cdb_json = { + CLIENT_ID: { + "response_types": ["id_token", "code"], + "redirect_uris": [ + CLIENT_REDIRECT_URI + ], + "client_secret": CLIENT_SECRET + } + } + with open(path, "w") as f: + f.write(json.dumps(cdb_json)) + + return path @pytest.fixture def oidc_frontend_config(signing_key_path, mongodb_instance): @@ -47,6 +68,25 @@ def 
oidc_frontend_config(signing_key_path, mongodb_instance): return data +@pytest.fixture +def oidc_stateless_frontend_config(signing_key_path, client_db_path): + data = { + "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", + "name": "OIDCFrontend", + "config": { + "issuer": "https://proxy-op.example.com", + "signing_key_path": signing_key_path, + "client_db_path": client_db_path, + "db_uri": "stateless://user:abc123@localhost", + "provider": { + "response_types_supported": ["id_token", "code"] + } + } + } + + return data + + class TestOIDCToSAML: def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_config, idp_conf): subject_id = "testuser1" @@ -105,3 +145,134 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ (name, values) in id_token_claims.items() for name, values in OIDC_USERS[subject_id].items() ) + + def test_full_stateless_id_token_flow(self, satosa_config_dict, oidc_stateless_frontend_config, saml_backend_config, idp_conf): + subject_id = "testuser1" + + # proxy config + satosa_config_dict["FRONTEND_MODULES"] = [oidc_stateless_frontend_config] + satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config] + satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name], + "saml": [attr_name]} + for attr_name in USERS[subject_id]} + _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) + + # application + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) + + # get frontend OP config info + provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) + + # create auth req + claims_request = ClaimsRequest(id_token=Claims(**{k: None for k in USERS[subject_id]})) + req_args = {"scope": "openid", "response_type": "id_token", "client_id": CLIENT_ID, + "redirect_uri": REDIRECT_URI, "nonce": "nonce", + "claims": claims_request.to_json()} + auth_req = 
urlparse(provider_config["authorization_endpoint"]).path + "?" + urlencode(req_args) + + # make auth req to proxy + proxied_auth_req = test_client.get(auth_req) + assert proxied_auth_req.status == "303 See Other" + + # config test IdP + backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) + idp_conf["metadata"]["inline"].append(backend_metadata_str) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + + # create auth resp + req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) + url, authn_resp = fakeidp.handle_auth_req( + req_params["SAMLRequest"], + req_params["RelayState"], + BINDING_HTTP_REDIRECT, + subject_id, + response_binding=BINDING_HTTP_REDIRECT) + + # make auth resp to proxy + authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp) + authn_resp = test_client.get(authn_resp_req) + assert authn_resp.status == "303 See Other" + + # verify auth resp from proxy + resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).fragment)) + signing_key = RSAKey(key=rsa_load(oidc_stateless_frontend_config["config"]["signing_key_path"]), + use="sig", alg="RS256") + id_token_claims = JWS().verify_compact(resp_dict["id_token"], keys=[signing_key]) + + assert all( + (name, values) in id_token_claims.items() + for name, values in OIDC_USERS[subject_id].items() + ) + + def test_full_stateless_code_flow(self, satosa_config_dict, oidc_stateless_frontend_config, saml_backend_config, idp_conf): + subject_id = "testuser1" + + # proxy config + satosa_config_dict["FRONTEND_MODULES"] = [oidc_stateless_frontend_config] + satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config] + satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name], + "saml": [attr_name]} + for attr_name in USERS[subject_id]} + _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) + + # application + test_client = 
Client(make_app(SATOSAConfig(satosa_config_dict)), Response) + + # get frontend OP config info + provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) + + # create auth req + claims_request = ClaimsRequest(id_token=Claims(**{k: None for k in USERS[subject_id]})) + req_args = {"scope": "openid", "response_type": "code", "client_id": CLIENT_ID, + "redirect_uri": REDIRECT_URI, "nonce": "nonce", + "claims": claims_request.to_json()} + auth_req = urlparse(provider_config["authorization_endpoint"]).path + "?" + urlencode(req_args) + + # make auth req to proxy + proxied_auth_req = test_client.get(auth_req) + assert proxied_auth_req.status == "303 See Other" + + # config test IdP + backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) + idp_conf["metadata"]["inline"].append(backend_metadata_str) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + + # create auth resp + req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) + url, authn_resp = fakeidp.handle_auth_req( + req_params["SAMLRequest"], + req_params["RelayState"], + BINDING_HTTP_REDIRECT, + subject_id, + response_binding=BINDING_HTTP_REDIRECT) + + # make auth resp to proxy + authn_resp_req = urlparse(url).path + "?" 
+ urlencode(authn_resp) + authn_resp = test_client.get(authn_resp_req) + assert authn_resp.status == "303 See Other" + + resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).query)) + code = resp_dict["code"] + client_id_secret_str = CLIENT_ID + ":" + CLIENT_SECRET + auth_header = "Basic %s" % base64.b64encode(client_id_secret_str.encode()).decode() + + authn_resp = test_client.post(provider_config["token_endpoint"], + data={ + "code": code, + "grant_type": "authorization_code", + "redirect_uri": CLIENT_REDIRECT_URI + }, + headers={'Authorization': auth_header}) + + assert authn_resp.status == "200 OK" + + # verify auth resp from proxy + resp_dict = json.loads(authn_resp.data.decode("utf-8")) + signing_key = RSAKey(key=rsa_load(oidc_stateless_frontend_config["config"]["signing_key_path"]), + use="sig", alg="RS256") + id_token_claims = JWS().verify_compact(resp_dict["id_token"], keys=[signing_key]) + + assert all( + (name, values) in id_token_claims.items() + for name, values in OIDC_USERS[subject_id].items() + ) From b903d97fe16d8cf5787b35c42349b0fc9111d2df Mon Sep 17 00:00:00 2001 From: Ali Haider Date: Wed, 20 Apr 2022 13:16:43 +0500 Subject: [PATCH 185/288] Remove user entry from the user_db in the case of stateless flow --- src/satosa/frontends/openid_connect.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index d4069aec0..c2787ea5c 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -67,9 +67,10 @@ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, ) db_uri = self.config.get("db_uri") + self.stateless = StorageBase.type(db_uri) == "stateless" self.user_db = ( StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") - if db_uri and not StorageBase.type(db_uri) == "stateless" + if db_uri and not self.stateless else {} ) @@ -130,6 +131,9 @@ def 
handle_authn_response(self, context, internal_resp): self._get_extra_id_token_claims(user_id, client_id), ) + if self.stateless: + del self.user_db[internal_resp.subject_id] + del context.state[self.name] http_response = auth_resp.request(auth_req["redirect_uri"], should_fragment_encode(auth_req)) return SeeOther(http_response) From eb0ba8ddc4da9480396be72bf387927d74843ae0 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 6 May 2022 18:46:01 +0300 Subject: [PATCH 186/288] Update documentation Signed-off-by: Ivan Kanakarakis --- README.md | 103 ++++++++++++++++----------- doc/README.md | 193 +++++++++++++++++++++++++++++--------------------- 2 files changed, 175 insertions(+), 121 deletions(-) diff --git a/README.md b/README.md index 044091a86..a1f251318 100644 --- a/README.md +++ b/README.md @@ -1,63 +1,84 @@ # SATOSA -[![Build Status](https://travis-ci.org/IdentityPython/SATOSA.svg?branch=travis)](https://travis-ci.org/IdentityPython/SATOSA) + [![PyPI](https://img.shields.io/pypi/v/SATOSA.svg)](https://pypi.python.org/pypi/SATOSA) -A configurable proxy for translating between different authentication protocols such as SAML2, -OpenID Connect and OAuth2. +A configurable proxy for translating between different authentication protocols +such as SAML2, OpenID Connect and OAuth2. 
+ # Table of Contents - [Installation](doc/README.md#installation) - - [Docker](doc/README.md#docker) - - [Manual installation](doc/README.md#manual_installation) - - [Dependencies](doc/README.md#dependencies) - - [Instructions](doc/README.md#install_instructions) - - [External micro-services](doc/README.md#install_external) + - [Docker](doc/README.md#docker) + - [Manual installation](doc/README.md#manual-installation) + - [Dependencies](doc/README.md#dependencies) + - [Instructions](doc/README.md#instructions) - [Configuration](doc/README.md#configuration) - - [SATOSA proxy configuration: proxy_conf.yaml.example](doc/README.md#proxy_conf) - - [Additional services](doc/README.md#additional_service) - - [Attribute mapping configuration: internal_attributes.yaml](doc/README.md#attr_map) - - [attributes](doc/README.md#attributes) - - [user_id_from_attrs](doc/README.md#user_id_from_attrs) - - [user_id_to_attr](doc/README.md#user_id_to_attr) + - [SATOSA proxy configuration: proxy_conf.yaml.example](doc/README.md#satosa-proxy-configuration-proxy_confyamlexample) + - [Attribute mapping configuration: internal_attributes.yaml](doc/README.md#attribute-mapping-configuration-internal_attributesyaml) + - [attributes](doc/README.md#attributes) + - [user_id_from_attrs](doc/README.md#user_id_from_attrs) + - [user_id_to_attr](doc/README.md#user_id_to_attr) - [Plugins](doc/README.md#plugins) - - [SAML2 plugins](doc/README.md#saml_plugin) - - [Metadata](doc/README.md#metadata) - - [Frontend](doc/README.md#saml_frontend) - - [Backend](doc/README.md#saml_backend) - - [Name ID Format](doc/README.md#name_id) - - [OpenID Connect plugins](doc/README.md#openid_plugin) - - [Backend](doc/README.md#openid_backend) - - [Frontend](doc/README.md#openid_frontend) - - [Social login plugins](doc/README.md#social_plugins) - - [Google](doc/README.md#google) - - [Facebook](doc/README.md#facebook) -- [Generating proxy metadata](doc/README.md#saml_proxy_metadata) -- [Running the proxy 
application](doc/README.md#run) + - [SAML2 plugins](doc/README.md#saml2-plugins) + - [Metadata](doc/README.md#metadata) + - [AuthnContextClassRef](doc/README.md#providing-authncontextclassref) + - [Frontend](doc/README.md#saml2-frontend) + - [Custom attribute release](doc/README.md#custom-attribute-release) + - [Policy](doc/README.md#policy) + - [Backend](doc/README.md#saml2-backend) + - [Name ID Format](doc/README.md#name-id-format) + - [Discovery service](doc/README.md#use-a-discovery-service) + - [ForceAuthn option](doc/README.md#mirror-the-saml-forceauthn-option) + - [Memorize IdP](doc/README.md#memorize-the-idp-selected-through-the-discovery-service) + - [OpenID Connect plugins](doc/README.md#openid-connect-plugins) + - [Frontend](doc/README.md#oidc-frontend) + - [Backend](doc/README.md#oidc-backend) + - [Social login plugins](doc/README.md#social-login-plugins) + - [Google](doc/README.md#google) + - [Facebook](doc/README.md#facebook) + - [Dummy adapters](doc/README.md#dummy-adapters) + - [Micro-services](doc/README.md#micro-services) +- [Generating proxy metadata](doc/README.md#generate-proxy-metadata) +- [Running the proxy application](doc/README.md#running-the-proxy-application) +- [External contributions](doc/README.md#external-contributions) # Use cases + In this section a set of use cases for the proxy is presented. + ## SAML2<->SAML2 -There are SAML2 service providers for example Box which is not able to handle multiple identity -providers. For more information about how to set up, configure and run such a proxy instance -please visit [Single Service Provider<->Multiple Identity providers](doc/one-to-many.md) -If an identity provider can not communicate with service providers in for example a federation the -can convert request and make the communication possible. +There are SAML2 service providers for example Box which is not able to handle +multiple identity providers. 
For more information about how to set up, +configure and run such a proxy instance please visit [Single Service +Provider<->Multiple Identity providers](doc/one-to-many.md) + +If an identity provider can not communicate with service providers in for +example a federation the can convert request and make the communication +possible. + ## SAML2<->Social logins -This setup makes it possible to connect a SAML2 service provider to multiple social media identity -providers such as Google and Facebook. The proxy makes it possible to mirror a identity provider by -generating SAML2 metadata corresponding that provider and create dynamic endpoint which -are connected to a single identity provider. -For more information about how to set up, configure and run such a proxy instance please visit -[SAML2<->Social logins](doc/SAML2-to-Social_logins.md) + +This setup makes it possible to connect a SAML2 service provider to multiple +social media identity providers such as Google and Facebook. The proxy makes it +possible to mirror a identity provider by generating SAML2 metadata +corresponding that provider and create dynamic endpoint which are connected to +a single identity provider. + +For more information about how to set up, configure and run such a proxy +instance please read [SAML2<->Social logins](doc/SAML2-to-Social_logins.md) + ## SAML2<->OIDC -The proxy is able to act as a proxy between a SAML2 service provider and a OpenID connect provider -[SAML2<->OIDC](doc/saml2-to-oidc.md) + +The proxy is able to act as a proxy between a SAML2 service provider and a +OpenID connect provider [SAML2<->OIDC](doc/saml2-to-oidc.md) # Contact -If you have any questions regarding operations/deployment of SATOSA please use the satosa-users [mailing list](https://lists.sunet.se/listinfo/satosa-users). + +If you have any questions regarding operations/deployment of SATOSA please use +the satosa-users [mailing list](https://lists.sunet.se/listinfo/satosa-users). 
diff --git a/doc/README.md b/doc/README.md index f4b907ec7..bf6e56099 100644 --- a/doc/README.md +++ b/doc/README.md @@ -4,14 +4,14 @@ This document describes how to install and configure the SATOSA proxy. # Installation -## Docker +## Docker A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/r/satosa/), and is the recommended ways of running the proxy. -## Manual installation +## Manual installation -### Dependencies +### Dependencies SATOSA requires Python 3.4 (or above), and the following packages on Ubuntu: @@ -19,7 +19,7 @@ SATOSA requires Python 3.4 (or above), and the following packages on Ubuntu: apt-get install libffi-dev libssl-dev xmlsec1 ```` -### Instructions +### Instructions 1. Download the SATOSA proxy project as a [compressed archive](https://github.com/IdentityPython/SATOSA/releases) and unpack it to ``. @@ -32,29 +32,6 @@ apt-get install libffi-dev libssl-dev xmlsec1 Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. -### External micro-services - -Micro-services act like plugins and can be developed by anyone. Other people -that have been working with the SaToSa proxy, have built extentions mainly in -the form of additional micro-services that can be shared and used by anyone. - -DAASI International have been a long-time user of this software and have made -their extentions available, licensed under Apache2.0. 
You can find the -extensions using the following URL: - -- https://gitlab.daasi.de/didmos2/didmos2-auth/-/tree/master/src/didmos_oidc/satosa/micro_services - -The extentions include: - -- SCIM attribute store to fetch attributes via SCIM API (instead of LDAP) -- Authoritzation module for blocking services if necessary group memberships or - attributes are missing in the identity (for service providers that do not - evaluate attributes themselves) -- Backend chooser with Django UI for letting the user choose between any - existing SATOSA backend -- Integration of MFA via PrivacyIDEA - -and more. # Configuration @@ -97,7 +74,7 @@ value for `bind_password` will be `secret_password`. bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE ``` -## SATOSA proxy configuration: `proxy_conf.yaml.example` +## SATOSA proxy configuration: `proxy_conf.yaml.example` | Parameter name | Data type | Example value | Description | | -------------- | --------- | ------------- | ----------- | @@ -112,7 +89,7 @@ bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE | `MICRO_SERVICES` | string[] | `[statistics_service.yaml]` | list of plugin configuration file paths, describing enabled microservices | | `LOGGING` | dict | see [Python logging.conf](https://docs.python.org/3/library/logging.config.html) | optional configuration of application logging | -## Attribute mapping configuration: `internal_attributes.yaml` +## Attribute mapping configuration: `internal_attributes.yaml` ### attributes @@ -185,19 +162,19 @@ When the [ALService](https://github.com/its-dirg/ALservice) is used for account linking, the `user_id_to_attr` configuration parameter should be set, since that service will overwrite the subject identifier generated by the proxy. -## Plugins +# Plugins The authentication protocol specific communication is handled by different plugins, divided into frontends (receiving requests from clients) and backends (sending requests to target providers). 
-### Common plugin configuration parameters +## Common plugin configuration parameters Both `name` and `module` must be specified in all plugin configurations (frontends, backends, and micro services). The `name` must be unique to ensure correct functionality, and the `module` must be the fully qualified name of an importable Python module. -### SAML2 plugins +## SAML2 plugins Common configuration parameters: @@ -212,7 +189,7 @@ Common configuration parameters: | `entityid_endpoint` | bool | `true` | whether `entityid` should be used as a URL that serves the metadata xml document | `acr_mapping` | dict | `None` | custom Authentication Context Class Reference -#### Metadata +### Metadata The metadata could be loaded in multiple ways in the table above it's loaded from a static file by using the key "local". It's also possible to load read the metadata from a remote URL. @@ -235,7 +212,7 @@ For more detailed information on how you could customize the SAML entities, see the [documentation of the underlying library pysaml2](https://github.com/rohe/pysaml2/blob/master/docs/howto/config.rst). -#### Providing `AuthnContextClassRef` +### Providing `AuthnContextClassRef` SAML2 frontends and backends can provide a custom (configurable) *Authentication Context Class Reference*. For the frontend this is defined in the `AuthnStatement` of the authentication response, while, @@ -264,7 +241,7 @@ config: "https://accounts.google.com": LoA1 ``` -#### Frontend +### SAML2 Frontend The SAML2 frontend act as a SAML Identity Provider (IdP), accepting authentication requests from SAML Service Providers (SP). 
The default @@ -301,7 +278,7 @@ An example configuration can be found [here](../example/plugins/frontends/saml2_ `SP -> Virtual CO SAMLFrontend -> SAMLBackend -> optional discovery service -> target IdP` -##### Custom attribute release +#### Custom attribute release In addition to respecting for example entity categories from the SAML metadata, the SAML frontend can also further restrict the attribute release with the `custom_attribute_release` configuration parameter based on the SP entity id. @@ -332,7 +309,7 @@ config: exclude: ["givenName"] ``` -##### Policy +#### Policy Some settings related to how a SAML response is formed can be overriden on a per-instance or a per-SP basis. This example summarizes the most common settings (hopefully self-explanatory) with their defaults: @@ -354,13 +331,13 @@ Overrides per SP entityID is possible by using the entityID as a key instead of in the yaml structure. The most specific key takes presedence. If no policy overrides are provided the defaults above are used. -#### Backend +### SAML2 Backend The SAML2 backend act as a SAML Service Provider (SP), making authentication requests to SAML Identity Providers (IdP). The default configuration file can be found [here](../example/plugins/backends/saml2_backend.yaml.example). -##### Name ID Format +#### Name ID Format The SAML backend can indicate which *Name ID* format it wants by specifying the key `name_id_format` in the SP entity configuration in the backend plugin configuration: @@ -373,7 +350,7 @@ The SAML backend can indicate which *Name ID* format it wants by specifying the name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:transient ``` -##### Use a discovery service +#### Use a discovery service To allow the user to choose which target provider they want to authenticate with, the configuration parameter `disco_srv`, must be specified if the metadata given to the backend module contains more than one IdP: @@ -384,7 +361,7 @@ config: sp_config: [...] 
``` -##### Mirror the SAML ForceAuthn option +#### Mirror the SAML ForceAuthn option By default when the SAML frontend receives a SAML authentication request with `ForceAuthn` set to `True`, this information is not mirrored in the SAML @@ -402,7 +379,7 @@ config: [...] ``` -##### Memorize the IdP selected through the discovery service +#### Memorize the IdP selected through the discovery service In the classic flow, the user is asked to select their home organization to authenticate to. The `memorize_idp` configuration option controls whether @@ -437,22 +414,9 @@ config: [...] ``` -### OpenID Connect plugins - -#### Backend - -The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making -authentication requests to OpenID Connect Provider (OP). The default -configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). - -The example configuration assumes the OP supports [discovery](http://openid.net/specs/openid-connect-discovery-1_0.html) -and [dynamic client registration](https://openid.net/specs/openid-connect-registration-1_0.html). -When using an OP that only supports statically registered clients, see the -[default configuration for using Google as the OP](../example/plugins/backends/google_backend.yaml.example) -and make sure to provide the redirect URI, constructed as described in the -section about Google configuration below, in the static registration. +## OpenID Connect plugins -#### Frontend +### OIDC Frontend The OpenID Connect frontend acts as and OpenID Connect Provider (OP), accepting requests from OpenID Connect Relying Parties (RPs). The default configuration file can be found @@ -484,7 +448,20 @@ The configuration parameters available: The other parameters should be left with their default values. -### Social login plugins +### OIDC Backend + +The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making +authentication requests to OpenID Connect Provider (OP). 
The default +configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). + +The example configuration assumes the OP supports [discovery](http://openid.net/specs/openid-connect-discovery-1_0.html) +and [dynamic client registration](https://openid.net/specs/openid-connect-registration-1_0.html). +When using an OP that only supports statically registered clients, see the +[default configuration for using Google as the OP](../example/plugins/backends/google_backend.yaml.example) +and make sure to provide the redirect URI, constructed as described in the +section about Google configuration below, in the static registration. + +### Social login plugins The social login plugins can be used as backends for the proxy, allowing the proxy to act as a client to the social login services. @@ -537,6 +514,9 @@ for information on how to obtain them. A list of all user attributes released by Facebook can be found [here](https://developers.facebook.com/docs/graph-api/reference/v2.5/user), which should be used when configuring the attribute mapping (see above). + +## Dummy adapters + ### Ping frontend for simple heartbeat monitoring The ping frontend responds to a query with a simple @@ -544,15 +524,17 @@ The ping frontend responds to a query with a simple for example by a load balancer. The default configuration file can be found [here](../example/plugins/frontends/ping_frontend.yaml.example). -### Micro services -Additional behaviour can be configured in the proxy through so called *micro services*. There are two different types -of micro services: *request micro services* which are applied to the incoming request, and *response micro services* -which are applied to the incoming response from the target provider. +## Micro-services + +Additional behaviour can be configured in the proxy through so called *micro +services*. 
There are two different types of micro services: *request micro +services* which are applied to the incoming request, and *response micro +services* which are applied to the incoming response from the target provider. The following micro services are bundled with SATOSA. -#### Adding static attributes to all responses +### Adding static attributes to all responses To add a set of static attributes, use the `AddStaticAttributes` class which will add pre-configured (static) attributes, see the @@ -567,7 +549,7 @@ country: Sweden where the keys are the internal attribute names defined in `internal_attributes.yaml`. -#### Filtering attribute values +### Filtering attribute values Attribute values delivered from the target provider can be filtered based on a per target provider per requester basis using the `FilterAttributeValues` class. See the [example configuration](../example/plugins/microservices/filter_attributes.yaml.example). @@ -584,7 +566,7 @@ where the empty string (`""`) can be used as a key on any level to describe a de The filters are applied such that all attribute values matched by the regular expression are preserved, while any non-matching attribute values will be discarded. 
-##### Examples +#### Examples Filter attributes from the target provider `https://provider.example.com`, to only preserve values starting with the string `"foo:bar"`: @@ -612,7 +594,7 @@ the string `"foo:bar"`: "attr1": "foo:bar" ``` -#### Apply a Attribute Policy +### Apply an Attribute Policy Attributes delivered from the target provider can be filtered based on a list of allowed attributes per requester using the `AttributePolicy` class: @@ -625,25 +607,25 @@ attribute_policy: - attr2 ``` -#### Route to a specific backend based on the requester +### Route to a specific backend based on the requester To choose which backend (essentially choosing target provider) to use based on the requester, use the `DecideBackendByRequester` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/requester_based_routing.yaml.example). -#### Route to a specific backend based on the target entity id +### Route to a specific backend based on the target entity id Use the `DecideBackendByTargetIssuer` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/target_based_routing.yaml.example). -#### Route to a specific backend based on the discovery service response +### Route to a specific backend based on the discovery service response If a Discovery Service is in use and a target entity id is selected by users, you may want to use the `DiscoToTargetIssuer` class together with `DecideBackendByTargetIssuer` to be able to select a backend (essentially choosing target provider) based on the response from the discovery service. See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). 
-#### Filter authentication requests to target SAML entities +### Filter authentication requests to target SAML entities If using the `SAMLMirrorFrontend` module and some of the target providers should support some additional SP's, the `DecideIfRequesterIsAllowed` micro service can be used. It provides a rules mechanism to describe which SP's are @@ -683,7 +665,7 @@ rules: deny: ["requester1"] ``` -#### Account linking +### Account linking To allow account linking (multiple accounts at possibly different target providers are linked together as belonging to the same user), an external service can be used. See the [example config](../example/plugins/microservices/account_linking.yaml.example) @@ -693,16 +675,17 @@ the same REST API). This micro service must be the first in the list of configured micro services in the `proxy_conf.yaml` to ensure correct functionality. -#### User consent management +### User consent management -To handle user consent of released information, an external service can be used. See the [example config](../example/plugins/microservices/consent.yaml.example) -which is intended to work with the [CMService](https://github.com/its-dirg/CMservice) (or any other service providing -the same REST API). +To handle user consent of released information, an external service can be +used. See the [example config](../example/plugins/microservices/consent.yaml.example) +which is intended to work with the [CMService](https://github.com/its-dirg/CMservice) +(or any other service providing the same RESTish API). -This micro service must be the last in the list of configured micro services in the `proxy_conf.yaml` to ensure -correct functionality. +This micro service must be the last in the list of configured micro services in +the `proxy_conf.yaml` to ensure correct functionality. 
-#### LDAP attribute store +### LDAP attribute store An identifier such as eduPersonPrincipalName asserted by an IdP can be used to look up a person record in an LDAP directory to find attributes to assert about the authenticated user to the SP. The identifier @@ -712,7 +695,7 @@ persistent NameID may also be obtained from attributes returned from the LDAP di LDAP microservice install the extra necessary dependencies with `pip install satosa[ldap]` and then see the [example config](../example/plugins/microservices/ldap_attribute_store.yaml.example). -#### Support for IdP Hinting +### Support for IdP Hinting It's possible to hint an IdP to SaToSa using the `IdpHinting` micro-service. @@ -734,7 +717,8 @@ methods: * Request micro services must inherit `satosa.micro_services.base.RequestMicroService`. * Response micro services must inherit `satosa.micro_services.base.ResponseMicroService`. -# Generate proxy metadata + +# Generate proxy metadata The proxy metadata is generated based on the front-/backend plugins listed in `proxy_conf.yaml` using the `satosa-saml-metadata` (installed globally by SATOSA installation). 
@@ -747,6 +731,7 @@ satosa-saml-metadata Date: Fri, 6 May 2022 19:13:41 +0300 Subject: [PATCH 187/288] Release version 8.1.0 ## 8.1.0 (2022-05-06) - OIDC frontend: support stateless code flow - OIDC frontend: support Redis and session expiration - orcid backend: allow family-name to be optional - docs: add references to external contributions - docs: update structure Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 9 +++++++++ setup.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 621e3e9a0..98fcf1b31 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.0.1 +current_version = 8.1.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index bbdade9d7..6eef4a532 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## 8.1.0 (2022-05-06) + +- OIDC frontend: support stateless code flow +- OIDC frontend: support Redis and session expiration +- orcid backend: allow family-name to be optional +- docs: add references to external contributions +- docs: update structure + + ## 8.0.1 (2022-02-22) - Reinitialize state if error occurs while loading state diff --git a/setup.py b/setup.py index 175b97b29..c1718cba9 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.0.1', + version='8.1.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 02367a3a87eb01ae58463ef98ce5e84ce03b38fe Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 31 May 2022 14:29:46 +0300 Subject: [PATCH 188/288] Set minimum pyop version to v3.4.0 to ensure the needed methods are available Signed-off-by: Ivan Kanakarakis --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c1718cba9..ca750f0d3 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ packages=find_packages('src/'), 
package_dir={'': 'src'}, install_requires=[ - "pyop >= 3.3.1", + "pyop >= v3.4.0", "pysaml2 >= 6.5.1", "pycryptodomex", "requests", From 043300c554de4d5a4f7cff8b9dc5010c71707616 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 20 Jun 2022 12:41:48 +0200 Subject: [PATCH 189/288] docs: fix internal_attributes.xml example ORCID attributes have different names (coming from OrcidBackend) --- example/internal_attributes.yaml.example | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/example/internal_attributes.yaml.example b/example/internal_attributes.yaml.example index 02e1a131e..a1c5dff9a 100644 --- a/example/internal_attributes.yaml.example +++ b/example/internal_attributes.yaml.example @@ -1,43 +1,43 @@ attributes: address: openid: [address.street_address] - orcid: [addresses.str] + orcid: [address] saml: [postaladdress] displayname: openid: [nickname] - orcid: [name.credit-name] + orcid: [displayname] github: [login] saml: [displayName] edupersontargetedid: facebook: [id] linkedin: [id] - orcid: [orcid] + orcid: [edupersontargetedid] github: [id] openid: [sub] saml: [eduPersonTargetedID] givenname: facebook: [first_name] linkedin: [email-address] - orcid: [name.given-names.value] + orcid: [givenname] openid: [given_name] saml: [givenName] mail: facebook: [email] linkedin: [email-address] - orcid: [emails.str] + orcid: [mail] github: [email] openid: [email] saml: [email, emailAddress, mail] name: facebook: [name] - orcid: [name.credit-name] + orcid: [name] github: [name] openid: [name] saml: [cn] surname: facebook: [last_name] linkedin: [lastName] - orcid: [name.family-name.value] + orcid: [surname] openid: [family_name] saml: [sn, surname] user_id_from_attrs: [edupersontargetedid] From 1dc9565d68d2d909c58921225c8a2adce069120d Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 23 Jun 2022 01:41:35 +0300 Subject: [PATCH 190/288] Release version 8.1.1 ## 8.1.1 (2022-06-23) - OIDC frontend: Set 
minimum pyop version to v3.4.0 to ensure the needed methods are available - docs: Fix orcid mapping in example internal_attributes Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 6 ++++++ setup.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 98fcf1b31..bb573d655 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.1.0 +current_version = 8.1.1 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 6eef4a532..264005e65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 8.1.1 (2022-06-23) + +- OIDC frontend: Set minimum pyop version to v3.4.0 to ensure the needed methods are available +- docs: Fix orcid mapping in example internal_attributes + + ## 8.1.0 (2022-05-06) - OIDC frontend: support stateless code flow diff --git a/setup.py b/setup.py index ca750f0d3..4e4f9f0d1 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.1.0', + version='8.1.1', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 6a4a83bfef8a92f7e1a14dc717af5e427cfd410a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 1 Jul 2022 16:41:27 +0300 Subject: [PATCH 191/288] Fix mailing list link Prefer idpy-discuss as the central point to have discussions. Signed-off-by: Ivan Kanakarakis --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a1f251318..112d9459b 100644 --- a/README.md +++ b/README.md @@ -81,4 +81,4 @@ OpenID connect provider [SAML2<->OIDC](doc/saml2-to-oidc.md) # Contact If you have any questions regarding operations/deployment of SATOSA please use -the satosa-users [mailing list](https://lists.sunet.se/listinfo/satosa-users). +the satosa-users [mailing list](https://lists.sunet.se/postorius/lists/idpy-discuss.lists.sunet.se/). 
From 385cc0987e8b0bd9c9bde57a05c38e2efc71a62c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Fri, 12 Aug 2022 01:03:14 +0300 Subject: [PATCH 192/288] chore: Remove optional args to create_metadata_string Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 5 +++-- src/satosa/frontends/saml2.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index d50a93fb7..b8310aea7 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -452,8 +452,9 @@ def _metadata_endpoint(self, context): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - metadata_string = create_metadata_string(None, self.sp.config, 4, None, None, None, None, - None).decode("utf-8") + metadata_string = create_metadata_string( + configfile=None, config=self.sp.config, valid=4 + ).decode("utf-8") return Response(metadata_string, content="text/xml") def register_endpoints(self): diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index b481b5d25..4dcc40833 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -486,8 +486,9 @@ def _metadata_endpoint(self, context): msg = "Sending metadata response for entityId = {}".format(self.idp.config.entityid) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - metadata_string = create_metadata_string(None, self.idp.config, 4, None, None, None, None, - None).decode("utf-8") + metadata_string = create_metadata_string( + configfile=None, config=self.idp.config, valid=4 + ).decode("utf-8") return Response(metadata_string, content="text/xml") def _reload_metadata(self, context): From 6219d21b850988d06d7fa3eea13cc1a2de9f7b92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Fri, 19 Aug 2022 11:42:37 +0200 Subject: [PATCH 193/288] fix: name is optional in ORCID backend previously 
backend threw KeyError for users with empty name --- src/satosa/backends/orcid.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/orcid.py b/src/satosa/backends/orcid.py index 6ad69fe96..d0ceee9b9 100644 --- a/src/satosa/backends/orcid.py +++ b/src/satosa/backends/orcid.py @@ -73,7 +73,7 @@ def _authn_response(self, context): request_args=rargs, state=aresp['state']) user_info = self.user_information( - atresp['access_token'], atresp['orcid'], atresp['name']) + atresp['access_token'], atresp['orcid'], atresp.get('name')) internal_response = InternalData( auth_info=self.auth_info(context.request)) internal_response.attributes = self.converter.to_internal( @@ -82,7 +82,7 @@ def _authn_response(self, context): del context.state[self.name] return self.auth_callback_func(context, internal_response) - def user_information(self, access_token, orcid, name): + def user_information(self, access_token, orcid, name=None): base_url = self.config['server_info']['user_info'] url = urljoin(base_url, '{}/person'.format(orcid)) headers = { @@ -92,13 +92,15 @@ def user_information(self, access_token, orcid, name): r = requests.get(url, headers=headers) r = r.json() emails, addresses = r['emails']['email'], r['addresses']['address'] + rname = r.get('name') or {} ret = dict( address=', '.join([e['country']['value'] for e in addresses]), displayname=name, edupersontargetedid=orcid, orcid=orcid, mail=' '.join([e['email'] for e in emails]), name=name, - givenname=r['name']['given-names']['value'], - surname=(r['name']['family-name'] or {}).get('value'), + givenname=(rname.get('given-names') or {}).get('value'), + surname=(rname.get('family-name') or {}).get('value'), ) + return ret From 2e3dcf8a8a7ba11fa8db293ff97bc207990bbc7f Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 25 Aug 2022 15:50:03 +0200 Subject: [PATCH 194/288] oidc_frontend: mirror public subject Add `sub_mirror_subject` configuration parameter. 
If this is set to true, the subject received from the backend will be mirrored to the client, if public sub is used. To maintain backwards compatibility, the default value is false. MirrorPublicSubjectIdentifierFactory would normally belong to pyop, but in order to keep the code and the configuration in the same place, this code overloads pyop's HashBasedSubjectIdentifierFactory. --- doc/README.md | 1 + src/satosa/frontends/openid_connect.py | 22 ++++++++++++++++--- tests/satosa/frontends/test_openid_connect.py | 20 +++++++++++++++++ 3 files changed, 40 insertions(+), 3 deletions(-) diff --git a/doc/README.md b/doc/README.md index bf6e56099..cc23d27cf 100644 --- a/doc/README.md +++ b/doc/README.md @@ -433,6 +433,7 @@ The configuration parameters available: * `client_db_uri`: connection URI to MongoDB or Redis instance where the client data will be persistent, if it's not specified the clients list will be received from the `client_db_path`. * `client_db_path`: path to a file containing the client database in json format. It will only be used if `client_db_uri` is not set. If `client_db_uri` and `client_db_path` are not set, clients will only be stored in-memory (not suitable for production use). * `sub_hash_salt`: salt which is hashed into the `sub` claim. If it's not specified, SATOSA will generate a random salt on each startup, which means that users will get new `sub` value after every restart. +* `sub_mirror_subject` (default: `No`): if this is set to `Yes` and SATOSA releases a public `sub` claim to the client, then the subject identifier received from the backend will be mirrored to the client. The default is to hash the public subject identifier with `sub_hash_salt`. Pairwise `sub` claims are always hashed. * `provider`: provider configuration information. 
MUST be configured, the following configuration are supported: * `response_types_supported` (default: `[id_token]`): list of all supported response types, see [Section 3 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#Authentication). * `subject_types_supported` (default: `[pairwise]`): list of all supported subject identifier types, see [Section 8 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index c2787ea5c..40b6730d1 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -46,6 +46,11 @@ logger = logging.getLogger(__name__) +class MirrorPublicSubjectIdentifierFactory(HashBasedSubjectIdentifierFactory): + def create_public_identifier(self, user_id): + return user_id + + class OpenIDConnectFrontend(FrontendModule): """ A OpenID Connect frontend module @@ -75,7 +80,10 @@ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, ) sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) - authz_state = _init_authorization_state(provider_config, db_uri, sub_hash_salt) + mirror_public = self.config.get("sub_mirror_public", False) + authz_state = _init_authorization_state( + provider_config, db_uri, sub_hash_salt, mirror_public + ) client_db_uri = self.config.get("client_db_uri") cdb_file = self.config.get("client_db_path") @@ -460,7 +468,9 @@ def _create_provider( return provider -def _init_authorization_state(provider_config, db_uri, sub_hash_salt): +def _init_authorization_state( + provider_config, db_uri, sub_hash_salt, mirror_public=False +): if db_uri: authz_code_db = StorageBase.from_uri( db_uri, @@ -499,8 +509,14 @@ def _init_authorization_state(provider_config, db_uri, sub_hash_salt): ] if k in provider_config } + + subject_id_factory = ( + MirrorPublicSubjectIdentifierFactory(sub_hash_salt) + if mirror_public + else 
HashBasedSubjectIdentifierFactory(sub_hash_salt) + ) return AuthorizationState( - HashBasedSubjectIdentifierFactory(sub_hash_salt), + subject_id_factory, authz_code_db, access_token_db, refresh_token_db, diff --git a/tests/satosa/frontends/test_openid_connect.py b/tests/satosa/frontends/test_openid_connect.py index cb322e680..3fad27e82 100644 --- a/tests/satosa/frontends/test_openid_connect.py +++ b/tests/satosa/frontends/test_openid_connect.py @@ -402,6 +402,26 @@ def test_register_endpoints_dynamic_client_registration_is_configurable( provider_info = ProviderConfigurationResponse().deserialize(frontend.provider_config(None).message, "json") assert ("registration_endpoint" in provider_info) == client_registration_enabled + @pytest.mark.parametrize("sub_mirror_public", [ + True, + False + ]) + def test_mirrored_subject(self, context, frontend_config, authn_req, sub_mirror_public): + frontend_config["sub_mirror_public"] = sub_mirror_public + frontend_config["provider"]["subject_types_supported"] = ["public"] + frontend = self.create_frontend(frontend_config) + + self.insert_client_in_client_db(frontend, authn_req["redirect_uri"]) + internal_response = self.setup_for_authn_response(context, frontend, authn_req) + http_resp = frontend.handle_authn_response(context, internal_response) + + resp = AuthorizationResponse().deserialize(urlparse(http_resp.message).fragment) + id_token = IdToken().from_jwt(resp["id_token"], key=[frontend.signing_key]) + if sub_mirror_public: + assert id_token["sub"] == OIDC_USERS["testuser1"]["eduPersonTargetedID"][0] + else: + assert id_token["sub"] != OIDC_USERS["testuser1"]["eduPersonTargetedID"][0] + def test_token_endpoint(self, context, frontend_config, authn_req): token_lifetime = 60 * 60 * 24 frontend_config["provider"]["access_token_lifetime"] = token_lifetime From 630ebfa429cd8e1398b8b07ddf4a47df9c0c6880 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 5 Sep 2022 12:25:42 +0200 Subject: [PATCH 195/288] 
feat: get name of user in Apple backend Apple sends the name only via POST in the first authentication (ever) --- src/satosa/backends/apple.py | 107 ++++++++++++++++++++++++----------- 1 file changed, 74 insertions(+), 33 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 633e22c19..fd6d90534 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -76,10 +76,7 @@ def start_auth(self, context, request_info): """ oidc_nonce = rndstr() oidc_state = rndstr() - state_data = { - NONCE_KEY: oidc_nonce, - STATE_KEY: oidc_state - } + state_data = {NONCE_KEY: oidc_nonce, STATE_KEY: oidc_state} context.state[self.name] = state_data args = { @@ -88,7 +85,7 @@ def start_auth(self, context, request_info): "client_id": self.client.client_id, "redirect_uri": self.client.registration_response["redirect_uris"][0], "state": oidc_state, - "nonce": oidc_nonce + "nonce": oidc_nonce, } args.update(self.config["client"]["auth_req_params"]) auth_req = self.client.construct_AuthorizationRequest(request_args=args) @@ -104,7 +101,9 @@ def register_endpoints(self): :return: A list that can be used to map the request to SATOSA to this endpoint. 
""" url_map = [] - redirect_path = urlparse(self.config["client"]["client_metadata"]["redirect_uris"][0]).path + redirect_path = urlparse( + self.config["client"]["client_metadata"]["redirect_uris"][0] + ).path if not redirect_path: raise SATOSAError("Missing path in redirect uri") @@ -122,10 +121,16 @@ def _verify_nonce(self, nonce, context): """ backend_state = context.state[self.name] if nonce != backend_state[NONCE_KEY]: - msg = "Missing or invalid nonce in authn response for state: {}".format(backend_state) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + msg = "Missing or invalid nonce in authn response for state: {}".format( + backend_state + ) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "Missing or invalid nonce in authn response") + raise SATOSAAuthenticationError( + context.state, "Missing or invalid nonce in authn response" + ) def _get_tokens(self, authn_response, context): """ @@ -142,14 +147,14 @@ def _get_tokens(self, authn_response, context): "client_secret": self.client.client_secret, "code": authn_response["code"], "grant_type": "authorization_code", - "redirect_uri": self.client.registration_response['redirect_uris'][0], + "redirect_uri": self.client.registration_response["redirect_uris"][0], } token_resp = requests.post( "https://appleid.apple.com/auth/token", data=args, - headers={"Content-Type": "application/x-www-form-urlencoded"} - ).json() + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ).json() logger.debug("apple response received") logger.debug(token_resp) @@ -157,7 +162,9 @@ def _get_tokens(self, authn_response, context): self._check_error_response(token_resp, context) keyjar = self.client.keyjar - id_token_claims = dict(Message().from_jwt(token_resp["id_token"], keyjar=keyjar)) + id_token_claims = dict( + Message().from_jwt(token_resp["id_token"], keyjar=keyjar) + ) return 
token_resp["access_token"], id_token_claims @@ -176,7 +183,9 @@ def _check_error_response(self, response, context): error=response["error"], description=response.get("error_description", ""), ) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) logger.debug(logline) raise SATOSAAuthenticationError(context.state, "Access denied") @@ -192,24 +201,49 @@ def response_endpoint(self, context, *args): :return: """ backend_state = context.state[self.name] - authn_resp = self.client.parse_response(AuthorizationResponse, info=context.request, sformat="dict") + + # Apple sends some user information only via POST in the first request + if "user" in context.request: + userinfo = json.load(context.request["user"]) + userinfo["name"] = " ".join( + filter( + None, + [ + userinfo.get("firstName", ""), + userinfo.get("middleName", ""), + userinfo.get("lastName", ""), + ], + ) + ) + else: + # Apple has no userinfo endpoint + userinfo = {} + + authn_resp = self.client.parse_response( + AuthorizationResponse, info=context.request, sformat="dict" + ) if backend_state[STATE_KEY] != authn_resp["state"]: - msg = "Missing or invalid state in authn response for state: {}".format(backend_state) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + msg = "Missing or invalid state in authn response for state: {}".format( + backend_state + ) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "Missing or invalid state in authn response") + raise SATOSAAuthenticationError( + context.state, "Missing or invalid state in authn response" + ) self._check_error_response(authn_resp, context) access_token, id_token_claims = self._get_tokens(authn_resp, context) if not id_token_claims: id_token_claims = {} - # Apple has no userinfo endpoint - userinfo = {} - if 
not id_token_claims and not userinfo: msg = "No id_token or userinfo, nothing to do.." - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) logger.error(logline) raise SATOSAAuthenticationError(context.state, "No user info available.") @@ -218,7 +252,9 @@ def response_endpoint(self, context, *args): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) del context.state[self.name] - internal_resp = self._translate_response(all_user_claims, self.client.authorization_endpoint) + internal_resp = self._translate_response( + all_user_claims, self.client.authorization_endpoint + ) return self.auth_callback_func(context, internal_resp) def _translate_response(self, response, issuer): @@ -245,7 +281,9 @@ def get_metadata_desc(self): See satosa.backends.oauth.get_metadata_desc :rtype: satosa.metadata_creation.description.MetadataDescription """ - return get_metadata_desc_for_oauth_backend(self.config["provider_metadata"]["issuer"], self.config) + return get_metadata_desc_for_oauth_backend( + self.config["provider_metadata"]["issuer"], self.config + ) def _create_client(provider_metadata, client_metadata, verify_ssl=True): @@ -258,15 +296,15 @@ def _create_client(provider_metadata, client_metadata, verify_ssl=True): :return: client instance to use for communicating with the configured provider :rtype: oic.oic.Client """ - client = oic.Client( - client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl - ) + client = oic.Client(client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl) # Provider configuration information if "authorization_endpoint" in provider_metadata: # no dynamic discovery necessary - client.handle_provider_config(ProviderConfigurationResponse(**provider_metadata), - provider_metadata["issuer"]) + client.handle_provider_config( + ProviderConfigurationResponse(**provider_metadata), + 
provider_metadata["issuer"], + ) else: # do dynamic discovery client.provider_config(provider_metadata["issuer"]) @@ -277,9 +315,12 @@ def _create_client(provider_metadata, client_metadata, verify_ssl=True): client.store_registration_info(RegistrationRequest(**client_metadata)) else: # do dynamic registration - client.register(client.provider_info['registration_endpoint'], - **client_metadata) + client.register( + client.provider_info["registration_endpoint"], **client_metadata + ) - client.subject_type = (client.registration_response.get("subject_type") or - client.provider_info["subject_types_supported"][0]) + client.subject_type = ( + client.registration_response.get("subject_type") + or client.provider_info["subject_types_supported"][0] + ) return client From 253a15f35fbd0c3e84f116fee84bceb16d4e1599 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 5 Sep 2022 17:07:08 +0200 Subject: [PATCH 196/288] Update apple.py --- src/satosa/backends/apple.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index fd6d90534..61b0351e4 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -203,19 +203,11 @@ def response_endpoint(self, context, *args): backend_state = context.state[self.name] # Apple sends some user information only via POST in the first request - if "user" in context.request: - userinfo = json.load(context.request["user"]) - userinfo["name"] = " ".join( - filter( - None, - [ - userinfo.get("firstName", ""), - userinfo.get("middleName", ""), - userinfo.get("lastName", ""), - ], - ) - ) - else: + # https://developer.apple.com/documentation/sign_in_with_apple/sign_in_with_apple_rest_api/authenticating_users_with_sign_in_with_apple + try: + userdata = context.request.get("user", "{}") + userinfo = json.load(userdata) + except Exception as e: # Apple has no userinfo endpoint userinfo = {} From 
58421c86e5a92aee6f0b8ed5273e70bac9c01a3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 5 Sep 2022 22:21:09 +0200 Subject: [PATCH 197/288] Update src/satosa/backends/apple.py Co-authored-by: Ivan Kanakarakis --- src/satosa/backends/apple.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 61b0351e4..d7b21da46 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -202,13 +202,16 @@ def response_endpoint(self, context, *args): """ backend_state = context.state[self.name] - # Apple sends some user information only via POST in the first request - # https://developer.apple.com/documentation/sign_in_with_apple/sign_in_with_apple_rest_api/authenticating_users_with_sign_in_with_apple + # Apple has no userinfo endpoint + # but may send some user information via POST in the first request. + # + # References: + # - https://developer.apple.com/documentation/sign_in_with_apple/sign_in_with_apple_rest_api/authenticating_users_with_sign_in_with_apple + # - https://developer.apple.com/documentation/sign_in_with_apple/namei try: userdata = context.request.get("user", "{}") userinfo = json.load(userdata) except Exception as e: - # Apple has no userinfo endpoint userinfo = {} authn_resp = self.client.parse_response( From 128b8e2355d2c1ed446a11b0642c4f63c39b78ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Wed, 5 Oct 2022 11:07:24 +0200 Subject: [PATCH 198/288] feat: is_passive option for SAML backend allows sending IsPassive to SAML IdP --- src/satosa/backends/saml2.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index b8310aea7..b6d0d8910 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -84,6 +84,7 @@ class SAMLBackend(BackendModule, SAMLBaseModule): KEY_SP_CONFIG = 'sp_config' KEY_SEND_REQUESTER_ID = 
'send_requester_id' KEY_MIRROR_FORCE_AUTHN = 'mirror_force_authn' + KEY_IS_PASSIVE = 'is_passive' KEY_MEMORIZE_IDP = 'memorize_idp' KEY_USE_MEMORIZED_IDP_WHEN_FORCE_AUTHN = 'use_memorized_idp_when_force_authn' @@ -284,6 +285,8 @@ def authn_request(self, context, entity_id): if self.config.get(SAMLBackend.KEY_SEND_REQUESTER_ID): requester = context.state.state_dict[STATE_KEY_BASE]['requester'] kwargs["scoping"] = Scoping(requester_id=[RequesterID(text=requester)]) + if self.config.get(SAMLBackend.KEY_IS_PASSIVE): + kwargs["is_passive"] = "true" try: acs_endp, response_binding = self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][0] From 1576878773e3279d990956d3ef75064ff346ba55 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Thu, 13 Oct 2022 13:28:10 +0200 Subject: [PATCH 199/288] docs: correct attribute_generation.yaml.example --- example/plugins/microservices/attribute_generation.yaml.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example/plugins/microservices/attribute_generation.yaml.example b/example/plugins/microservices/attribute_generation.yaml.example index a1c65c91b..45f5b269f 100644 --- a/example/plugins/microservices/attribute_generation.yaml.example +++ b/example/plugins/microservices/attribute_generation.yaml.example @@ -7,5 +7,5 @@ config: eduPersonAffiliation: member;employee default: default: - schacHomeOrganization: {{eduPersonPrincipalName.scope}} + schacHomeOrganization: "{{eduPersonPrincipalName.scope}}" schacHomeOrganizationType: tomfoolery provider From 1220c310e38ccc86309dc4f39bdc49a8acaa48ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Sun, 16 Oct 2022 10:29:59 +0200 Subject: [PATCH 200/288] fix(attribute_generation): run mustach only on strings --- src/satosa/micro_services/attribute_generation.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/satosa/micro_services/attribute_generation.py 
b/src/satosa/micro_services/attribute_generation.py index d96d8e1e1..907a8462d 100644 --- a/src/satosa/micro_services/attribute_generation.py +++ b/src/satosa/micro_services/attribute_generation.py @@ -129,7 +129,11 @@ def _synthesize(self, attributes, requester, provider): for attr_name, values in attributes.items(): context[attr_name] = MustachAttrValue( attr_name, - values if values is not None else [] + values + if values + and isinstance(values, list) + and all(isinstance(value, str) for value in values) + else [], ) recipes = get_dict_defaults(self.synthetic_attributes, requester, provider) From 7d1f76d2b2651670f58edce2888d2df4ea9f72d2 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Mon, 24 Oct 2022 12:08:55 +0200 Subject: [PATCH 201/288] doc: fix name_id_format vs name_id_policy_format ambiguity After `0c1873da1` in pysaml2, the ambiguity between the format in the of the and the in the metadata has been resolved. This change provides a followup in the SATOSA documentation and example. --- doc/README.md | 16 +++++++++++++--- .../plugins/backends/saml2_backend.yaml.example | 10 ++++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/doc/README.md b/doc/README.md index cc23d27cf..5f0a6d610 100644 --- a/doc/README.md +++ b/doc/README.md @@ -339,15 +339,25 @@ found [here](../example/plugins/backends/saml2_backend.yaml.example). #### Name ID Format -The SAML backend can indicate which *Name ID* format it wants by specifying the key -`name_id_format` in the SP entity configuration in the backend plugin configuration: +The SAML backend has two ways to indicate which *Name ID* format it wants: +* `name_id_format`: is a list of strings to set the `` element in + SP metadata +* `name_id_policy_format`: is a string to set the `Format` attribute in the + `` element in the authentication request. + +The default is to not set any of the above. 
Note that if the IdP can not +provide the NameID in a format, which is requested in the ``, it +must return an error. ```yaml config: sp_config: service: sp: - name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:transient + name_id_format: + - urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress + - urn:oasis:names:tc:SAML:2.0:nameid-format:transient + name_id_policy_format: urn:oasis:names:tc:SAML:2.0:nameid-format:transient ``` #### Use a discovery service diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 335da8117..2dbe97092 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -64,8 +64,10 @@ config: - [//acs/post, 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST'] discovery_response: - [//disco, 'urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol'] - name_id_format: 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient' - # A name_id_format of 'None' will cause the authentication request to not - # include a Format attribute in the NameIDPolicy. - # name_id_format: 'None' + + # name_id_format: a list of strings to set the element in SP metadata + # name_id_policy_format: a string to set the Format attribute in the NameIDPolicy element + # of the authentication request + # name_id_format_allow_create: sets the AllowCreate attribute in the NameIDPolicy element + # of the authentication request name_id_format_allow_create: true From 3af4dddae4380b930c5fe415f0c855997e5a30d8 Mon Sep 17 00:00:00 2001 From: Nyiro Gergo Date: Thu, 27 Oct 2022 17:11:05 +0200 Subject: [PATCH 202/288] satosa.base: log state on debug level State of satosa can contain some encoded data (cookies_samesite_compat) which are to verbose on info level. Therefore The "Loaded state ..." log message is emitted on debug level. 
--- src/satosa/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index 7468a4ca0..7288aca08 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -207,7 +207,7 @@ def _load_state(self, context): context.state = state msg = "Loaded state {state} from cookie {cookie}".format(state=state, cookie=context.cookie) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.info(logline) + logger.debug(logline) def _save_state(self, resp, context): """ From 6c988f672ca990a1ac8f3101f20e3a9a739d698c Mon Sep 17 00:00:00 2001 From: claycooper Date: Fri, 28 Oct 2022 14:56:34 -0400 Subject: [PATCH 203/288] Updated links to Docker Hub --- doc/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/README.md b/doc/README.md index 5f0a6d610..c5b8317ef 100644 --- a/doc/README.md +++ b/doc/README.md @@ -6,7 +6,7 @@ This document describes how to install and configure the SATOSA proxy. ## Docker -A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/r/satosa/), and is the +A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/_/satosa), and is the recommended ways of running the proxy. ## Manual installation @@ -30,7 +30,7 @@ apt-get install libffi-dev libssl-dev xmlsec1 pip install ``` -Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. +Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/_/satosa) can be used. 
# Configuration From c629dd5dfce44675bc263241c880a6481ccb33d6 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 9 Nov 2022 10:25:05 +0100 Subject: [PATCH 204/288] fix: OpenIDConnectFrontend: check for empty db_uri (#420) With IdentityPython/pyop#44 merged, OpenIDConnectFrontend init fails when `db_uri` is not set, as `StorageBase.type` now throws a `ValueError` for db_uri values that do not match one of the recognised storage types (including when `db_uri` is `None`). Fix this by guarding the `StorageBase.type` with a pythonic test whether `db_uri` was provided. Same test already guards `StorageBase.from_uri`, add it also to the `StorageBase.type` call made to determine `self.stateless`. --- src/satosa/frontends/openid_connect.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index 40b6730d1..88041b373 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -72,7 +72,7 @@ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, ) db_uri = self.config.get("db_uri") - self.stateless = StorageBase.type(db_uri) == "stateless" + self.stateless = db_uri and StorageBase.type(db_uri) == "stateless" self.user_db = ( StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") if db_uri and not self.stateless From e0a4fb3b142b6931bc7b864af377b929fe70c3c7 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 15:19:25 +0100 Subject: [PATCH 205/288] tests: remove real MongoDB dependency OIDC integration tests now use mongomock instead of launching a full mongodb server, which may not be available in a development environment. 
--- tests/conftest.py | 85 ------------------- tests/flows/test_oidc-saml.py | 29 ++++--- .../scripts/test_satosa_saml_metadata.py | 4 +- tests/test_requirements.txt | 1 + 4 files changed, 22 insertions(+), 97 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9e7a5e18f..e6c11fa36 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -361,88 +361,3 @@ def consent_module_config(signing_key_path): } } return consent_config - - -import atexit -import random -import shutil -import subprocess -import tempfile -import time - -import pymongo -import pytest - - -class MongoTemporaryInstance(object): - """Singleton to manage a temporary MongoDB instance - - Use this for testing purpose only. The instance is automatically destroyed - at the end of the program. - - """ - _instance = None - - @classmethod - def get_instance(cls): - if cls._instance is None: - cls._instance = cls() - atexit.register(cls._instance.shutdown) - return cls._instance - - def __init__(self): - self._tmpdir = tempfile.mkdtemp() - self._port = 27017 - self._process = subprocess.Popen(['mongod', '--bind_ip', 'localhost', - '--port', str(self._port), - '--dbpath', self._tmpdir, - '--nojournal', - '--noauth', - '--syncdelay', '0'], - stdout=open('/tmp/mongo-temp.log', 'wb'), - stderr=subprocess.STDOUT) - - # XXX: wait for the instance to be ready - # Mongo is ready in a glance, we just wait to be able to open a - # Connection. 
- for i in range(10): - time.sleep(0.2) - try: - self._conn = pymongo.MongoClient('localhost', self._port) - except pymongo.errors.ConnectionFailure: - continue - else: - break - else: - self.shutdown() - assert False, 'Cannot connect to the mongodb test instance' - - @property - def conn(self): - return self._conn - - @property - def port(self): - return self._port - - def shutdown(self): - if self._process: - self._process.terminate() - self._process.wait() - self._process = None - shutil.rmtree(self._tmpdir, ignore_errors=True) - - def get_uri(self): - """ - Convenience function to get a mongodb URI to the temporary database. - - :return: URI - """ - return 'mongodb://localhost:{port!s}'.format(port=self.port) - - -@pytest.fixture -def mongodb_instance(): - tmp_db = MongoTemporaryInstance() - yield tmp_db - tmp_db.shutdown() diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index 257a8f7c9..2a299bfef 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -3,11 +3,12 @@ import base64 from urllib.parse import urlparse, urlencode, parse_qsl +import mongomock import pytest from jwkest.jwk import rsa_load, RSAKey from jwkest.jws import JWS from oic.oic.message import ClaimsRequest, Claims -from pyop.storage import MongoWrapper +from pyop.storage import StorageBase from saml2 import BINDING_HTTP_REDIRECT from saml2.config import IdPConfig from werkzeug.test import Client @@ -25,6 +26,7 @@ CLIENT_SECRET = "secret" CLIENT_REDIRECT_URI = "https://client.example.com/cb" REDIRECT_URI = "https://client.example.com/cb" +DB_URI = "mongodb://localhost/satosa" @pytest.fixture(scope="session") def client_db_path(tmpdir_factory): @@ -45,7 +47,7 @@ def client_db_path(tmpdir_factory): return path @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -53,18 
+55,11 @@ def oidc_frontend_config(signing_key_path, mongodb_instance): "issuer": "https://proxy-op.example.com", "signing_key_path": signing_key_path, "provider": {"response_types_supported": ["id_token"]}, - "client_db_uri": mongodb_instance.get_uri(), # use mongodb for integration testing - "db_uri": mongodb_instance.get_uri() # use mongodb for integration testing + "client_db_uri": DB_URI, # use mongodb for integration testing + "db_uri": DB_URI # use mongodb for integration testing } } - # insert client in mongodb - cdb = MongoWrapper(mongodb_instance.get_uri(), "satosa", "clients") - cdb[CLIENT_ID] = { - "redirect_uris": [REDIRECT_URI], - "response_types": ["id_token"] - } - return data @@ -87,8 +82,20 @@ def oidc_stateless_frontend_config(signing_key_path, client_db_path): return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestOIDCToSAML: + def _client_setup(self): + """Insert client in mongodb.""" + self._cdb = StorageBase.from_uri( + DB_URI, db_name="satosa", collection="clients", ttl=None + ) + self._cdb[CLIENT_ID] = { + "redirect_uris": [REDIRECT_URI], + "response_types": ["id_token"] + } + def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_config, idp_conf): + self._client_setup() subject_id = "testuser1" # proxy config diff --git a/tests/satosa/scripts/test_satosa_saml_metadata.py b/tests/satosa/scripts/test_satosa_saml_metadata.py index 26809dc2a..f76f5d990 100644 --- a/tests/satosa/scripts/test_satosa_saml_metadata.py +++ b/tests/satosa/scripts/test_satosa_saml_metadata.py @@ -1,6 +1,7 @@ import glob import os +import mongomock import pytest from saml2.config import Config from saml2.mdstore import MetaDataFile @@ -10,7 +11,7 @@ @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -23,6 +24,7 @@ def 
oidc_frontend_config(signing_key_path, mongodb_instance): return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestConstructSAMLMetadata: def test_saml_saml(self, tmpdir, cert_and_key, satosa_config_dict, saml_frontend_config, saml_backend_config): diff --git a/tests/test_requirements.txt b/tests/test_requirements.txt index bf7f30deb..1991e4cac 100644 --- a/tests/test_requirements.txt +++ b/tests/test_requirements.txt @@ -2,3 +2,4 @@ pytest responses beautifulsoup4 ldap3 +mongomock From f701c73caf6360b58c314dddb2b0ff08a138b734 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 22:21:05 +0100 Subject: [PATCH 206/288] tox.ini: ignore all current flake8 errors ...so that flake8 tests can pass without an error. Fixing flake8 tests should happen in a TDD fashion with removing the fixed test from the ignore list. --- tox.ini | 34 +++++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index e26b9ef89..36515d12c 100644 --- a/tox.ini +++ b/tox.ini @@ -24,4 +24,36 @@ commands = pytest -vvv -ra {posargs:tests/} [flake8] -ignore = E501 +ignore = + F401 + E402 + E501 + E111 + E117 + E121 + E123 + E125 + E126 + E201 + E202 + E203 + E221 + E226 + E231 + E261 + E262 + E265 + E275 + E302 + E303 + E703 + F601 + F811 + F821 + F841 + W291 + W292 + W293 + W503 + W504 + W605 From c5bbb9b4f95d31212a440867e5a2028cb4794f78 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 22:21:05 +0100 Subject: [PATCH 207/288] flake8: fix unused imports (F401) --- src/satosa/__init__.py | 2 +- src/satosa/backends/apple.py | 1 - src/satosa/base.py | 1 - src/satosa/frontends/ping.py | 1 - src/satosa/micro_services/attribute_processor.py | 1 - src/satosa/micro_services/primary_identifier.py | 1 - .../micro_services/processors/scope_remover_processor.py | 2 +- src/satosa/proxy_server.py | 1 - src/satosa/state.py | 4 +--- src/satosa/yaml.py | 3 +-- tests/flows/test_wsgi_flow.py | 2 -- 
tests/satosa/backends/test_openid_connect.py | 2 +- tests/satosa/metadata_creation/test_description.py | 2 -- tests/satosa/micro_services/test_attribute_generation.py | 1 - tests/satosa/micro_services/test_consent.py | 2 +- tests/satosa/micro_services/test_custom_routing.py | 3 +-- tests/satosa/micro_services/test_idp_hinting.py | 2 -- tests/satosa/test_satosa_config.py | 1 - tox.ini | 1 - 19 files changed, 7 insertions(+), 26 deletions(-) diff --git a/src/satosa/__init__.py b/src/satosa/__init__.py index 895e0166f..eeadbe8f8 100644 --- a/src/satosa/__init__.py +++ b/src/satosa/__init__.py @@ -1,4 +1,4 @@ # -*- coding: utf-8 -*- """SATOSA: An any to any Single Sign On (SSO) proxy.""" -from .version import version as __version__ +from .version import version as __version__ # noqa: F401 diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index d7b21da46..f197b0f38 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -22,7 +22,6 @@ from ..exception import SATOSAAuthenticationError, SATOSAError from ..response import Redirect -import base64 import json import requests diff --git a/src/satosa/base.py b/src/satosa/base.py index 7288aca08..0db349451 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -9,7 +9,6 @@ from satosa import util from .context import Context -from .exception import SATOSAConfigurationError from .exception import SATOSAError, SATOSAAuthenticationError, SATOSAUnknownError from .plugin_loader import load_backends, load_frontends from .plugin_loader import load_request_microservices, load_response_microservices diff --git a/src/satosa/frontends/ping.py b/src/satosa/frontends/ping.py index 8eda3948c..4444cd83d 100644 --- a/src/satosa/frontends/ping.py +++ b/src/satosa/frontends/ping.py @@ -2,7 +2,6 @@ import satosa.logging_util as lu import satosa.micro_services.base -from satosa.logging_util import satosa_logging from satosa.response import Response diff --git 
a/src/satosa/micro_services/attribute_processor.py b/src/satosa/micro_services/attribute_processor.py index 1973402b2..7232e484e 100644 --- a/src/satosa/micro_services/attribute_processor.py +++ b/src/satosa/micro_services/attribute_processor.py @@ -1,5 +1,4 @@ import importlib -import json import logging from satosa.exception import SATOSAError diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 9c892570d..9275779f9 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -5,7 +5,6 @@ the value for a configured attribute, for example uid. """ -import copy import logging import urllib.parse diff --git a/src/satosa/micro_services/processors/scope_remover_processor.py b/src/satosa/micro_services/processors/scope_remover_processor.py index b6e61b7ed..82073b5b8 100644 --- a/src/satosa/micro_services/processors/scope_remover_processor.py +++ b/src/satosa/micro_services/processors/scope_remover_processor.py @@ -1,4 +1,4 @@ -from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning +from ..attribute_processor import AttributeProcessorWarning from .base_processor import BaseProcessor class ScopeRemoverProcessor(BaseProcessor): diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index ce7fd1459..03305d4ce 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -1,7 +1,6 @@ import json import logging import logging.config -import sys from io import BytesIO from urllib.parse import parse_qsl as _parse_query_string diff --git a/src/satosa/state.py b/src/satosa/state.py index 7feba1a9e..05e343529 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -11,9 +11,7 @@ from satosa.cookies import SimpleCookie from uuid import uuid4 -from lzma import LZMACompressor -from lzma import LZMADecompressor -from lzma import LZMAError +from lzma import LZMACompressor, LZMADecompressor from 
Cryptodome import Random from Cryptodome.Cipher import AES diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py index 9efa202c6..2f8d51f1b 100644 --- a/src/satosa/yaml.py +++ b/src/satosa/yaml.py @@ -1,9 +1,8 @@ import os -import re from yaml import SafeLoader as _safe_loader from yaml import YAMLError -from yaml import safe_load as load +from yaml import safe_load as load # noqa: F401 def _constructor_env_variables(loader, node): diff --git a/tests/flows/test_wsgi_flow.py b/tests/flows/test_wsgi_flow.py index fcae4ce21..ab9d636f5 100644 --- a/tests/flows/test_wsgi_flow.py +++ b/tests/flows/test_wsgi_flow.py @@ -1,8 +1,6 @@ """ Complete test for a SAML to SAML proxy. """ -import json - from werkzeug.test import Client from werkzeug.wrappers import Response diff --git a/tests/satosa/backends/test_openid_connect.py b/tests/satosa/backends/test_openid_connect.py index b282e7725..b898e157c 100644 --- a/tests/satosa/backends/test_openid_connect.py +++ b/tests/satosa/backends/test_openid_connect.py @@ -9,7 +9,7 @@ import responses from Cryptodome.PublicKey import RSA from jwkest.jwk import RSAKey -from oic.oic.message import RegistrationRequest, IdToken +from oic.oic.message import IdToken from oic.utils.authn.client import CLIENT_AUTHN_METHOD from satosa.backends.openid_connect import OpenIDConnectBackend, _create_client, STATE_KEY, NONCE_KEY diff --git a/tests/satosa/metadata_creation/test_description.py b/tests/satosa/metadata_creation/test_description.py index 8b73ec923..ae8caf166 100644 --- a/tests/satosa/metadata_creation/test_description.py +++ b/tests/satosa/metadata_creation/test_description.py @@ -1,5 +1,3 @@ -from unittest.mock import mock_open, patch - import pytest from satosa.metadata_creation.description import ContactPersonDesc, UIInfoDesc, OrganizationDesc, MetadataDescription diff --git a/tests/satosa/micro_services/test_attribute_generation.py b/tests/satosa/micro_services/test_attribute_generation.py index e60ab36fc..67f669417 100644 --- 
a/tests/satosa/micro_services/test_attribute_generation.py +++ b/tests/satosa/micro_services/test_attribute_generation.py @@ -1,7 +1,6 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_generation import AddSyntheticAttributes -from satosa.exception import SATOSAAuthenticationError from satosa.context import Context class TestAddSyntheticAttributes: diff --git a/tests/satosa/micro_services/test_consent.py b/tests/satosa/micro_services/test_consent.py index 514367300..a8eaed965 100644 --- a/tests/satosa/micro_services/test_consent.py +++ b/tests/satosa/micro_services/test_consent.py @@ -1,7 +1,7 @@ import json import re from collections import Counter -from urllib.parse import urlparse, parse_qs +from urllib.parse import urlparse import pytest import requests diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index d2022bc3e..ed834ef4b 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -5,11 +5,10 @@ from satosa.context import Context from satosa.state import State -from satosa.exception import SATOSAError, SATOSAConfigurationError, SATOSAStateError +from satosa.exception import SATOSAError, SATOSAConfigurationError from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed from satosa.micro_services.custom_routing import DecideBackendByTargetIssuer -from satosa.micro_services.custom_routing import CustomRoutingError TARGET_ENTITY = "entity1" diff --git a/tests/satosa/micro_services/test_idp_hinting.py b/tests/satosa/micro_services/test_idp_hinting.py index a13d3d7a3..2fa454253 100644 --- a/tests/satosa/micro_services/test_idp_hinting.py +++ b/tests/satosa/micro_services/test_idp_hinting.py @@ -1,7 +1,5 @@ from unittest import TestCase -import pytest - from satosa.context import Context 
from satosa.internal import InternalData from satosa.state import State diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index d291d9c87..bdc504384 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -3,7 +3,6 @@ from unittest.mock import mock_open, patch import pytest -from satosa.exception import SATOSAConfigurationError from satosa.exception import SATOSAConfigurationError from satosa.satosa_config import SATOSAConfig diff --git a/tox.ini b/tox.ini index 36515d12c..d255d02aa 100644 --- a/tox.ini +++ b/tox.ini @@ -25,7 +25,6 @@ commands = [flake8] ignore = - F401 E402 E501 E111 From 64f61b2bed8db433238468e03fde3ef21aecc23d Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 22:21:05 +0100 Subject: [PATCH 208/288] flake8: fix accidental redefinition of test method (F811) --- tests/satosa/test_satosa_config.py | 14 +++----------- tox.ini | 1 - 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index bdc504384..fd5045a93 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -59,22 +59,14 @@ def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_k satosa_config_dict[modules_key] = ["/fake_file_path"] expected_config = {"foo": "bar"} + with pytest.raises(SATOSAConfigurationError): + SATOSAConfig(satosa_config_dict) + with patch("builtins.open", mock_open(read_data=json.dumps(expected_config))): config = SATOSAConfig(satosa_config_dict) assert config[modules_key] == [expected_config] - @pytest.mark.parametrize("modules_key", [ - "BACKEND_MODULES", - "FRONTEND_MODULES", - "MICRO_SERVICES" - ]) - def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_key): - satosa_config_dict[modules_key] = ["/fake_file_path"] - - with pytest.raises(SATOSAConfigurationError): - SATOSAConfig(satosa_config_dict) - def 
test_can_substitute_from_environment_variable(self, monkeypatch): monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME", "oatmeal_raisin") config = SATOSAConfig( diff --git a/tox.ini b/tox.ini index d255d02aa..066ad38ea 100644 --- a/tox.ini +++ b/tox.ini @@ -47,7 +47,6 @@ ignore = E303 E703 F601 - F811 F821 F841 W291 From 68c9fa3b7e30b3da10da9a437c308e21b919b19d Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 22:21:05 +0100 Subject: [PATCH 209/288] flake8: fix missing import (F821) --- src/satosa/backends/reflector.py | 3 ++- tox.ini | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/backends/reflector.py b/src/satosa/backends/reflector.py index 6702dc733..6a9055485 100644 --- a/src/satosa/backends/reflector.py +++ b/src/satosa/backends/reflector.py @@ -1,6 +1,7 @@ """ A reflector backend module for the satosa proxy """ +import base64 from datetime import datetime from satosa.internal import AuthenticationInformation @@ -74,7 +75,7 @@ def get_metadata_desc(self): """ entity_descriptions = [] description = MetadataDescription( - urlsafe_b64encode(ReflectorBackend.ENTITY_ID.encode("utf-8")).decode( + base64.urlsafe_b64encode(ReflectorBackend.ENTITY_ID.encode("utf-8")).decode( "utf-8" ) ) diff --git a/tox.ini b/tox.ini index 066ad38ea..d5c3b6905 100644 --- a/tox.ini +++ b/tox.ini @@ -47,7 +47,6 @@ ignore = E303 E703 F601 - F821 F841 W291 W292 From d5de97696fb96e7ba477749d32631db0b16cb14a Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Thu, 10 Nov 2022 22:26:36 +0100 Subject: [PATCH 210/288] flake8: fix accidental dictionary key redefinition (F601) --- tests/conftest.py | 8 -------- tox.ini | 1 - 2 files changed, 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e6c11fa36..f0602a028 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -130,8 +130,6 @@ def satosa_config_dict(backend_plugin_config, frontend_plugin_config, request_mi config = { "BASE": BASE_URL, "COOKIE_STATE_NAME": "TEST_STATE", - 
"BACKEND_MODULES": ["foo"], - "FRONTEND_MODULES": ["bar"], "INTERNAL_ATTRIBUTES": {"attributes": {}}, "STATE_ENCRYPTION_KEY": "state_encryption_key", "CUSTOM_PLUGIN_MODULE_PATHS": [os.path.dirname(__file__)], @@ -190,12 +188,6 @@ def saml_frontend_config(cert_and_key, sp_conf): "config": { "idp_config": { "entityid": "frontend-entity_id", - "organization": {"display_name": "Test Identities", "name": "Test Identities Org.", - "url": "http://www.example.com"}, - "contact_person": [{"contact_type": "technical", "email_address": "technical@example.com", - "given_name": "Technical"}, - {"contact_type": "support", "email_address": "support@example.com", - "given_name": "Support"}], "service": { "idp": { "endpoints": { diff --git a/tox.ini b/tox.ini index d5c3b6905..1ed5cf92e 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,6 @@ ignore = E302 E303 E703 - F601 F841 W291 W292 From 502e8757a1f3fe103bb69f70f185a003207a5b4a Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Fri, 11 Nov 2022 08:46:59 +0100 Subject: [PATCH 211/288] flake8: fix unused local variables (F841) --- src/satosa/backends/apple.py | 2 +- src/satosa/backends/oauth.py | 2 +- src/satosa/base.py | 2 +- src/satosa/frontends/saml2.py | 8 +++----- src/satosa/micro_services/consent.py | 4 ++-- src/satosa/micro_services/custom_logging.py | 4 +--- src/satosa/micro_services/primary_identifier.py | 4 ++-- tests/satosa/frontends/test_openid_connect.py | 2 +- .../test_attribute_authorization.py | 17 ++++++----------- tox.ini | 1 - 10 files changed, 18 insertions(+), 28 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index f197b0f38..edace8641 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -210,7 +210,7 @@ def response_endpoint(self, context, *args): try: userdata = context.request.get("user", "{}") userinfo = json.load(userdata) - except Exception as e: + except Exception: userinfo = {} authn_resp = self.client.parse_response( diff --git 
a/src/satosa/backends/oauth.py b/src/satosa/backends/oauth.py index 2308f1eee..1e584f617 100644 --- a/src/satosa/backends/oauth.py +++ b/src/satosa/backends/oauth.py @@ -259,7 +259,7 @@ def user_information(self, access_token): try: picture_url = data["picture"]["data"]["url"] data["picture"] = picture_url - except KeyError as e: + except KeyError: pass return data diff --git a/src/satosa/base.py b/src/satosa/base.py index 0db349451..404104920 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -200,7 +200,7 @@ def _load_state(self, context): self.config["COOKIE_STATE_NAME"], self.config["STATE_ENCRYPTION_KEY"], ) - except SATOSAStateError as e: + except SATOSAStateError: state = State() finally: context.state = state diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 4dcc40833..655e6da68 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -173,10 +173,8 @@ def _validate_config(self, config): raise ValueError("No configuration given") for key in required_keys: - try: - _val = config[key] - except KeyError as e: - raise ValueError("Missing configuration key: %s" % key) from e + if key not in config: + raise ValueError("Missing configuration key: %s" % key) def _handle_authn_request(self, context, binding_in, idp): """ @@ -630,7 +628,7 @@ def _get_sp_display_name(self, idp, entity_id): try: return extensions[0]["display_name"] - except (IndexError, KeyError) as e: + except (IndexError, KeyError): pass return None diff --git a/src/satosa/micro_services/consent.py b/src/satosa/micro_services/consent.py index 3823826da..a469e2189 100644 --- a/src/satosa/micro_services/consent.py +++ b/src/satosa/micro_services/consent.py @@ -66,7 +66,7 @@ def _handle_consent_response(self, context): except ConnectionError as e: msg = "Consent service is not reachable, no consent given." 
logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) + logger.error(logline, exc_info=e) # Send an internal_response without any attributes consent_attributes = None @@ -136,7 +136,7 @@ def process(self, context, internal_response): except requests.exceptions.ConnectionError as e: msg = "Consent service is not reachable, no consent is given." logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) + logger.error(logline, exc_info=e) # Send an internal_response without any attributes internal_response.attributes = {} return self._end_consent(context, internal_response) diff --git a/src/satosa/micro_services/custom_logging.py b/src/satosa/micro_services/custom_logging.py index c82d03449..14d435d8f 100644 --- a/src/satosa/micro_services/custom_logging.py +++ b/src/satosa/micro_services/custom_logging.py @@ -39,7 +39,7 @@ def process(self, context, data): try: spEntityID = context.state.state_dict['SATOSA_BASE']['requester'] idpEntityID = data.auth_info.issuer - except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID's for the IdP or SP".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) @@ -71,8 +71,6 @@ def process(self, context, data): logger.error(logline) return super().process(context, data) - record = None - try: msg = "{} Using context {}".format(logprefix, context) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 9275779f9..2a140a9e4 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -138,7 +138,7 @@ def process(self, context, data): # Find the entityID for the SP that initiated the flow try: spEntityID = context.state.state_dict['SATOSA_BASE']['requester'] - 
except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID for the SP requester".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) @@ -151,7 +151,7 @@ def process(self, context, data): # Find the entityID for the IdP that issued the assertion try: idpEntityID = data.auth_info.issuer - except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID for the IdP issuer".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) diff --git a/tests/satosa/frontends/test_openid_connect.py b/tests/satosa/frontends/test_openid_connect.py index 3fad27e82..f769b2c66 100644 --- a/tests/satosa/frontends/test_openid_connect.py +++ b/tests/satosa/frontends/test_openid_connect.py @@ -557,7 +557,7 @@ def test_full_flow(self, context, frontend_with_extra_scopes): frontend_with_extra_scopes.auth_req_callback_func = mock_callback # discovery http_response = frontend_with_extra_scopes.provider_config(context) - provider_config = ProviderConfigurationResponse().deserialize(http_response.message, "json") + _ = ProviderConfigurationResponse().deserialize(http_response.message, "json") # client registration registration_request = RegistrationRequest(redirect_uris=[redirect_uri], response_types=[response_type]) diff --git a/tests/satosa/micro_services/test_attribute_authorization.py b/tests/satosa/micro_services/test_attribute_authorization.py index 4bd0cfc54..15b1458ff 100644 --- a/tests/satosa/micro_services/test_attribute_authorization.py +++ b/tests/satosa/micro_services/test_attribute_authorization.py @@ -1,3 +1,4 @@ +import pytest from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_authorization import AttributeAuthorization @@ -25,7 +26,7 @@ def test_authz_allow_success(self): ctx = Context() ctx.state = dict() 
authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False def test_authz_allow_fail(self): @@ -38,13 +39,10 @@ def test_authz_allow_fail(self): resp.attributes = { "a0": ["bar"], } - try: + with pytest.raises(SATOSAAuthenticationError): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - assert False - except SATOSAAuthenticationError as ex: - assert True def test_authz_allow_second(self): attribute_allow = { @@ -60,7 +58,7 @@ def test_authz_allow_second(self): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False def test_authz_deny_success(self): @@ -73,13 +71,10 @@ def test_authz_deny_success(self): resp.attributes = { "a0": ["foo2"], } - try: + with pytest.raises(SATOSAAuthenticationError): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - assert False - except SATOSAAuthenticationError as ex: - assert True def test_authz_deny_fail(self): attribute_deny = { @@ -95,5 +90,5 @@ def test_authz_deny_fail(self): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False diff --git a/tox.ini b/tox.ini index 1ed5cf92e..95cbdc864 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,6 @@ ignore = E302 E303 E703 - F841 W291 W292 W293 From 28b3c363194b00e0db37092419f356617922b203 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Tue, 15 Nov 2022 12:33:11 +0100 Subject: [PATCH 212/288] tests: remove real MongoDB dependency (#422) OIDC integration tests now use mongomock instead of launching a full mongodb server, which may not be available in a development environment. 
--- tests/conftest.py | 85 ------------------- tests/flows/test_oidc-saml.py | 29 ++++--- .../scripts/test_satosa_saml_metadata.py | 4 +- tests/test_requirements.txt | 1 + 4 files changed, 22 insertions(+), 97 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9e7a5e18f..e6c11fa36 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -361,88 +361,3 @@ def consent_module_config(signing_key_path): } } return consent_config - - -import atexit -import random -import shutil -import subprocess -import tempfile -import time - -import pymongo -import pytest - - -class MongoTemporaryInstance(object): - """Singleton to manage a temporary MongoDB instance - - Use this for testing purpose only. The instance is automatically destroyed - at the end of the program. - - """ - _instance = None - - @classmethod - def get_instance(cls): - if cls._instance is None: - cls._instance = cls() - atexit.register(cls._instance.shutdown) - return cls._instance - - def __init__(self): - self._tmpdir = tempfile.mkdtemp() - self._port = 27017 - self._process = subprocess.Popen(['mongod', '--bind_ip', 'localhost', - '--port', str(self._port), - '--dbpath', self._tmpdir, - '--nojournal', - '--noauth', - '--syncdelay', '0'], - stdout=open('/tmp/mongo-temp.log', 'wb'), - stderr=subprocess.STDOUT) - - # XXX: wait for the instance to be ready - # Mongo is ready in a glance, we just wait to be able to open a - # Connection. 
- for i in range(10): - time.sleep(0.2) - try: - self._conn = pymongo.MongoClient('localhost', self._port) - except pymongo.errors.ConnectionFailure: - continue - else: - break - else: - self.shutdown() - assert False, 'Cannot connect to the mongodb test instance' - - @property - def conn(self): - return self._conn - - @property - def port(self): - return self._port - - def shutdown(self): - if self._process: - self._process.terminate() - self._process.wait() - self._process = None - shutil.rmtree(self._tmpdir, ignore_errors=True) - - def get_uri(self): - """ - Convenience function to get a mongodb URI to the temporary database. - - :return: URI - """ - return 'mongodb://localhost:{port!s}'.format(port=self.port) - - -@pytest.fixture -def mongodb_instance(): - tmp_db = MongoTemporaryInstance() - yield tmp_db - tmp_db.shutdown() diff --git a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index 257a8f7c9..2a299bfef 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -3,11 +3,12 @@ import base64 from urllib.parse import urlparse, urlencode, parse_qsl +import mongomock import pytest from jwkest.jwk import rsa_load, RSAKey from jwkest.jws import JWS from oic.oic.message import ClaimsRequest, Claims -from pyop.storage import MongoWrapper +from pyop.storage import StorageBase from saml2 import BINDING_HTTP_REDIRECT from saml2.config import IdPConfig from werkzeug.test import Client @@ -25,6 +26,7 @@ CLIENT_SECRET = "secret" CLIENT_REDIRECT_URI = "https://client.example.com/cb" REDIRECT_URI = "https://client.example.com/cb" +DB_URI = "mongodb://localhost/satosa" @pytest.fixture(scope="session") def client_db_path(tmpdir_factory): @@ -45,7 +47,7 @@ def client_db_path(tmpdir_factory): return path @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -53,18 
+55,11 @@ def oidc_frontend_config(signing_key_path, mongodb_instance): "issuer": "https://proxy-op.example.com", "signing_key_path": signing_key_path, "provider": {"response_types_supported": ["id_token"]}, - "client_db_uri": mongodb_instance.get_uri(), # use mongodb for integration testing - "db_uri": mongodb_instance.get_uri() # use mongodb for integration testing + "client_db_uri": DB_URI, # use mongodb for integration testing + "db_uri": DB_URI # use mongodb for integration testing } } - # insert client in mongodb - cdb = MongoWrapper(mongodb_instance.get_uri(), "satosa", "clients") - cdb[CLIENT_ID] = { - "redirect_uris": [REDIRECT_URI], - "response_types": ["id_token"] - } - return data @@ -87,8 +82,20 @@ def oidc_stateless_frontend_config(signing_key_path, client_db_path): return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestOIDCToSAML: + def _client_setup(self): + """Insert client in mongodb.""" + self._cdb = StorageBase.from_uri( + DB_URI, db_name="satosa", collection="clients", ttl=None + ) + self._cdb[CLIENT_ID] = { + "redirect_uris": [REDIRECT_URI], + "response_types": ["id_token"] + } + def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_config, idp_conf): + self._client_setup() subject_id = "testuser1" # proxy config diff --git a/tests/satosa/scripts/test_satosa_saml_metadata.py b/tests/satosa/scripts/test_satosa_saml_metadata.py index 26809dc2a..f76f5d990 100644 --- a/tests/satosa/scripts/test_satosa_saml_metadata.py +++ b/tests/satosa/scripts/test_satosa_saml_metadata.py @@ -1,6 +1,7 @@ import glob import os +import mongomock import pytest from saml2.config import Config from saml2.mdstore import MetaDataFile @@ -10,7 +11,7 @@ @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -23,6 +24,7 @@ def 
oidc_frontend_config(signing_key_path, mongodb_instance): return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestConstructSAMLMetadata: def test_saml_saml(self, tmpdir, cert_and_key, satosa_config_dict, saml_frontend_config, saml_backend_config): diff --git a/tests/test_requirements.txt b/tests/test_requirements.txt index bf7f30deb..1991e4cac 100644 --- a/tests/test_requirements.txt +++ b/tests/test_requirements.txt @@ -2,3 +2,4 @@ pytest responses beautifulsoup4 ldap3 +mongomock From 0a57dab317e1d61dbff26d124649b2e4333fee80 Mon Sep 17 00:00:00 2001 From: Dick Visser Date: Tue, 15 Nov 2022 12:41:53 +0100 Subject: [PATCH 213/288] docs: fix typos and grammar --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 112d9459b..4a8d757eb 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ configure and run such a proxy instance please visit [Single Service Provider<->Multiple Identity providers](doc/one-to-many.md) If an identity provider can not communicate with service providers in for -example a federation the can convert request and make the communication +example a federation, they can convert requests and make the communication possible. @@ -65,8 +65,8 @@ possible. This setup makes it possible to connect a SAML2 service provider to multiple social media identity providers such as Google and Facebook. The proxy makes it -possible to mirror a identity provider by generating SAML2 metadata -corresponding that provider and create dynamic endpoint which are connected to +possible to mirror an identity provider by generating SAML2 metadata +corresponding to that provider and create dynamic endpoints which are connected to a single identity provider. 
For more information about how to set up, configure and run such a proxy From 802ec54a3521b65d22a0008c0e25914939326dad Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Tue, 15 Nov 2022 13:05:41 +0100 Subject: [PATCH 214/288] saml2 backend: support using multiple ACS URLs (#409) * saml2 backend: support using multiple ACS URLs When Satosa sends out a SAML2 AuthnRequest, it specifies the AssertionConsumerServiceUrl parameter as well, unless the `hide_assertion_consumer_service` configuration parameter is set. However, Satosa might be deployed in an environment where not all interfaces and host names are accessible for all users. After this change, Satosa tries to select the ACS URL based on the current request, and falls back to the first ACS if there is no match. * squash! saml2 backend: support using multiple ACS URLs Make ACS selection configurable with the `acs_selection_strategy` parameter, keeping the default backwards-compatible (`use_first_acs`). Added the relevant example and documentation. Additionally, log an error (instead of debug) message if the authentication request can not be constructed, since most of the time this is a configuration or environment error. --- doc/README.md | 20 ++++++ .../backends/saml2_backend.yaml.example | 1 + src/satosa/backends/saml2.py | 57 ++++++++++++++++- tests/satosa/backends/test_saml2.py | 62 ++++++++++++++++++- 4 files changed, 137 insertions(+), 3 deletions(-) diff --git a/doc/README.md b/doc/README.md index c5b8317ef..8d001847e 100644 --- a/doc/README.md +++ b/doc/README.md @@ -424,6 +424,26 @@ config: [...] ``` +#### Assertion Consumer Service selection + +When SATOSA sends the SAML2 authentication request to the IDP, it always +specifies the AssertionConsumerServiceURL and binding. When +`acs_selection_strategy` configuration option is set to `use_first_acs` (the +default), then the first element of the `assertion_consumer_service` list will +be selected. 
If `acs_selection_strategy` is `prefer_matching_host`, then SATOSA +will try to select the `assertion_consumer_service`, which matches the host in +the HTTP request (in simple words, it tries to select an ACS that matches the +URL in the user's browser). If there is no match, it will fall back to using the +first assertion consumer service. + +Default value: `use_first_acs`. + +```yaml +config: + acs_selection_strategy: prefer_matching_host + [...] +``` + ## OpenID Connect plugins ### OIDC Frontend diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 2dbe97092..3fb30fb2a 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -16,6 +16,7 @@ config: use_memorized_idp_when_force_authn: no send_requester_id: no enable_metadata_reload: no + acs_selection_strategy: prefer_matching_host sp_config: name: "SP Name" diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index b6d0d8910..be7a095fb 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -289,10 +289,11 @@ def authn_request(self, context, entity_id): kwargs["is_passive"] = "true" try: - acs_endp, response_binding = self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][0] + acs_endp, response_binding = self._get_acs(context) relay_state = util.rndstr() req_id, binding, http_info = self.sp.prepare_for_negotiated_authenticate( entityid=entity_id, + assertion_consumer_service_url=acs_endp, response_binding=response_binding, relay_state=relay_state, **kwargs, @@ -300,7 +301,7 @@ def authn_request(self, context, entity_id): except Exception as e: msg = "Failed to construct the AuthnRequest for state" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline, exc_info=True) + logger.error(logline, exc_info=True) raise SATOSAAuthenticationError(context.state, "Failed to 
construct the AuthnRequest") from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: @@ -314,6 +315,58 @@ def authn_request(self, context, entity_id): context.state[self.name] = {"relay_state": relay_state} return make_saml_response(binding, http_info) + def _get_acs(self, context): + """ + Select the AssertionConsumerServiceURL and binding. + + :param context: The current context + :type context: satosa.context.Context + :return: Selected ACS URL and binding + :rtype: tuple(str, str) + """ + acs_strategy = self.config.get("acs_selection_strategy", "use_first_acs") + if acs_strategy == "use_first_acs": + acs_strategy_fn = self._use_first_acs + elif acs_strategy == "prefer_matching_host": + acs_strategy_fn = self._prefer_matching_host + else: + msg = "Invalid value for '{}' ({}). Using the first ACS instead".format( + "acs_selection_strategy", acs_strategy + ) + logger.error(msg) + acs_strategy_fn = self._use_first_acs + return acs_strategy_fn(context) + + def _use_first_acs(self, context): + return self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][ + 0 + ] + + def _prefer_matching_host(self, context): + acs_config = self.sp.config.getattr("endpoints", "sp")[ + "assertion_consumer_service" + ] + try: + hostname = context.http_headers["HTTP_HOST"] + for acs, binding in acs_config: + parsed_acs = urlparse(acs) + if hostname == parsed_acs.netloc: + msg = "Selected ACS '{}' based on the request".format(acs) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) + logger.debug(logline) + return acs, binding + except (TypeError, KeyError): + pass + + msg = "Can't find an ACS URL to this hostname ({}), selecting the first one".format( + context.http_headers.get("HTTP_HOST", "") if context.http_headers else "" + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + return self._use_first_acs(context) + def authn_response(self, context, binding): """ 
Endpoint for the idp response diff --git a/tests/satosa/backends/test_saml2.py b/tests/satosa/backends/test_saml2.py index eed74db6c..dcfdb0fa9 100644 --- a/tests/satosa/backends/test_saml2.py +++ b/tests/satosa/backends/test_saml2.py @@ -12,9 +12,11 @@ import pytest import saml2 -from saml2 import BINDING_HTTP_REDIRECT +from saml2 import BINDING_HTTP_REDIRECT, BINDING_HTTP_POST from saml2.authn_context import PASSWORD from saml2.config import IdPConfig, SPConfig +from saml2.entity import Entity +from saml2.samlp import authn_request_from_string from saml2.s_utils import deflate_and_base64_encode from satosa.backends.saml2 import SAMLBackend @@ -179,6 +181,64 @@ def test_authn_request(self, context, idp_conf): req_params = dict(parse_qsl(urlparse(resp.message).query)) assert context.state[self.samlbackend.name]["relay_state"] == req_params["RelayState"] + @pytest.mark.parametrize("hostname", ["example.com:8443", "example.net"]) + @pytest.mark.parametrize( + "strat", + ["", "use_first_acs", "prefer_matching_host", "invalid"], + ) + def test_acs_selection_strategy(self, context, sp_conf, idp_conf, hostname, strat): + acs_endpoints = [ + ("https://example.com/saml2/acs/post", BINDING_HTTP_POST), + ("https://example.net/saml2/acs/post", BINDING_HTTP_POST), + ("https://example.com:8443/saml2/acs/post", BINDING_HTTP_POST), + ] + config = {"sp_config": sp_conf} + config["sp_config"]["service"]["sp"]["endpoints"][ + "assertion_consumer_service" + ] = acs_endpoints + if strat: + config["acs_selection_strategy"] = strat + + req = self._make_authn_request(hostname, context, config, idp_conf["entityid"]) + + if strat == "prefer_matching_host": + expected_acs = hostname + else: + expected_acs = urlparse(acs_endpoints[0][0]).netloc + assert urlparse(req.assertion_consumer_service_url).netloc == expected_acs + + def _make_authn_request(self, http_host, context, config, entity_id): + context.http_headers = {"HTTP_HOST": http_host} if http_host else {} + self.samlbackend = 
SAMLBackend( + Mock(), + INTERNAL_ATTRIBUTES, + config, + "base_url", + "samlbackend", + ) + resp = self.samlbackend.authn_request(context, entity_id) + req_params = dict(parse_qsl(urlparse(resp.message).query)) + req_xml = Entity.unravel(req_params["SAMLRequest"], BINDING_HTTP_REDIRECT) + return authn_request_from_string(req_xml) + + @pytest.mark.parametrize("hostname", ["unknown-hostname", None]) + def test_unknown_or_no_hostname_selects_first_acs( + self, context, sp_conf, idp_conf, hostname + ): + config = {"sp_config": sp_conf} + config["sp_config"]["service"]["sp"]["endpoints"][ + "assertion_consumer_service" + ] = ( + ("https://first-hostname/saml2/acs/post", BINDING_HTTP_POST), + ("https://other-hostname/saml2/acs/post", BINDING_HTTP_POST), + ) + config["acs_selection_strategy"] = "prefer_matching_host" + req = self._make_authn_request(hostname, context, config, idp_conf["entityid"]) + assert ( + req.assertion_consumer_service_url + == "https://first-hostname/saml2/acs/post" + ) + def test_authn_response(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT fakesp = FakeSP(SPConfig().load(sp_conf)) From d7de930be9483d9e70567e564bcdb6bca71d7416 Mon Sep 17 00:00:00 2001 From: Johan Wassberg Date: Wed, 16 Nov 2022 09:21:45 +0100 Subject: [PATCH 215/288] Deny auth if requested attribute is missing If a requested attribute is missing the authorization should fail --- .../micro_services/attribute_authorization.py | 3 +++ .../micro_services/test_attribute_authorization.py | 14 ++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/src/satosa/micro_services/attribute_authorization.py b/src/satosa/micro_services/attribute_authorization.py index 1bcaf8cda..9aca606a7 100644 --- a/src/satosa/micro_services/attribute_authorization.py +++ b/src/satosa/micro_services/attribute_authorization.py @@ -53,6 +53,9 @@ def _check_authz(self, context, attributes, requester, provider): if attribute_name in attributes: if not 
any([any(filter(re.compile(af).search, attributes[attribute_name])) for af in attribute_filters]): raise SATOSAAuthenticationError(context.state, "Permission denied") + else: + raise SATOSAAuthenticationError(context.state, "Permission denied") + for attribute_name, attribute_filters in get_dict_defaults(self.attribute_deny, requester, provider).items(): if attribute_name in attributes: diff --git a/tests/satosa/micro_services/test_attribute_authorization.py b/tests/satosa/micro_services/test_attribute_authorization.py index 15b1458ff..10de7d0f7 100644 --- a/tests/satosa/micro_services/test_attribute_authorization.py +++ b/tests/satosa/micro_services/test_attribute_authorization.py @@ -44,6 +44,20 @@ def test_authz_allow_fail(self): ctx.state = dict() authz_service.process(ctx, resp) + def test_authz_allow_missing(self): + attribute_allow = { + "": { "default": {"a0": ['foo1','foo2']} } + } + attribute_deny = {} + authz_service = self.create_authz_service(attribute_allow, attribute_deny) + resp = InternalData(auth_info=AuthenticationInformation()) + resp.attributes = { + } + with pytest.raises(SATOSAAuthenticationError): + ctx = Context() + ctx.state = dict() + authz_service.process(ctx, resp) + def test_authz_allow_second(self): attribute_allow = { "": { "default": {"a0": ['foo1','foo2']} } From 9d6c1bec573edad613f403f2e84632ba4b4c0d58 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 16 Nov 2022 16:31:47 +0200 Subject: [PATCH 216/288] Make attribute presence enforcement configurable Signed-off-by: Ivan Kanakarakis --- .../micro_services/attribute_authorization.py | 94 ++++++++++++------- .../test_attribute_authorization.py | 22 ++++- 2 files changed, 77 insertions(+), 39 deletions(-) diff --git a/src/satosa/micro_services/attribute_authorization.py b/src/satosa/micro_services/attribute_authorization.py index 9aca606a7..60f4afe4b 100644 --- a/src/satosa/micro_services/attribute_authorization.py +++ b/src/satosa/micro_services/attribute_authorization.py @@ 
-5,62 +5,86 @@ from ..util import get_dict_defaults class AttributeAuthorization(ResponseMicroService): - """ -A microservice that performs simple regexp-based authorization based on response -attributes. The configuration assumes a dict with two keys: attributes_allow -and attributes_deny. An examples speaks volumes: + A microservice that performs simple regexp-based authorization based on response + attributes. There are two configuration options to match attribute values in order + to allow or deny authorization. + + The configuration is wrapped in two nested dicts that specialize the options per + requester (SP/RP) and issuer (IdP/OP). + + There are also two options to enforce presence of the attributes that are going to + be checked. + + Example configuration: -```yaml -config: - attribute_allow: - target_provider1: + ```yaml + config: + force_attributes_presence_on_allow: true + attribute_allow: + target_provider1: requester1: - attr1: - - "^foo:bar$" - - "^kaka$" + attr1: + - "^foo:bar$" + - "^kaka$" default: - attr1: - - "plupp@.+$" - "": + attr1: + - "plupp@.+$" + "": "": - attr2: - - "^knytte:.*$" - attribute_deny: - default: - default: - eppn: - - "^[^@]+$" + attr2: + - "^knytte:.*$" -``` + force_attributes_presence_on_deny: false + attribute_deny: + default: + default: + eppn: + - "^[^@]+$" + ``` -The use of "" and 'default' is synonymous. Attribute rules are not overloaded -or inherited. For instance a response from "provider2" would only be allowed -through if the eppn attribute had all values containing an '@' (something -perhaps best implemented via an allow rule in practice). Responses from -target_provider1 bound for requester1 would be allowed through only if attr1 -contained foo:bar or kaka. Note that attribute filters (the leaves of the -structure above) are ORed together - i.e any attribute match is sufficient. + The use of "" and "default" is synonymous. Attribute rules are not overloaded + or inherited. 
For instance a response from "provider2" would only be allowed + through if the eppn attribute had all values containing an '@' (something + perhaps best implemented via an allow rule in practice). Responses from + target_provider1 bound for requester1 would be allowed through only if attr1 + contained foo:bar or kaka. Note that attribute filters (the leaves of the + structure above) are ORed together - i.e any attribute match is sufficient. """ def __init__(self, config, *args, **kwargs): super().__init__(*args, **kwargs) self.attribute_allow = config.get("attribute_allow", {}) self.attribute_deny = config.get("attribute_deny", {}) + self.force_attributes_presence_on_allow = config.get("force_attributes_presence_on_allow", False) + self.force_attributes_presence_on_deny = config.get("force_attributes_presence_on_deny", False) def _check_authz(self, context, attributes, requester, provider): for attribute_name, attribute_filters in get_dict_defaults(self.attribute_allow, requester, provider).items(): - if attribute_name in attributes: - if not any([any(filter(re.compile(af).search, attributes[attribute_name])) for af in attribute_filters]): + attr_values = attributes.get(attribute_name) + if attr_values is not None: + if not any( + [ + any(filter(lambda x: re.search(af, x), attr_values)) + for af in attribute_filters + ] + ): raise SATOSAAuthenticationError(context.state, "Permission denied") - else: + elif self.force_attributes_presence_on_allow: raise SATOSAAuthenticationError(context.state, "Permission denied") - for attribute_name, attribute_filters in get_dict_defaults(self.attribute_deny, requester, provider).items(): - if attribute_name in attributes: - if any([any(filter(re.compile(af).search, attributes[attribute_name])) for af in attribute_filters]): + attr_values = attributes.get(attribute_name) + if attr_values is not None: + if any( + [ + any(filter(lambda x: re.search(af, x), attributes[attribute_name])) + for af in attribute_filters + ] + ): raise 
SATOSAAuthenticationError(context.state, "Permission denied") + elif self.force_attributes_presence_on_deny: + raise SATOSAAuthenticationError(context.state, "Permission denied") def process(self, context, data): self._check_authz(context, data.attributes, data.requester, data.auth_info.issuer) diff --git a/tests/satosa/micro_services/test_attribute_authorization.py b/tests/satosa/micro_services/test_attribute_authorization.py index 10de7d0f7..6fb277d15 100644 --- a/tests/satosa/micro_services/test_attribute_authorization.py +++ b/tests/satosa/micro_services/test_attribute_authorization.py @@ -6,9 +6,23 @@ from satosa.context import Context class TestAttributeAuthorization: - def create_authz_service(self, attribute_allow, attribute_deny): - authz_service = AttributeAuthorization(config=dict(attribute_allow=attribute_allow,attribute_deny=attribute_deny), name="test_authz", - base_url="https://satosa.example.com") + def create_authz_service( + self, + attribute_allow, + attribute_deny, + force_attributes_presence_on_allow=False, + force_attributes_presence_on_deny=False, + ): + authz_service = AttributeAuthorization( + config=dict( + force_attributes_presence_on_allow=force_attributes_presence_on_allow, + force_attributes_presence_on_deny=force_attributes_presence_on_deny, + attribute_allow=attribute_allow, + attribute_deny=attribute_deny, + ), + name="test_authz", + base_url="https://satosa.example.com", + ) authz_service.next = lambda ctx, data: data return authz_service @@ -49,7 +63,7 @@ def test_authz_allow_missing(self): "": { "default": {"a0": ['foo1','foo2']} } } attribute_deny = {} - authz_service = self.create_authz_service(attribute_allow, attribute_deny) + authz_service = self.create_authz_service(attribute_allow, attribute_deny, force_attributes_presence_on_allow=True) resp = InternalData(auth_info=AuthenticationInformation()) resp.attributes = { } From cfd24b42092d4e2dcce576627385563444c87f52 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 17 
Nov 2022 19:55:14 +0200 Subject: [PATCH 217/288] docker: remove local Dockerfile; see satosa-docker instead Repo: https://github.com/IdentityPython/satosa-docker Images: https://hub.docker.com/_/satosa/ Signed-off-by: Ivan Kanakarakis --- docker/attributemaps/__init__.py | 2 - docker/attributemaps/adfs_v1x.py | 18 -- docker/attributemaps/adfs_v20.py | 49 ---- docker/attributemaps/basic.py | 341 ------------------------- docker/attributemaps/saml_uri.py | 307 ---------------------- docker/attributemaps/shibboleth_uri.py | 197 -------------- docker/setup.sh | 10 - docker/start.sh | 66 ----- 8 files changed, 990 deletions(-) delete mode 100644 docker/attributemaps/__init__.py delete mode 100644 docker/attributemaps/adfs_v1x.py delete mode 100644 docker/attributemaps/adfs_v20.py delete mode 100644 docker/attributemaps/basic.py delete mode 100644 docker/attributemaps/saml_uri.py delete mode 100644 docker/attributemaps/shibboleth_uri.py delete mode 100755 docker/setup.sh delete mode 100755 docker/start.sh diff --git a/docker/attributemaps/__init__.py b/docker/attributemaps/__init__.py deleted file mode 100644 index d041d3f13..000000000 --- a/docker/attributemaps/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -__author__ = 'rohe0002' -__all__ = ["adfs_v1x", "adfs_v20", "basic", "saml_uri", "shibboleth_uri"] diff --git a/docker/attributemaps/adfs_v1x.py b/docker/attributemaps/adfs_v1x.py deleted file mode 100644 index 0f8d01a5d..000000000 --- a/docker/attributemaps/adfs_v1x.py +++ /dev/null @@ -1,18 +0,0 @@ -CLAIMS = 'http://schemas.xmlsoap.org/claims/' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified", - 'fro': { - CLAIMS+'commonname': 'commonName', - CLAIMS+'emailaddress': 'emailAddress', - CLAIMS+'group': 'group', - CLAIMS+'upn': 'upn', - }, - 'to': { - 'commonName': CLAIMS+'commonname', - 'emailAddress': CLAIMS+'emailaddress', - 'group': CLAIMS+'group', - 'upn': CLAIMS+'upn', - } -} diff --git a/docker/attributemaps/adfs_v20.py 
b/docker/attributemaps/adfs_v20.py deleted file mode 100644 index 94150d077..000000000 --- a/docker/attributemaps/adfs_v20.py +++ /dev/null @@ -1,49 +0,0 @@ -CLAIMS = 'http://schemas.xmlsoap.org/claims/' -COM_WS_CLAIMS = 'http://schemas.xmlsoap.com/ws/2005/05/identity/claims/' -MS_CLAIMS = 'http://schemas.microsoft.com/ws/2008/06/identity/claims/' -ORG_WS_CLAIMS = 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified", - 'fro': { - CLAIMS+'commonname': 'commonName', - CLAIMS+'group': 'group', - COM_WS_CLAIMS+'denyonlysid': 'denyOnlySid', - MS_CLAIMS+'authenticationmethod': 'authenticationMethod', - MS_CLAIMS+'denyonlyprimarygroupsid': 'denyOnlyPrimaryGroupSid', - MS_CLAIMS+'denyonlyprimarysid': 'denyOnlyPrimarySid', - MS_CLAIMS+'groupsid': 'groupSid', - MS_CLAIMS+'primarygroupsid': 'primaryGroupSid', - MS_CLAIMS+'primarysid': 'primarySid', - MS_CLAIMS+'role': 'role', - MS_CLAIMS+'windowsaccountname': 'windowsAccountName', - ORG_WS_CLAIMS+'emailaddress': 'emailAddress', - ORG_WS_CLAIMS+'givenname': 'givenName', - ORG_WS_CLAIMS+'name': 'name', - ORG_WS_CLAIMS+'nameidentifier': 'nameId', - ORG_WS_CLAIMS+'privatepersonalidentifier': 'privatePersonalId', - ORG_WS_CLAIMS+'surname': 'surname', - ORG_WS_CLAIMS+'upn': 'upn', - }, - 'to': { - 'authenticationMethod': MS_CLAIMS+'authenticationmethod', - 'commonName': CLAIMS+'commonname', - 'denyOnlyPrimaryGroupSid': MS_CLAIMS+'denyonlyprimarygroupsid', - 'denyOnlyPrimarySid': MS_CLAIMS+'denyonlyprimarysid', - 'denyOnlySid': COM_WS_CLAIMS+'denyonlysid', - 'emailAddress': ORG_WS_CLAIMS+'emailaddress', - 'givenName': ORG_WS_CLAIMS+'givenname', - 'group': CLAIMS+'group', - 'groupSid': MS_CLAIMS+'groupsid', - 'name': ORG_WS_CLAIMS+'name', - 'nameId': ORG_WS_CLAIMS+'nameidentifier', - 'primaryGroupSid': MS_CLAIMS+'primarygroupsid', - 'primarySid': MS_CLAIMS+'primarysid', - 'privatePersonalId': ORG_WS_CLAIMS+'privatepersonalidentifier', - 'role': 
MS_CLAIMS+'role', - 'surname': ORG_WS_CLAIMS+'surname', - 'upn': ORG_WS_CLAIMS+'upn', - 'windowsAccountName': MS_CLAIMS+'windowsaccountname', - } -} diff --git a/docker/attributemaps/basic.py b/docker/attributemaps/basic.py deleted file mode 100644 index 9d84b8236..000000000 --- a/docker/attributemaps/basic.py +++ /dev/null @@ -1,341 +0,0 @@ -DEF = 'urn:mace:dir:attribute-def:' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic", - 'fro': { - DEF+'aRecord': 'aRecord', - DEF+'aliasedEntryName': 'aliasedEntryName', - DEF+'aliasedObjectName': 'aliasedObjectName', - DEF+'associatedDomain': 'associatedDomain', - DEF+'associatedName': 'associatedName', - DEF+'audio': 'audio', - DEF+'authorityRevocationList': 'authorityRevocationList', - DEF+'buildingName': 'buildingName', - DEF+'businessCategory': 'businessCategory', - DEF+'c': 'c', - DEF+'cACertificate': 'cACertificate', - DEF+'cNAMERecord': 'cNAMERecord', - DEF+'carLicense': 'carLicense', - DEF+'certificateRevocationList': 'certificateRevocationList', - DEF+'cn': 'cn', - DEF+'co': 'co', - DEF+'commonName': 'commonName', - DEF+'countryName': 'countryName', - DEF+'crossCertificatePair': 'crossCertificatePair', - DEF+'dITRedirect': 'dITRedirect', - DEF+'dSAQuality': 'dSAQuality', - DEF+'dc': 'dc', - DEF+'deltaRevocationList': 'deltaRevocationList', - DEF+'departmentNumber': 'departmentNumber', - DEF+'description': 'description', - DEF+'destinationIndicator': 'destinationIndicator', - DEF+'displayName': 'displayName', - DEF+'distinguishedName': 'distinguishedName', - DEF+'dmdName': 'dmdName', - DEF+'dnQualifier': 'dnQualifier', - DEF+'documentAuthor': 'documentAuthor', - DEF+'documentIdentifier': 'documentIdentifier', - DEF+'documentLocation': 'documentLocation', - DEF+'documentPublisher': 'documentPublisher', - DEF+'documentTitle': 'documentTitle', - DEF+'documentVersion': 'documentVersion', - DEF+'domainComponent': 'domainComponent', - DEF+'drink': 'drink', - DEF+'eduOrgHomePageURI': 
'eduOrgHomePageURI', - DEF+'eduOrgIdentityAuthNPolicyURI': 'eduOrgIdentityAuthNPolicyURI', - DEF+'eduOrgLegalName': 'eduOrgLegalName', - DEF+'eduOrgSuperiorURI': 'eduOrgSuperiorURI', - DEF+'eduOrgWhitePagesURI': 'eduOrgWhitePagesURI', - DEF+'eduCourseOffering': 'eduCourseOffering', - DEF+'eduCourseMember': 'eduCourseMember', - DEF+'eduPersonAffiliation': 'eduPersonAffiliation', - DEF+'eduPersonEntitlement': 'eduPersonEntitlement', - DEF+'eduPersonNickname': 'eduPersonNickname', - DEF+'eduPersonOrgDN': 'eduPersonOrgDN', - DEF+'eduPersonOrgUnitDN': 'eduPersonOrgUnitDN', - DEF+'eduPersonPrimaryAffiliation': 'eduPersonPrimaryAffiliation', - DEF+'eduPersonPrimaryOrgUnitDN': 'eduPersonPrimaryOrgUnitDN', - DEF+'eduPersonPrincipalName': 'eduPersonPrincipalName', - DEF+'eduPersonPrincipalNamePrior': 'eduPersonPrincipalNamePrior', - DEF+'eduPersonScopedAffiliation': 'eduPersonScopedAffiliation', - DEF+'eduPersonTargetedID': 'eduPersonTargetedID', - DEF+'eduPersonAssurance': 'eduPersonAssurance', - DEF+'eduPersonUniqueId': 'eduPersonUniqueId', - DEF+'eduPersonOrcid': 'eduPersonOrcid', - DEF+'email': 'email', - DEF+'emailAddress': 'emailAddress', - DEF+'employeeNumber': 'employeeNumber', - DEF+'employeeType': 'employeeType', - DEF+'enhancedSearchGuide': 'enhancedSearchGuide', - DEF+'facsimileTelephoneNumber': 'facsimileTelephoneNumber', - DEF+'favouriteDrink': 'favouriteDrink', - DEF+'fax': 'fax', - DEF+'federationFeideSchemaVersion': 'federationFeideSchemaVersion', - DEF+'friendlyCountryName': 'friendlyCountryName', - DEF+'generationQualifier': 'generationQualifier', - DEF+'givenName': 'givenName', - DEF+'gn': 'gn', - DEF+'homePhone': 'homePhone', - DEF+'homePostalAddress': 'homePostalAddress', - DEF+'homeTelephoneNumber': 'homeTelephoneNumber', - DEF+'host': 'host', - DEF+'houseIdentifier': 'houseIdentifier', - DEF+'info': 'info', - DEF+'initials': 'initials', - DEF+'internationaliSDNNumber': 'internationaliSDNNumber', - DEF+'isMemberOf': 'isMemberOf', - DEF+'janetMailbox': 
'janetMailbox', - DEF+'jpegPhoto': 'jpegPhoto', - DEF+'knowledgeInformation': 'knowledgeInformation', - DEF+'l': 'l', - DEF+'labeledURI': 'labeledURI', - DEF+'localityName': 'localityName', - DEF+'mDRecord': 'mDRecord', - DEF+'mXRecord': 'mXRecord', - DEF+'mail': 'mail', - DEF+'mailPreferenceOption': 'mailPreferenceOption', - DEF+'manager': 'manager', - DEF+'member': 'member', - DEF+'mobile': 'mobile', - DEF+'mobileTelephoneNumber': 'mobileTelephoneNumber', - DEF+'nSRecord': 'nSRecord', - DEF+'name': 'name', - DEF+'norEduOrgAcronym': 'norEduOrgAcronym', - DEF+'norEduOrgNIN': 'norEduOrgNIN', - DEF+'norEduOrgSchemaVersion': 'norEduOrgSchemaVersion', - DEF+'norEduOrgUniqueIdentifier': 'norEduOrgUniqueIdentifier', - DEF+'norEduOrgUniqueNumber': 'norEduOrgUniqueNumber', - DEF+'norEduOrgUnitUniqueIdentifier': 'norEduOrgUnitUniqueIdentifier', - DEF+'norEduOrgUnitUniqueNumber': 'norEduOrgUnitUniqueNumber', - DEF+'norEduPersonBirthDate': 'norEduPersonBirthDate', - DEF+'norEduPersonLIN': 'norEduPersonLIN', - DEF+'norEduPersonNIN': 'norEduPersonNIN', - DEF+'o': 'o', - DEF+'objectClass': 'objectClass', - DEF+'organizationName': 'organizationName', - DEF+'organizationalStatus': 'organizationalStatus', - DEF+'organizationalUnitName': 'organizationalUnitName', - DEF+'otherMailbox': 'otherMailbox', - DEF+'ou': 'ou', - DEF+'owner': 'owner', - DEF+'pager': 'pager', - DEF+'pagerTelephoneNumber': 'pagerTelephoneNumber', - DEF+'personalSignature': 'personalSignature', - DEF+'personalTitle': 'personalTitle', - DEF+'photo': 'photo', - DEF+'physicalDeliveryOfficeName': 'physicalDeliveryOfficeName', - DEF+'pkcs9email': 'pkcs9email', - DEF+'postOfficeBox': 'postOfficeBox', - DEF+'postalAddress': 'postalAddress', - DEF+'postalCode': 'postalCode', - DEF+'preferredDeliveryMethod': 'preferredDeliveryMethod', - DEF+'preferredLanguage': 'preferredLanguage', - DEF+'presentationAddress': 'presentationAddress', - DEF+'protocolInformation': 'protocolInformation', - DEF+'pseudonym': 'pseudonym', - 
DEF+'registeredAddress': 'registeredAddress', - DEF+'rfc822Mailbox': 'rfc822Mailbox', - DEF+'roleOccupant': 'roleOccupant', - DEF+'roomNumber': 'roomNumber', - DEF+'sOARecord': 'sOARecord', - DEF+'searchGuide': 'searchGuide', - DEF+'secretary': 'secretary', - DEF+'seeAlso': 'seeAlso', - DEF+'serialNumber': 'serialNumber', - DEF+'singleLevelQuality': 'singleLevelQuality', - DEF+'sn': 'sn', - DEF+'st': 'st', - DEF+'stateOrProvinceName': 'stateOrProvinceName', - DEF+'street': 'street', - DEF+'streetAddress': 'streetAddress', - DEF+'subtreeMaximumQuality': 'subtreeMaximumQuality', - DEF+'subtreeMinimumQuality': 'subtreeMinimumQuality', - DEF+'supportedAlgorithms': 'supportedAlgorithms', - DEF+'supportedApplicationContext': 'supportedApplicationContext', - DEF+'surname': 'surname', - DEF+'telephoneNumber': 'telephoneNumber', - DEF+'teletexTerminalIdentifier': 'teletexTerminalIdentifier', - DEF+'telexNumber': 'telexNumber', - DEF+'textEncodedORAddress': 'textEncodedORAddress', - DEF+'title': 'title', - DEF+'uid': 'uid', - DEF+'uniqueIdentifier': 'uniqueIdentifier', - DEF+'uniqueMember': 'uniqueMember', - DEF+'userCertificate': 'userCertificate', - DEF+'userClass': 'userClass', - DEF+'userPKCS12': 'userPKCS12', - DEF+'userPassword': 'userPassword', - DEF+'userSMIMECertificate': 'userSMIMECertificate', - DEF+'userid': 'userid', - DEF+'x121Address': 'x121Address', - DEF+'x500UniqueIdentifier': 'x500UniqueIdentifier', - }, - 'to': { - 'aRecord': DEF+'aRecord', - 'aliasedEntryName': DEF+'aliasedEntryName', - 'aliasedObjectName': DEF+'aliasedObjectName', - 'associatedDomain': DEF+'associatedDomain', - 'associatedName': DEF+'associatedName', - 'audio': DEF+'audio', - 'authorityRevocationList': DEF+'authorityRevocationList', - 'buildingName': DEF+'buildingName', - 'businessCategory': DEF+'businessCategory', - 'c': DEF+'c', - 'cACertificate': DEF+'cACertificate', - 'cNAMERecord': DEF+'cNAMERecord', - 'carLicense': DEF+'carLicense', - 'certificateRevocationList': 
DEF+'certificateRevocationList', - 'cn': DEF+'cn', - 'co': DEF+'co', - 'commonName': DEF+'commonName', - 'countryName': DEF+'countryName', - 'crossCertificatePair': DEF+'crossCertificatePair', - 'dITRedirect': DEF+'dITRedirect', - 'dSAQuality': DEF+'dSAQuality', - 'dc': DEF+'dc', - 'deltaRevocationList': DEF+'deltaRevocationList', - 'departmentNumber': DEF+'departmentNumber', - 'description': DEF+'description', - 'destinationIndicator': DEF+'destinationIndicator', - 'displayName': DEF+'displayName', - 'distinguishedName': DEF+'distinguishedName', - 'dmdName': DEF+'dmdName', - 'dnQualifier': DEF+'dnQualifier', - 'documentAuthor': DEF+'documentAuthor', - 'documentIdentifier': DEF+'documentIdentifier', - 'documentLocation': DEF+'documentLocation', - 'documentPublisher': DEF+'documentPublisher', - 'documentTitle': DEF+'documentTitle', - 'documentVersion': DEF+'documentVersion', - 'domainComponent': DEF+'domainComponent', - 'drink': DEF+'drink', - 'eduOrgHomePageURI': DEF+'eduOrgHomePageURI', - 'eduOrgIdentityAuthNPolicyURI': DEF+'eduOrgIdentityAuthNPolicyURI', - 'eduOrgLegalName': DEF+'eduOrgLegalName', - 'eduOrgSuperiorURI': DEF+'eduOrgSuperiorURI', - 'eduOrgWhitePagesURI': DEF+'eduOrgWhitePagesURI', - 'eduCourseMember': DEF+'eduCourseMember', - 'eduCourseOffering': DEF+'eduCourseOffering', - 'eduPersonAffiliation': DEF+'eduPersonAffiliation', - 'eduPersonEntitlement': DEF+'eduPersonEntitlement', - 'eduPersonNickname': DEF+'eduPersonNickname', - 'eduPersonOrgDN': DEF+'eduPersonOrgDN', - 'eduPersonOrgUnitDN': DEF+'eduPersonOrgUnitDN', - 'eduPersonPrimaryAffiliation': DEF+'eduPersonPrimaryAffiliation', - 'eduPersonPrimaryOrgUnitDN': DEF+'eduPersonPrimaryOrgUnitDN', - 'eduPersonPrincipalName': DEF+'eduPersonPrincipalName', - 'eduPersonPrincipalNamePrior': DEF+'eduPersonPrincipalNamePrior', - 'eduPersonScopedAffiliation': DEF+'eduPersonScopedAffiliation', - 'eduPersonTargetedID': DEF+'eduPersonTargetedID', - 'eduPersonAssurance': DEF+'eduPersonAssurance', - 
'eduPersonUniqueId': DEF+'eduPersonUniqueId', - 'eduPersonOrcid': DEF+'eduPersonOrcid', - 'email': DEF+'email', - 'emailAddress': DEF+'emailAddress', - 'employeeNumber': DEF+'employeeNumber', - 'employeeType': DEF+'employeeType', - 'enhancedSearchGuide': DEF+'enhancedSearchGuide', - 'facsimileTelephoneNumber': DEF+'facsimileTelephoneNumber', - 'favouriteDrink': DEF+'favouriteDrink', - 'fax': DEF+'fax', - 'federationFeideSchemaVersion': DEF+'federationFeideSchemaVersion', - 'friendlyCountryName': DEF+'friendlyCountryName', - 'generationQualifier': DEF+'generationQualifier', - 'givenName': DEF+'givenName', - 'gn': DEF+'gn', - 'homePhone': DEF+'homePhone', - 'homePostalAddress': DEF+'homePostalAddress', - 'homeTelephoneNumber': DEF+'homeTelephoneNumber', - 'host': DEF+'host', - 'houseIdentifier': DEF+'houseIdentifier', - 'info': DEF+'info', - 'initials': DEF+'initials', - 'internationaliSDNNumber': DEF+'internationaliSDNNumber', - 'janetMailbox': DEF+'janetMailbox', - 'jpegPhoto': DEF+'jpegPhoto', - 'knowledgeInformation': DEF+'knowledgeInformation', - 'l': DEF+'l', - 'labeledURI': DEF+'labeledURI', - 'localityName': DEF+'localityName', - 'mDRecord': DEF+'mDRecord', - 'mXRecord': DEF+'mXRecord', - 'mail': DEF+'mail', - 'mailPreferenceOption': DEF+'mailPreferenceOption', - 'manager': DEF+'manager', - 'member': DEF+'member', - 'mobile': DEF+'mobile', - 'mobileTelephoneNumber': DEF+'mobileTelephoneNumber', - 'nSRecord': DEF+'nSRecord', - 'name': DEF+'name', - 'norEduOrgAcronym': DEF+'norEduOrgAcronym', - 'norEduOrgNIN': DEF+'norEduOrgNIN', - 'norEduOrgSchemaVersion': DEF+'norEduOrgSchemaVersion', - 'norEduOrgUniqueIdentifier': DEF+'norEduOrgUniqueIdentifier', - 'norEduOrgUniqueNumber': DEF+'norEduOrgUniqueNumber', - 'norEduOrgUnitUniqueIdentifier': DEF+'norEduOrgUnitUniqueIdentifier', - 'norEduOrgUnitUniqueNumber': DEF+'norEduOrgUnitUniqueNumber', - 'norEduPersonBirthDate': DEF+'norEduPersonBirthDate', - 'norEduPersonLIN': DEF+'norEduPersonLIN', - 'norEduPersonNIN': 
DEF+'norEduPersonNIN', - 'o': DEF+'o', - 'objectClass': DEF+'objectClass', - 'organizationName': DEF+'organizationName', - 'organizationalStatus': DEF+'organizationalStatus', - 'organizationalUnitName': DEF+'organizationalUnitName', - 'otherMailbox': DEF+'otherMailbox', - 'ou': DEF+'ou', - 'owner': DEF+'owner', - 'pager': DEF+'pager', - 'pagerTelephoneNumber': DEF+'pagerTelephoneNumber', - 'personalSignature': DEF+'personalSignature', - 'personalTitle': DEF+'personalTitle', - 'photo': DEF+'photo', - 'physicalDeliveryOfficeName': DEF+'physicalDeliveryOfficeName', - 'pkcs9email': DEF+'pkcs9email', - 'postOfficeBox': DEF+'postOfficeBox', - 'postalAddress': DEF+'postalAddress', - 'postalCode': DEF+'postalCode', - 'preferredDeliveryMethod': DEF+'preferredDeliveryMethod', - 'preferredLanguage': DEF+'preferredLanguage', - 'presentationAddress': DEF+'presentationAddress', - 'protocolInformation': DEF+'protocolInformation', - 'pseudonym': DEF+'pseudonym', - 'registeredAddress': DEF+'registeredAddress', - 'rfc822Mailbox': DEF+'rfc822Mailbox', - 'roleOccupant': DEF+'roleOccupant', - 'roomNumber': DEF+'roomNumber', - 'sOARecord': DEF+'sOARecord', - 'searchGuide': DEF+'searchGuide', - 'secretary': DEF+'secretary', - 'seeAlso': DEF+'seeAlso', - 'serialNumber': DEF+'serialNumber', - 'singleLevelQuality': DEF+'singleLevelQuality', - 'sn': DEF+'sn', - 'st': DEF+'st', - 'stateOrProvinceName': DEF+'stateOrProvinceName', - 'street': DEF+'street', - 'streetAddress': DEF+'streetAddress', - 'subtreeMaximumQuality': DEF+'subtreeMaximumQuality', - 'subtreeMinimumQuality': DEF+'subtreeMinimumQuality', - 'supportedAlgorithms': DEF+'supportedAlgorithms', - 'supportedApplicationContext': DEF+'supportedApplicationContext', - 'surname': DEF+'surname', - 'telephoneNumber': DEF+'telephoneNumber', - 'teletexTerminalIdentifier': DEF+'teletexTerminalIdentifier', - 'telexNumber': DEF+'telexNumber', - 'textEncodedORAddress': DEF+'textEncodedORAddress', - 'title': DEF+'title', - 'uid': DEF+'uid', - 
'uniqueIdentifier': DEF+'uniqueIdentifier', - 'uniqueMember': DEF+'uniqueMember', - 'userCertificate': DEF+'userCertificate', - 'userClass': DEF+'userClass', - 'userPKCS12': DEF+'userPKCS12', - 'userPassword': DEF+'userPassword', - 'userSMIMECertificate': DEF+'userSMIMECertificate', - 'userid': DEF+'userid', - 'x121Address': DEF+'x121Address', - 'x500UniqueIdentifier': DEF+'x500UniqueIdentifier', - } -} diff --git a/docker/attributemaps/saml_uri.py b/docker/attributemaps/saml_uri.py deleted file mode 100644 index ca6dfd840..000000000 --- a/docker/attributemaps/saml_uri.py +++ /dev/null @@ -1,307 +0,0 @@ -EDUCOURSE_OID = 'urn:oid:1.3.6.1.4.1.5923.1.6.1.' -EDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.5923.1.1.1.' -EDUMEMBER1_OID = 'urn:oid:1.3.6.1.4.1.5923.1.5.1.' -LDAPGVAT_OID = 'urn:oid:1.2.40.0.10.2.1.1.' # ldap.gv.at definitions as specified in http://www.ref.gv.at/AG-IZ-PVP2-Version-2-1-0-2.2754.0.html -UCL_DIR_PILOT = 'urn:oid:0.9.2342.19200300.100.1.' -X500ATTR_OID = 'urn:oid:2.5.4.' -LDAPGVAT_UCL_DIR_PILOT = UCL_DIR_PILOT -LDAPGVAT_X500ATTR_OID = X500ATTR_OID -NETSCAPE_LDAP = 'urn:oid:2.16.840.1.113730.3.1.' -NOREDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.2428.90.1.' -PKCS_9 = 'urn:oid:1.2.840.113549.1.9.1.' -SCHAC = 'urn:oid:1.3.6.1.4.1.25178.1.2.' -SIS = 'urn:oid:1.2.752.194.10.2.' -UMICH = 'urn:oid:1.3.6.1.4.1.250.1.57.' -OPENOSI_OID = 'urn:oid:1.3.6.1.4.1.27630.2.1.1.' 
#openosi-0.82.schema http://www.openosi.org/osi/display/ldap/Home - -MAP = { - 'identifier': 'urn:oasis:names:tc:SAML:2.0:attrname-format:uri', - 'fro': { - EDUCOURSE_OID+'1': 'eduCourseOffering', - EDUCOURSE_OID+'2': 'eduCourseMember', - EDUMEMBER1_OID+'1': 'isMemberOf', - EDUPERSON_OID+'1': 'eduPersonAffiliation', - EDUPERSON_OID+'2': 'eduPersonNickname', - EDUPERSON_OID+'3': 'eduPersonOrgDN', - EDUPERSON_OID+'4': 'eduPersonOrgUnitDN', - EDUPERSON_OID+'5': 'eduPersonPrimaryAffiliation', - EDUPERSON_OID+'6': 'eduPersonPrincipalName', - EDUPERSON_OID+'7': 'eduPersonEntitlement', - EDUPERSON_OID+'8': 'eduPersonPrimaryOrgUnitDN', - EDUPERSON_OID+'9': 'eduPersonScopedAffiliation', - EDUPERSON_OID+'10': 'eduPersonTargetedID', - EDUPERSON_OID+'11': 'eduPersonAssurance', - EDUPERSON_OID+'12': 'eduPersonPrincipalNamePrior', - EDUPERSON_OID+'13': 'eduPersonUniqueId', - EDUPERSON_OID+'16': 'eduPersonOrcid', - LDAPGVAT_OID+'1': 'PVP-GID', - LDAPGVAT_OID+'149': 'PVP-BPK', - LDAPGVAT_OID+'153': 'PVP-OU-OKZ', - LDAPGVAT_OID+'261.10': 'PVP-VERSION', - LDAPGVAT_OID+'261.20': 'PVP-PRINCIPAL-NAME', - LDAPGVAT_OID+'261.24': 'PVP-PARTICIPANT-OKZ', - LDAPGVAT_OID+'261.30': 'PVP-ROLES', - LDAPGVAT_OID+'261.40': 'PVP-INVOICE-RECPT-ID', - LDAPGVAT_OID+'261.50': 'PVP-COST-CENTER-ID', - LDAPGVAT_OID+'261.60': 'PVP-CHARGE-CODE', - LDAPGVAT_OID+'3': 'PVP-OU-GV-OU-ID', - LDAPGVAT_OID+'33': 'PVP-FUNCTION', - LDAPGVAT_OID+'55': 'PVP-BIRTHDATE', - LDAPGVAT_OID+'71': 'PVP-PARTICIPANT-ID', - LDAPGVAT_UCL_DIR_PILOT+'1': 'PVP-USERID', - LDAPGVAT_UCL_DIR_PILOT+'3': 'PVP-MAIL', - LDAPGVAT_X500ATTR_OID+'11': 'PVP-OU', - LDAPGVAT_X500ATTR_OID+'20': 'PVP-TEL', - LDAPGVAT_X500ATTR_OID+'42': 'PVP-GIVENNAME', - NETSCAPE_LDAP+'1': 'carLicense', - NETSCAPE_LDAP+'2': 'departmentNumber', - NETSCAPE_LDAP+'3': 'employeeNumber', - NETSCAPE_LDAP+'4': 'employeeType', - NETSCAPE_LDAP+'39': 'preferredLanguage', - NETSCAPE_LDAP+'40': 'userSMIMECertificate', - NETSCAPE_LDAP+'216': 'userPKCS12', - NETSCAPE_LDAP+'241': 
'displayName', - NOREDUPERSON_OID+'1': 'norEduOrgUniqueNumber', - NOREDUPERSON_OID+'2': 'norEduOrgUnitUniqueNumber', - NOREDUPERSON_OID+'3': 'norEduPersonBirthDate', - NOREDUPERSON_OID+'4': 'norEduPersonLIN', - NOREDUPERSON_OID+'5': 'norEduPersonNIN', - NOREDUPERSON_OID+'6': 'norEduOrgAcronym', - NOREDUPERSON_OID+'7': 'norEduOrgUniqueIdentifier', - NOREDUPERSON_OID+'8': 'norEduOrgUnitUniqueIdentifier', - NOREDUPERSON_OID+'9': 'federationFeideSchemaVersion', - NOREDUPERSON_OID+'10': 'norEduPersonLegalName', - NOREDUPERSON_OID+'11': 'norEduOrgSchemaVersion', - NOREDUPERSON_OID+'12': 'norEduOrgNIN', - OPENOSI_OID+'17': 'osiHomeUrl', - OPENOSI_OID+'19': 'osiPreferredTZ', - OPENOSI_OID+'72': 'osiICardTimeLastUpdated', - OPENOSI_OID+'104': 'osiMiddleName', - OPENOSI_OID+'107': 'osiOtherEmail', - OPENOSI_OID+'109': 'osiOtherHomePhone', - OPENOSI_OID+'120': 'osiWorkURL', - PKCS_9+'1': 'email', - SCHAC+'1': 'schacMotherTongue', - SCHAC+'2': 'schacGender', - SCHAC+'3': 'schacDateOfBirth', - SCHAC+'4': 'schacPlaceOfBirth', - SCHAC+'5': 'schacCountryOfCitizenship', - SCHAC+'6': 'schacSn1', - SCHAC+'7': 'schacSn2', - SCHAC+'8': 'schacPersonalTitle', - SCHAC+'9': 'schacHomeOrganization', - SCHAC+'10': 'schacHomeOrganizationType', - SCHAC+'11': 'schacCountryOfResidence', - SCHAC+'12': 'schacUserPresenceID', - SCHAC+'13': 'schacPersonalPosition', - SCHAC+'14': 'schacPersonalUniqueCode', - SCHAC+'15': 'schacPersonalUniqueID', - SCHAC+'17': 'schacExpiryDate', - SCHAC+'18': 'schacUserPrivateAttribute', - SCHAC+'19': 'schacUserStatus', - SCHAC+'20': 'schacProjectMembership', - SCHAC+'21': 'schacProjectSpecificRole', - SIS+'1': 'sisLegalGuardianFor', - SIS+'2': 'sisSchoolGrade', - UCL_DIR_PILOT+'1': 'uid', - UCL_DIR_PILOT+'3': 'mail', - UCL_DIR_PILOT+'25': 'dc', - UCL_DIR_PILOT+'37': 'associatedDomain', - UCL_DIR_PILOT+'43': 'co', - UCL_DIR_PILOT+'60': 'jpegPhoto', - UMICH+'57': 'labeledURI', - X500ATTR_OID+'2': 'knowledgeInformation', - X500ATTR_OID+'3': 'cn', - X500ATTR_OID+'4': 
'sn', - X500ATTR_OID+'5': 'serialNumber', - X500ATTR_OID+'6': 'c', - X500ATTR_OID+'7': 'l', - X500ATTR_OID+'8': 'st', - X500ATTR_OID+'9': 'street', - X500ATTR_OID+'10': 'o', - X500ATTR_OID+'11': 'ou', - X500ATTR_OID+'12': 'title', - X500ATTR_OID+'14': 'searchGuide', - X500ATTR_OID+'15': 'businessCategory', - X500ATTR_OID+'16': 'postalAddress', - X500ATTR_OID+'17': 'postalCode', - X500ATTR_OID+'18': 'postOfficeBox', - X500ATTR_OID+'19': 'physicalDeliveryOfficeName', - X500ATTR_OID+'20': 'telephoneNumber', - X500ATTR_OID+'21': 'telexNumber', - X500ATTR_OID+'22': 'teletexTerminalIdentifier', - X500ATTR_OID+'23': 'facsimileTelephoneNumber', - X500ATTR_OID+'24': 'x121Address', - X500ATTR_OID+'25': 'internationaliSDNNumber', - X500ATTR_OID+'26': 'registeredAddress', - X500ATTR_OID+'27': 'destinationIndicator', - X500ATTR_OID+'28': 'preferredDeliveryMethod', - X500ATTR_OID+'29': 'presentationAddress', - X500ATTR_OID+'30': 'supportedApplicationContext', - X500ATTR_OID+'31': 'member', - X500ATTR_OID+'32': 'owner', - X500ATTR_OID+'33': 'roleOccupant', - X500ATTR_OID+'36': 'userCertificate', - X500ATTR_OID+'37': 'cACertificate', - X500ATTR_OID+'38': 'authorityRevocationList', - X500ATTR_OID+'39': 'certificateRevocationList', - X500ATTR_OID+'40': 'crossCertificatePair', - X500ATTR_OID+'42': 'givenName', - X500ATTR_OID+'43': 'initials', - X500ATTR_OID+'44': 'generationQualifier', - X500ATTR_OID+'45': 'x500UniqueIdentifier', - X500ATTR_OID+'46': 'dnQualifier', - X500ATTR_OID+'47': 'enhancedSearchGuide', - X500ATTR_OID+'48': 'protocolInformation', - X500ATTR_OID+'50': 'uniqueMember', - X500ATTR_OID+'51': 'houseIdentifier', - X500ATTR_OID+'52': 'supportedAlgorithms', - X500ATTR_OID+'53': 'deltaRevocationList', - X500ATTR_OID+'54': 'dmdName', - X500ATTR_OID+'65': 'pseudonym', - }, - 'to': { - 'associatedDomain': UCL_DIR_PILOT+'37', - 'authorityRevocationList': X500ATTR_OID+'38', - 'businessCategory': X500ATTR_OID+'15', - 'c': X500ATTR_OID+'6', - 'cACertificate': X500ATTR_OID+'37', 
- 'carLicense': NETSCAPE_LDAP+'1', - 'certificateRevocationList': X500ATTR_OID+'39', - 'cn': X500ATTR_OID+'3', - 'co': UCL_DIR_PILOT+'43', - 'crossCertificatePair': X500ATTR_OID+'40', - 'dc': UCL_DIR_PILOT+'25', - 'deltaRevocationList': X500ATTR_OID+'53', - 'departmentNumber': NETSCAPE_LDAP+'2', - 'destinationIndicator': X500ATTR_OID+'27', - 'displayName': NETSCAPE_LDAP+'241', - 'dmdName': X500ATTR_OID+'54', - 'dnQualifier': X500ATTR_OID+'46', - 'eduCourseMember': EDUCOURSE_OID+'2', - 'eduCourseOffering': EDUCOURSE_OID+'1', - 'eduPersonAffiliation': EDUPERSON_OID+'1', - 'eduPersonEntitlement': EDUPERSON_OID+'7', - 'eduPersonNickname': EDUPERSON_OID+'2', - 'eduPersonOrgDN': EDUPERSON_OID+'3', - 'eduPersonOrgUnitDN': EDUPERSON_OID+'4', - 'eduPersonPrimaryAffiliation': EDUPERSON_OID+'5', - 'eduPersonPrimaryOrgUnitDN': EDUPERSON_OID+'8', - 'eduPersonPrincipalName': EDUPERSON_OID+'6', - 'eduPersonPrincipalNamePrior': EDUPERSON_OID+'12', - 'eduPersonScopedAffiliation': EDUPERSON_OID+'9', - 'eduPersonTargetedID': EDUPERSON_OID+'10', - 'eduPersonAssurance': EDUPERSON_OID+'11', - 'eduPersonUniqueId': EDUPERSON_OID+'13', - 'eduPersonOrcid': EDUPERSON_OID+'16', - 'email': PKCS_9+'1', - 'employeeNumber': NETSCAPE_LDAP+'3', - 'employeeType': NETSCAPE_LDAP+'4', - 'enhancedSearchGuide': X500ATTR_OID+'47', - 'facsimileTelephoneNumber': X500ATTR_OID+'23', - 'federationFeideSchemaVersion': NOREDUPERSON_OID+'9', - 'generationQualifier': X500ATTR_OID+'44', - 'givenName': X500ATTR_OID+'42', - 'houseIdentifier': X500ATTR_OID+'51', - 'initials': X500ATTR_OID+'43', - 'internationaliSDNNumber': X500ATTR_OID+'25', - 'isMemberOf': EDUMEMBER1_OID+'1', - 'jpegPhoto': UCL_DIR_PILOT+'60', - 'knowledgeInformation': X500ATTR_OID+'2', - 'l': X500ATTR_OID+'7', - 'labeledURI': UMICH+'57', - 'mail': UCL_DIR_PILOT+'3', - 'member': X500ATTR_OID+'31', - 'norEduOrgAcronym': NOREDUPERSON_OID+'6', - 'norEduOrgNIN': NOREDUPERSON_OID+'12', - 'norEduOrgSchemaVersion': NOREDUPERSON_OID+'11', - 
'norEduOrgUniqueIdentifier': NOREDUPERSON_OID+'7', - 'norEduOrgUniqueNumber': NOREDUPERSON_OID+'1', - 'norEduOrgUnitUniqueIdentifier': NOREDUPERSON_OID+'8', - 'norEduOrgUnitUniqueNumber': NOREDUPERSON_OID+'2', - 'norEduPersonBirthDate': NOREDUPERSON_OID+'3', - 'norEduPersonLIN': NOREDUPERSON_OID+'4', - 'norEduPersonLegalName': NOREDUPERSON_OID+'10', - 'norEduPersonNIN': NOREDUPERSON_OID+'5', - 'o': X500ATTR_OID+'10', - 'osiHomeUrl': OPENOSI_OID+'17', - 'osiPreferredTZ': OPENOSI_OID+'19', - 'osiICardTimeLastUpdated': OPENOSI_OID+'72', - 'osiMiddleName': OPENOSI_OID+'104', - 'osiOtherEmail': OPENOSI_OID+'107', - 'osiOtherHomePhone': OPENOSI_OID+'109', - 'osiWorkURL': OPENOSI_OID+'120', - 'ou': X500ATTR_OID+'11', - 'owner': X500ATTR_OID+'32', - 'physicalDeliveryOfficeName': X500ATTR_OID+'19', - 'postOfficeBox': X500ATTR_OID+'18', - 'postalAddress': X500ATTR_OID+'16', - 'postalCode': X500ATTR_OID+'17', - 'preferredDeliveryMethod': X500ATTR_OID+'28', - 'preferredLanguage': NETSCAPE_LDAP+'39', - 'presentationAddress': X500ATTR_OID+'29', - 'protocolInformation': X500ATTR_OID+'48', - 'pseudonym': X500ATTR_OID+'65', - 'PVP-USERID': LDAPGVAT_UCL_DIR_PILOT+'1', - 'PVP-MAIL': LDAPGVAT_UCL_DIR_PILOT+'3', - 'PVP-GID': LDAPGVAT_OID+'1', - 'PVP-BPK': LDAPGVAT_OID+'149', - 'PVP-OU-OKZ': LDAPGVAT_OID+'153', - 'PVP-VERSION': LDAPGVAT_OID+'261.10', - 'PVP-PRINCIPAL-NAME': LDAPGVAT_OID+'261.20', - 'PVP-PARTICIPANT-OKZ': LDAPGVAT_OID+'261.24', - 'PVP-ROLES': LDAPGVAT_OID+'261.30', - 'PVP-INVOICE-RECPT-ID': LDAPGVAT_OID+'261.40', - 'PVP-COST-CENTER-ID': LDAPGVAT_OID+'261.50', - 'PVP-CHARGE-CODE': LDAPGVAT_OID+'261.60', - 'PVP-OU-GV-OU-ID': LDAPGVAT_OID+'3', - 'PVP-FUNCTION': LDAPGVAT_OID+'33', - 'PVP-BIRTHDATE': LDAPGVAT_OID+'55', - 'PVP-PARTICIPANT-ID': LDAPGVAT_OID+'71', - 'PVP-OU': LDAPGVAT_X500ATTR_OID+'11', - 'PVP-TEL': LDAPGVAT_X500ATTR_OID+'20', - 'PVP-GIVENNAME': LDAPGVAT_X500ATTR_OID+'42', - 'registeredAddress': X500ATTR_OID+'26', - 'roleOccupant': X500ATTR_OID+'33', - 
'schacCountryOfCitizenship': SCHAC+'5', - 'schacCountryOfResidence': SCHAC+'11', - 'schacDateOfBirth': SCHAC+'3', - 'schacExpiryDate': SCHAC+'17', - 'schacGender': SCHAC+'2', - 'schacHomeOrganization': SCHAC+'9', - 'schacHomeOrganizationType': SCHAC+'10', - 'schacMotherTongue': SCHAC+'1', - 'schacPersonalPosition': SCHAC+'13', - 'schacPersonalTitle': SCHAC+'8', - 'schacPersonalUniqueCode': SCHAC+'14', - 'schacPersonalUniqueID': SCHAC+'15', - 'schacPlaceOfBirth': SCHAC+'4', - 'schacProjectMembership': SCHAC+'20', - 'schacProjectSpecificRole': SCHAC+'21', - 'schacSn1': SCHAC+'6', - 'schacSn2': SCHAC+'7', - 'schacUserPresenceID': SCHAC+'12', - 'schacUserPrivateAttribute': SCHAC+'18', - 'schacUserStatus': SCHAC+'19', - 'searchGuide': X500ATTR_OID+'14', - 'serialNumber': X500ATTR_OID+'5', - 'sisLegalGuardianFor': SIS+'1', - 'sisSchoolGrade': SIS+'2', - 'sn': X500ATTR_OID+'4', - 'st': X500ATTR_OID+'8', - 'street': X500ATTR_OID+'9', - 'supportedAlgorithms': X500ATTR_OID+'52', - 'supportedApplicationContext': X500ATTR_OID+'30', - 'telephoneNumber': X500ATTR_OID+'20', - 'teletexTerminalIdentifier': X500ATTR_OID+'22', - 'telexNumber': X500ATTR_OID+'21', - 'title': X500ATTR_OID+'12', - 'uid': UCL_DIR_PILOT+'1', - 'uniqueMember': X500ATTR_OID+'50', - 'userCertificate': X500ATTR_OID+'36', - 'userPKCS12': NETSCAPE_LDAP+'216', - 'userSMIMECertificate': NETSCAPE_LDAP+'40', - 'x121Address': X500ATTR_OID+'24', - 'x500UniqueIdentifier': X500ATTR_OID+'45', - } -} diff --git a/docker/attributemaps/shibboleth_uri.py b/docker/attributemaps/shibboleth_uri.py deleted file mode 100644 index 54de47353..000000000 --- a/docker/attributemaps/shibboleth_uri.py +++ /dev/null @@ -1,197 +0,0 @@ -EDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.5923.1.1.1.' -NETSCAPE_LDAP = 'urn:oid:2.16.840.1.113730.3.1.' -NOREDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.2428.90.1.' -PKCS_9 = 'urn:oid:1.2.840.113549.1.9.' -UCL_DIR_PILOT = 'urn:oid:0.9.2342.19200300.100.1.' -UMICH = 'urn:oid:1.3.6.1.4.1.250.1.57.' 
-X500ATTR = 'urn:oid:2.5.4.' - - -MAP = { - "identifier": "urn:mace:shibboleth:1.0:attributeNamespace:uri", - 'fro': { - EDUPERSON_OID+'1': 'eduPersonAffiliation', - EDUPERSON_OID+'2': 'eduPersonNickname', - EDUPERSON_OID+'3': 'eduPersonOrgDN', - EDUPERSON_OID+'4': 'eduPersonOrgUnitDN', - EDUPERSON_OID+'5': 'eduPersonPrimaryAffiliation', - EDUPERSON_OID+'6': 'eduPersonPrincipalName', - EDUPERSON_OID+'7': 'eduPersonEntitlement', - EDUPERSON_OID+'8': 'eduPersonPrimaryOrgUnitDN', - EDUPERSON_OID+'9': 'eduPersonScopedAffiliation', - EDUPERSON_OID+'10': 'eduPersonTargetedID', - EDUPERSON_OID+'11': 'eduPersonAssurance', - EDUPERSON_OID+'12': 'eduPersonPrincipalNamePrior', - EDUPERSON_OID+'13': 'eduPersonUniqueId', - EDUPERSON_OID+'16': 'eduPersonOrcid', - NETSCAPE_LDAP+'1': 'carLicense', - NETSCAPE_LDAP+'2': 'departmentNumber', - NETSCAPE_LDAP+'3': 'employeeNumber', - NETSCAPE_LDAP+'4': 'employeeType', - NETSCAPE_LDAP+'39': 'preferredLanguage', - NETSCAPE_LDAP+'40': 'userSMIMECertificate', - NETSCAPE_LDAP+'216': 'userPKCS12', - NETSCAPE_LDAP+'241': 'displayName', - NOREDUPERSON_OID+'1': 'norEduOrgUniqueNumber', - NOREDUPERSON_OID+'2': 'norEduOrgUnitUniqueNumber', - NOREDUPERSON_OID+'3': 'norEduPersonBirthDate', - NOREDUPERSON_OID+'4': 'norEduPersonLIN', - NOREDUPERSON_OID+'5': 'norEduPersonNIN', - NOREDUPERSON_OID+'6': 'norEduOrgAcronym', - NOREDUPERSON_OID+'7': 'norEduOrgUniqueIdentifier', - NOREDUPERSON_OID+'8': 'norEduOrgUnitUniqueIdentifier', - NOREDUPERSON_OID+'9': 'federationFeideSchemaVersion', - PKCS_9+'1': 'email', - UCL_DIR_PILOT+'3': 'mail', - UCL_DIR_PILOT+'25': 'dc', - UCL_DIR_PILOT+'37': 'associatedDomain', - UCL_DIR_PILOT+'60': 'jpegPhoto', - X500ATTR+'2': 'knowledgeInformation', - X500ATTR+'4': 'sn', - X500ATTR+'5': 'serialNumber', - X500ATTR+'6': 'c', - X500ATTR+'7': 'l', - X500ATTR+'8': 'st', - X500ATTR+'9': 'street', - X500ATTR+'10': 'o', - X500ATTR+'11': 'ou', - X500ATTR+'12': 'title', - X500ATTR+'14': 'searchGuide', - X500ATTR+'15': 
'businessCategory', - X500ATTR+'16': 'postalAddress', - X500ATTR+'17': 'postalCode', - X500ATTR+'18': 'postOfficeBox', - X500ATTR+'19': 'physicalDeliveryOfficeName', - X500ATTR+'20': 'telephoneNumber', - X500ATTR+'21': 'telexNumber', - X500ATTR+'22': 'teletexTerminalIdentifier', - X500ATTR+'23': 'facsimileTelephoneNumber', - X500ATTR+'24': 'x121Address', - X500ATTR+'25': 'internationaliSDNNumber', - X500ATTR+'26': 'registeredAddress', - X500ATTR+'27': 'destinationIndicator', - X500ATTR+'28': 'preferredDeliveryMethod', - X500ATTR+'29': 'presentationAddress', - X500ATTR+'30': 'supportedApplicationContext', - X500ATTR+'31': 'member', - X500ATTR+'32': 'owner', - X500ATTR+'33': 'roleOccupant', - X500ATTR+'36': 'userCertificate', - X500ATTR+'37': 'cACertificate', - X500ATTR+'38': 'authorityRevocationList', - X500ATTR+'39': 'certificateRevocationList', - X500ATTR+'40': 'crossCertificatePair', - X500ATTR+'42': 'givenName', - X500ATTR+'43': 'initials', - X500ATTR+'44': 'generationQualifier', - X500ATTR+'45': 'x500UniqueIdentifier', - X500ATTR+'46': 'dnQualifier', - X500ATTR+'47': 'enhancedSearchGuide', - X500ATTR+'48': 'protocolInformation', - X500ATTR+'50': 'uniqueMember', - X500ATTR+'51': 'houseIdentifier', - X500ATTR+'52': 'supportedAlgorithms', - X500ATTR+'53': 'deltaRevocationList', - X500ATTR+'54': 'dmdName', - X500ATTR+'65': 'pseudonym', - }, - 'to': { - 'associatedDomain': UCL_DIR_PILOT+'37', - 'authorityRevocationList': X500ATTR+'38', - 'businessCategory': X500ATTR+'15', - 'c': X500ATTR+'6', - 'cACertificate': X500ATTR+'37', - 'carLicense': NETSCAPE_LDAP+'1', - 'certificateRevocationList': X500ATTR+'39', - 'countryName': X500ATTR+'6', - 'crossCertificatePair': X500ATTR+'40', - 'dc': UCL_DIR_PILOT+'25', - 'deltaRevocationList': X500ATTR+'53', - 'departmentNumber': NETSCAPE_LDAP+'2', - 'destinationIndicator': X500ATTR+'27', - 'displayName': NETSCAPE_LDAP+'241', - 'dmdName': X500ATTR+'54', - 'dnQualifier': X500ATTR+'46', - 'domainComponent': UCL_DIR_PILOT+'25', - 
'eduPersonAffiliation': EDUPERSON_OID+'1', - 'eduPersonEntitlement': EDUPERSON_OID+'7', - 'eduPersonNickname': EDUPERSON_OID+'2', - 'eduPersonOrgDN': EDUPERSON_OID+'3', - 'eduPersonOrgUnitDN': EDUPERSON_OID+'4', - 'eduPersonPrimaryAffiliation': EDUPERSON_OID+'5', - 'eduPersonPrimaryOrgUnitDN': EDUPERSON_OID+'8', - 'eduPersonPrincipalName': EDUPERSON_OID+'6', - 'eduPersonPrincipalNamePrior': EDUPERSON_OID+'12', - 'eduPersonScopedAffiliation': EDUPERSON_OID+'9', - 'eduPersonTargetedID': EDUPERSON_OID+'10', - 'eduPersonAssurance': EDUPERSON_OID+'11', - 'eduPersonUniqueId': EDUPERSON_OID+'13', - 'eduPersonOrcid': EDUPERSON_OID+'16', - 'email': PKCS_9+'1', - 'emailAddress': PKCS_9+'1', - 'employeeNumber': NETSCAPE_LDAP+'3', - 'employeeType': NETSCAPE_LDAP+'4', - 'enhancedSearchGuide': X500ATTR+'47', - 'facsimileTelephoneNumber': X500ATTR+'23', - 'fax': X500ATTR+'23', - 'federationFeideSchemaVersion': NOREDUPERSON_OID+'9', - 'generationQualifier': X500ATTR+'44', - 'givenName': X500ATTR+'42', - 'gn': X500ATTR+'42', - 'houseIdentifier': X500ATTR+'51', - 'initials': X500ATTR+'43', - 'internationaliSDNNumber': X500ATTR+'25', - 'jpegPhoto': UCL_DIR_PILOT+'60', - 'knowledgeInformation': X500ATTR+'2', - 'l': X500ATTR+'7', - 'localityName': X500ATTR+'7', - 'mail': UCL_DIR_PILOT+'3', - 'member': X500ATTR+'31', - 'norEduOrgAcronym': NOREDUPERSON_OID+'6', - 'norEduOrgUniqueIdentifier': NOREDUPERSON_OID+'7', - 'norEduOrgUniqueNumber': NOREDUPERSON_OID+'1', - 'norEduOrgUnitUniqueIdentifier': NOREDUPERSON_OID+'8', - 'norEduOrgUnitUniqueNumber': NOREDUPERSON_OID+'2', - 'norEduPersonBirthDate': NOREDUPERSON_OID+'3', - 'norEduPersonLIN': NOREDUPERSON_OID+'4', - 'norEduPersonNIN': NOREDUPERSON_OID+'5', - 'o': X500ATTR+'10', - 'organizationName': X500ATTR+'10', - 'organizationalUnitName': X500ATTR+'11', - 'ou': X500ATTR+'11', - 'owner': X500ATTR+'32', - 'physicalDeliveryOfficeName': X500ATTR+'19', - 'pkcs9email': PKCS_9+'1', - 'postOfficeBox': X500ATTR+'18', - 'postalAddress': 
X500ATTR+'16', - 'postalCode': X500ATTR+'17', - 'preferredDeliveryMethod': X500ATTR+'28', - 'preferredLanguage': NETSCAPE_LDAP+'39', - 'presentationAddress': X500ATTR+'29', - 'protocolInformation': X500ATTR+'48', - 'pseudonym': X500ATTR+'65', - 'registeredAddress': X500ATTR+'26', - 'rfc822Mailbox': UCL_DIR_PILOT+'3', - 'roleOccupant': X500ATTR+'33', - 'searchGuide': X500ATTR+'14', - 'serialNumber': X500ATTR+'5', - 'sn': X500ATTR+'4', - 'st': X500ATTR+'8', - 'stateOrProvinceName': X500ATTR+'8', - 'street': X500ATTR+'9', - 'streetAddress': X500ATTR+'9', - 'supportedAlgorithms': X500ATTR+'52', - 'supportedApplicationContext': X500ATTR+'30', - 'surname': X500ATTR+'4', - 'telephoneNumber': X500ATTR+'20', - 'teletexTerminalIdentifier': X500ATTR+'22', - 'telexNumber': X500ATTR+'21', - 'title': X500ATTR+'12', - 'uniqueMember': X500ATTR+'50', - 'userCertificate': X500ATTR+'36', - 'userPKCS12': NETSCAPE_LDAP+'216', - 'userSMIMECertificate': NETSCAPE_LDAP+'40', - 'x121Address': X500ATTR+'24', - 'x500UniqueIdentifier': X500ATTR+'45', - } -} diff --git a/docker/setup.sh b/docker/setup.sh deleted file mode 100755 index 3545c5156..000000000 --- a/docker/setup.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e - -VENV_DIR=/opt/satosa - -python3 -m venv "$VENV_DIR" - -"${VENV_DIR}/bin/pip" install --upgrade pip -"${VENV_DIR}/bin/pip" install -e /src/satosa/ diff --git a/docker/start.sh b/docker/start.sh deleted file mode 100755 index dd57b2ee2..000000000 --- a/docker/start.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env sh - -set -e - -# for Click library to work in satosa-saml-metadata -export LC_ALL="C.UTF-8" -export LANG="C.UTF-8" - -if [ -z "${DATA_DIR}" ] -then DATA_DIR=/opt/satosa/etc -fi - -if [ ! -d "${DATA_DIR}" ] -then mkdir -p "${DATA_DIR}" -fi - -if [ -z "${PROXY_PORT}" ] -then PROXY_PORT="8000" -fi - -if [ -z "${METADATA_DIR}" ] -then METADATA_DIR="${DATA_DIR}" -fi - -if [ ! 
-d "${DATA_DIR}/attributemaps" ] -then cp -pr /opt/satosa/attributemaps "${DATA_DIR}/attributemaps" -fi - -# activate virtualenv -. /opt/satosa/bin/activate - -# generate metadata for frontend(IdP interface) and backend(SP interface) -# write the result to mounted volume -mkdir -p "${METADATA_DIR}" -satosa-saml-metadata \ - "${DATA_DIR}/proxy_conf.yaml" \ - "${DATA_DIR}/metadata.key" \ - "${DATA_DIR}/metadata.crt" \ - --dir "${METADATA_DIR}" - -# if the user provided a gunicorn configuration, use it -if [ -f "$GUNICORN_CONF" ] -then conf_opt="--config ${GUNICORN_CONF}" -else conf_opt="--chdir ${DATA_DIR}" -fi - -# if HTTPS cert is available, use it -https_key="${DATA_DIR}/https.key" -https_crt="${DATA_DIR}/https.crt" -if [ -f "$https_key" -a -f "$https_crt" ] -then https_opts="--keyfile ${https_key} --certfile ${https_crt}" -fi - -# if a chain is available, use it -chain_pem="${DATA_DIR}/chain.pem" -if [ -f "$chain_pem" ] -then chain_opts="--ca-certs chain.pem" -fi - -# start the proxy -exec gunicorn $conf_opt \ - -b 0.0.0.0:"${PROXY_PORT}" \ - satosa.wsgi:app \ - $https_opts \ - $chain_opts \ - ; From 103f477caf46d25fa5831fecc51db81f3f6b4d18 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 17 Nov 2022 19:55:49 +0200 Subject: [PATCH 218/288] Release v.8.2.0 Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 20 ++++++++++++++++++++ setup.py | 2 +- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index bb573d655..e1fc6a06a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.1.1 +current_version = 8.2.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 264005e65..620912c35 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## 8.2.0 (2022-11-17) + +- attribute_authorization: new configuration options `force_attributes_presence_on_allow` and `force_attributes_presence_on_deny` to enforce 
+- openid_connect frontend: new configuration option `sub_mirror_subject` to set sub to mirror the subject identifier as received in the backend. +- openid_connect frontend: check for empty `db_uri` before using it with a storage backend +- attribute_generation: try to render mustache template only on string values
+- docs: fix typos and grammar + + ## 8.1.1 (2022-06-23) - OIDC frontend: Set minimum pyop version to v3.4.0 to ensure the needed methods are available diff --git a/setup.py b/setup.py index 4e4f9f0d1..727e469ec 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.1.1', + version='8.2.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 1ee1696498b3a0aceda2db51cbd6a08cbef9870b Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 23 Nov 2022 00:36:55 +1300 Subject: [PATCH 219/288] new: add example for DecideBackendByRequester --- .../custom_routing_decide_by_requester.yaml.example | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 example/plugins/microservices/custom_routing_decide_by_requester.yaml.example diff --git a/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example new file mode 100644 index 000000000..a4ec441e3 --- /dev/null +++ b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example @@ -0,0 +1,7 @@ +module: satosa.micro_services.custom_routing.DecideBackendByRequester +name: DecideBackendByRequester +config: + requester_mapping: + 'requestor-id': 'backend_custom' + + From 11ceb2ea4e5615cdb8082c0230724c77b1c3d8be Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 23 Nov 2022 01:08:23 +1300 Subject: [PATCH 220/288] fix: DecideBackendByRequester: lookup via get This will make it easier to check for and deal with lookup failures. The microservice will return a None backend instead of raising an exception, but that's behaviour that's easier to deal with. 
And if nothing else provides a default backend, SATOSA will raise KeyError: None --- src/satosa/micro_services/custom_routing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index 541b824f1..1dbca48c5 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -79,7 +79,7 @@ def process(self, context, data): :param context: request context :param data: the internal request """ - context.target_backend = self.requester_mapping[data.requester] + context.target_backend = self.requester_mapping.get(data.requester) return super().process(context, data) From da2c135ed3411656f59392ff757062348f0c60e8 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 23 Nov 2022 01:10:48 +1300 Subject: [PATCH 221/288] new: add missing tests for DecideBackendByRequester --- .../micro_services/test_custom_routing.py | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index ed834ef4b..61a12a341 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -9,6 +9,7 @@ from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed from satosa.micro_services.custom_routing import DecideBackendByTargetIssuer +from satosa.micro_services.custom_routing import DecideBackendByRequester TARGET_ENTITY = "entity1" @@ -202,3 +203,37 @@ def test_when_target_is_mapped_choose_mapping_backend(self): data.requester = 'somebody else' newctx, newdata = self.plugin.process(self.context, data) assert newctx.target_backend == 'mapped_backend' + + +class TestDecideBackendByRequester(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'requester_mapping': { + 'test_requester': 
'mapped_backend', + }, + } + + plugin = DecideBackendByRequester( + config=config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_requester_is_not_mapped_skip(self): + data = InternalData(requester='other_test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert not newctx.target_backend + + def test_when_requester_is_mapped_choose_mapping_backend(self): + data = InternalData(requester='test_requester') + data.requester = 'test_requester' + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'mapped_backend' From ec0f620f9d1997a77a4d3f8bc03f59f0f04ab36a Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 23 Nov 2022 02:32:15 +1300 Subject: [PATCH 222/288] new: DecideBackendByRequester: add default_backend setting To avoid having to spell out all requesters: with default backend, only exceptions/overrides need to be listed. 
--- ...stom_routing_decide_by_requester.yaml.example | 1 + src/satosa/micro_services/custom_routing.py | 6 ++++-- .../satosa/micro_services/test_custom_routing.py | 16 +++++++++++++++- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example index a4ec441e3..90aed60eb 100644 --- a/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example +++ b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example @@ -1,6 +1,7 @@ module: satosa.micro_services.custom_routing.DecideBackendByRequester name: DecideBackendByRequester config: + default_backend: Saml2 requester_mapping: 'requestor-id': 'backend_custom' diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index 1dbca48c5..5706ce9aa 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -67,11 +67,13 @@ def __init__(self, config, *args, **kwargs): """ Constructor. :param config: mapping from requester identifier to - backend module name under the key 'requester_mapping' + backend module name under the key 'requester_mapping'. + May also include default backend under key 'default_backend'. 
:type config: Dict[str, Dict[str, str]] """ super().__init__(*args, **kwargs) self.requester_mapping = config['requester_mapping'] + self.default_backend = config.get('default_backend') def process(self, context, data): """ @@ -79,7 +81,7 @@ def process(self, context, data): :param context: request context :param data: the internal request """ - context.target_backend = self.requester_mapping.get(data.requester) + context.target_backend = self.requester_mapping.get(data.requester) or self.default_backend return super().process(context, data) diff --git a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index 61a12a341..1be124877 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -227,11 +227,25 @@ def setUp(self): self.context = context self.plugin = plugin - def test_when_requester_is_not_mapped_skip(self): + def test_when_requester_is_not_mapped_and_no_default_backend_skip(self): data = InternalData(requester='other_test_requester') newctx, newdata = self.plugin.process(self.context, data) assert not newctx.target_backend + def test_when_requester_is_not_mapped_choose_default_backend(self): + # override config to set default backend + self.config['default_backend'] = 'default_backend' + self.plugin = DecideBackendByRequester( + config=self.config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + self.plugin.next = lambda ctx, data: (ctx, data) + + data = InternalData(requester='other_test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'default_backend' + def test_when_requester_is_mapped_choose_mapping_backend(self): data = InternalData(requester='test_requester') data.requester = 'test_requester' From 5d543e0907008763f99d0eea9ed05e2db98faaf7 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 29 Nov 2022 15:49:04 +0200 Subject: [PATCH 223/288] 
frontends: ping: minor adjustments and fixes for interface compliance Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/ping.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/satosa/frontends/ping.py b/src/satosa/frontends/ping.py index 4444cd83d..27fec279c 100644 --- a/src/satosa/frontends/ping.py +++ b/src/satosa/frontends/ping.py @@ -1,14 +1,14 @@ import logging import satosa.logging_util as lu -import satosa.micro_services.base +from satosa.frontends.base import FrontendModule from satosa.response import Response logger = logging.getLogger(__name__) -class PingFrontend(satosa.frontends.base.FrontendModule): +class PingFrontend(FrontendModule): """ SATOSA frontend that responds to a query with a simple 200 OK, intended to be used as a simple heartbeat monitor. @@ -19,12 +19,12 @@ def __init__(self, auth_req_callback_func, internal_attributes, config, base_url self.config = config - def handle_authn_response(self, context, internal_resp, extra_id_token_claims=None): + def handle_authn_response(self, context, internal_resp): """ See super class method satosa.frontends.base.FrontendModule#handle_authn_response :type context: satosa.context.Context :type internal_response: satosa.internal.InternalData - :rtype oic.utils.http_util.Response + :rtype: satosa.response.Response """ raise NotImplementedError() @@ -32,7 +32,7 @@ def handle_backend_error(self, exception): """ See super class satosa.frontends.base.FrontendModule :type exception: satosa.exception.SATOSAError - :rtype: oic.utils.http_util.Response + :rtype: satosa.response.Response """ raise NotImplementedError() @@ -49,6 +49,8 @@ def register_endpoints(self, backend_names): def ping_endpoint(self, context): """ + :type context: satosa.context.Context + :rtype: satosa.response.Response """ msg = "Ping returning 200 OK" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) From 4e8d27c0697f5fc302f4387a7a4ca170a6a1e393 Mon Sep 17 00:00:00 2001 
From: Vlad Mencl Date: Tue, 31 Jan 2023 23:36:25 +1300 Subject: [PATCH 224/288] Examples: minor fixes and enhancements for ContactPerson examples for SAML backend and frontend (#430) * fix: example: prefix ContactPerson emailAddress with "mailto:" As per SAML 2.0 spec, this should be URIs - so should start with "mailto:" * new: example/saml: add example for REFEDS security contact As per https://refeds.org/metadata/contactType/security --- example/plugins/backends/saml2_backend.yaml.example | 5 +++-- example/plugins/frontends/saml2_frontend.yaml.example | 5 +++-- .../plugins/frontends/saml2_virtualcofrontend.yaml.example | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 3fb30fb2a..3d3f25c0d 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -25,8 +25,9 @@ config: cert_file: backend.crt organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: - - {contact_type: technical, email_address: technical@example.com, given_name: Technical} - - {contact_type: support, email_address: support@example.com, given_name: Support} + - {contact_type: technical, email_address: 'mailto:technical@example.com', given_name: Technical} + - {contact_type: support, email_address: 'mailto:support@example.com', given_name: Support} + - {contact_type: other, email_address: 'mailto:security@example.com', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} metadata: local: [idp.xml] diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index 058c7746e..a527ab652 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ 
b/example/plugins/frontends/saml2_frontend.yaml.example @@ -24,8 +24,9 @@ config: idp_config: organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: - - {contact_type: technical, email_address: technical@example.com, given_name: Technical} - - {contact_type: support, email_address: support@example.com, given_name: Support} + - {contact_type: technical, email_address: 'mailto:technical@example.com', given_name: Technical} + - {contact_type: support, email_address: 'mailto:support@example.com', given_name: Support} + - {contact_type: other, email_address: 'mailto:security@example.com', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} key_file: frontend.key cert_file: frontend.crt metadata: diff --git a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index e7415c55e..a1ed8ad8f 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -22,7 +22,7 @@ config: url: https://messproject.org contact_person: - contact_type: technical - email_address: help@messproject.org + email_address: 'mailto:help@messproject.org' given_name: MESS Technical Support # SAML attributes and static values about the CO to be asserted for each user. # The key is the SATOSA internal attribute name. 
Support existing syntax (regexp as direct filter value) by mapping it to a dict explicitly pointing to regexp filter.
--- .../micro_services/attribute_modifications.py | 33 ++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 67633af27..447d42e8e 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ b/src/satosa/micro_services/attribute_modifications.py @@ -1,6 +1,7 @@ import re from .base import ResponseMicroService +from ..exception import SATOSAError class AddStaticAttributes(ResponseMicroService): @@ -40,17 +41,27 @@ def process(self, context, data): def _apply_requester_filters(self, attributes, provider_filters, requester): # apply default requester filters default_requester_filters = provider_filters.get("", {}) - self._apply_filter(attributes, default_requester_filters) + self._apply_filters(attributes, default_requester_filters) # apply requester specific filters requester_filters = provider_filters.get(requester, {}) - self._apply_filter(attributes, requester_filters) - - def _apply_filter(self, attributes, attribute_filters): - for attribute_name, attribute_filter in attribute_filters.items(): - regex = re.compile(attribute_filter) - if attribute_name == "": # default filter for all attributes - for attribute, values in attributes.items(): - attributes[attribute] = list(filter(regex.search, attributes[attribute])) - elif attribute_name in attributes: - attributes[attribute_name] = list(filter(regex.search, attributes[attribute_name])) + self._apply_filters(attributes, requester_filters) + + def _apply_filters(self, attributes, attribute_filters): + for attribute_name, attribute_filters in attribute_filters.items(): + if type(attribute_filters) == str: + # convert simple notation to filter list + attribute_filters = {'regexp': attribute_filters} + + for filter_type, filter_value in attribute_filters.items(): + + if filter_type == "regexp": + filter_func = re.compile(filter_value).search + else: + raise 
SATOSAError("Unknown filter type") + + if attribute_name == "": # default filter for all attributes + for attribute, values in attributes.items(): + attributes[attribute] = list(filter(filter_func, attributes[attribute])) + elif attribute_name in attributes: + attributes[attribute_name] = list(filter(filter_func, attributes[attribute_name])) From df563efbd7534e95f007efffac338d286f5c2b31 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 15 Mar 2023 13:48:45 +1300 Subject: [PATCH 227/288] new: FilterAttributeValues: pass context and target_provider through _apply_requester_filters and _apply_filters ... for use by specific filters --- .../micro_services/attribute_modifications.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 447d42e8e..3ed063161 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ b/src/satosa/micro_services/attribute_modifications.py @@ -30,24 +30,24 @@ def __init__(self, config, *args, **kwargs): def process(self, context, data): # apply default filters provider_filters = self.attribute_filters.get("", {}) - self._apply_requester_filters(data.attributes, provider_filters, data.requester) + target_provider = data.auth_info.issuer + self._apply_requester_filters(data.attributes, provider_filters, data.requester, context, target_provider) # apply target provider specific filters - target_provider = data.auth_info.issuer provider_filters = self.attribute_filters.get(target_provider, {}) - self._apply_requester_filters(data.attributes, provider_filters, data.requester) + self._apply_requester_filters(data.attributes, provider_filters, data.requester, context, target_provider) return super().process(context, data) - def _apply_requester_filters(self, attributes, provider_filters, requester): + def _apply_requester_filters(self, attributes, provider_filters, requester, context, 
target_provider): # apply default requester filters default_requester_filters = provider_filters.get("", {}) - self._apply_filters(attributes, default_requester_filters) + self._apply_filters(attributes, default_requester_filters, context, target_provider) # apply requester specific filters requester_filters = provider_filters.get(requester, {}) - self._apply_filters(attributes, requester_filters) + self._apply_filters(attributes, requester_filters, context, target_provider) - def _apply_filters(self, attributes, attribute_filters): + def _apply_filters(self, attributes, attribute_filters, context, target_provider): for attribute_name, attribute_filters in attribute_filters.items(): if type(attribute_filters) == str: # convert simple notation to filter list From c14f0a0b60ad524947b6f74a9a00d76f161725c8 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 15 Mar 2023 13:50:58 +1300 Subject: [PATCH 228/288] new: FilterAttributeValues: add new filter types shibmdscope_match_scope and shibmdscope_match_value Equivalent to ScopeMatchesShibMDScope and ValueMatchesShibMDScope from the Shibboleth project. 
--- .../micro_services/attribute_modifications.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 3ed063161..29ca7298c 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ b/src/satosa/micro_services/attribute_modifications.py @@ -1,8 +1,11 @@ import re +import logging from .base import ResponseMicroService +from ..context import Context from ..exception import SATOSAError +logger = logging.getLogger(__name__) class AddStaticAttributes(ResponseMicroService): """ @@ -57,6 +60,14 @@ def _apply_filters(self, attributes, attribute_filters, context, target_provider if filter_type == "regexp": filter_func = re.compile(filter_value).search + elif filter_type == "shibmdscope_match_scope": + mdstore = context.get_decoration(Context.KEY_METADATA_STORE) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) + filter_func = lambda v: self._shibmdscope_match_scope(v, md_scopes) + elif filter_type == "shibmdscope_match_value": + mdstore = context.get_decoration(Context.KEY_METADATA_STORE) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) + filter_func = lambda v: self._shibmdscope_match_value(v, md_scopes) else: raise SATOSAError("Unknown filter type") @@ -65,3 +76,19 @@ def _apply_filters(self, attributes, attribute_filters, context, target_provider attributes[attribute] = list(filter(filter_func, attributes[attribute])) elif attribute_name in attributes: attributes[attribute_name] = list(filter(filter_func, attributes[attribute_name])) + + def _shibmdscope_match_value(self, value, md_scopes): + for md_scope in md_scopes: + if not md_scope['regexp'] and md_scope['text'] == value: + return True + elif md_scope['regexp'] and re.compile(md_scope['text']).match(value): + return True + return False + + def _shibmdscope_match_scope(self, value, md_scopes): + split_value = 
value.split('@') + if len(split_value) != 2: + logger.info(f"Discarding invalid scoped value {value}") + return False + value_scope = split_value[1] + return self._shibmdscope_match_value(value_scope, md_scopes) From f7fcadff16ae9fcdf6a3aaead3182720a54fb2a6 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Wed, 15 Mar 2023 13:53:28 +1300 Subject: [PATCH 229/288] new: examples/filter_attributes: enforce scope on scoped attributes (and also enforce scoping rules on schacHomeOrganization value) --- .../microservices/filter_attributes.yaml.example | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/example/plugins/microservices/filter_attributes.yaml.example b/example/plugins/microservices/filter_attributes.yaml.example index f8ae2fb0a..9d445765c 100644 --- a/example/plugins/microservices/filter_attributes.yaml.example +++ b/example/plugins/microservices/filter_attributes.yaml.example @@ -6,10 +6,20 @@ config: "": # default rules for any requester "": - # enforce controlled vocabulary + # enforce controlled vocabulary (via simple notation) eduPersonAffiliation: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)$" eduPersonPrimaryAffiliation: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)$" - eduPersonScopedAffiliation: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)@" + eduPersonScopedAffiliation: + # enforce controlled vocabulary (via extended notation) + regexp: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)@" + # enforce correct scope + shibmdscope_match_scope: + eduPersonPrincipalName: + # enforce correct scope + shibmdscope_match_scope: + schacHomeOrganization: + # enforce scoping rule on attribute value + shibmdscope_match_value: target_provider1: requester1: From fcbd4ddf42f4bf15d38ed3333b090b13b71144a8 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 17 Mar 2023 15:59:29 +1300 Subject: [PATCH 230/288] fix: FilterAttributeValues: use 
re.fullmatch, remove unnecessary compile ... as per review in #432 --- src/satosa/micro_services/attribute_modifications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 29ca7298c..6fe6dfa4a 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ b/src/satosa/micro_services/attribute_modifications.py @@ -81,7 +81,7 @@ def _shibmdscope_match_value(self, value, md_scopes): for md_scope in md_scopes: if not md_scope['regexp'] and md_scope['text'] == value: return True - elif md_scope['regexp'] and re.compile(md_scope['text']).match(value): + elif md_scope['regexp'] and re.fullmatch(md_scope['text'], value): return True return False From a7491502d88d7f2196ac3ef211e902276e03a19e Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 17 Mar 2023 16:00:58 +1300 Subject: [PATCH 231/288] fix: FilterAttributeValues: call mdstore only if available --- src/satosa/micro_services/attribute_modifications.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 6fe6dfa4a..bb00761b4 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ b/src/satosa/micro_services/attribute_modifications.py @@ -62,11 +62,11 @@ def _apply_filters(self, attributes, attribute_filters, context, target_provider filter_func = re.compile(filter_value).search elif filter_type == "shibmdscope_match_scope": mdstore = context.get_decoration(Context.KEY_METADATA_STORE) - md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) if mdstore else [] filter_func = lambda v: self._shibmdscope_match_scope(v, md_scopes) elif filter_type == "shibmdscope_match_value": mdstore = context.get_decoration(Context.KEY_METADATA_STORE) - 
md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) if mdstore else [] filter_func = lambda v: self._shibmdscope_match_value(v, md_scopes) else: raise SATOSAError("Unknown filter type") From e5a67cdad638621d5c552087b14fee53bba776df Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 17 Mar 2023 16:04:49 +1300 Subject: [PATCH 232/288] new: FilterAttributeValues: add tests for new filter notation Test regexp filter via new notation, test invalid filter type. --- .../test_attribute_modifications.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/tests/satosa/micro_services/test_attribute_modifications.py b/tests/satosa/micro_services/test_attribute_modifications.py index 0efaec43e..3e3b1d815 100644 --- a/tests/satosa/micro_services/test_attribute_modifications.py +++ b/tests/satosa/micro_services/test_attribute_modifications.py @@ -1,3 +1,5 @@ +import pytest +from satosa.exception import SATOSAError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_modifications import FilterAttributeValues @@ -116,3 +118,43 @@ def test_filter_one_attribute_for_one_target_provider_for_one_requester(self): } filtered = filter_service.process(None, resp) assert filtered.attributes == {"a1": ["1:foo:bar:2"]} + + def test_filter_one_attribute_from_all_target_providers_for_all_requesters_in_extended_notation(self): + attribute_filters = { + "": { + "": { + "a2": { + "regexp": "^foo:bar$" + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + filtered = filter_service.process(None, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo:bar"]} + + def test_invalid_filter_type(self): + attribute_filters = { + 
"": { + "": { + "a2": { + "invalid_filter": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + with pytest.raises(SATOSAError): + filtered = filter_service.process(None, resp) From 92b9dc7576070887cf9f1540beb69fb53e582c39 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Fri, 17 Mar 2023 16:05:47 +1300 Subject: [PATCH 233/288] new: FilterAttributeValues: add tests for shibmdscope_match_scope and shibmdscope_match_value filters --- .../test_attribute_modifications.py | 240 ++++++++++++++++++ 1 file changed, 240 insertions(+) diff --git a/tests/satosa/micro_services/test_attribute_modifications.py b/tests/satosa/micro_services/test_attribute_modifications.py index 3e3b1d815..2bd1db0fc 100644 --- a/tests/satosa/micro_services/test_attribute_modifications.py +++ b/tests/satosa/micro_services/test_attribute_modifications.py @@ -1,4 +1,8 @@ import pytest +from tests.util import FakeIdP, create_metadata_from_config_dict, FakeSP +from saml2.mdstore import MetadataStore +from saml2.config import Config +from satosa.context import Context from satosa.exception import SATOSAError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData @@ -12,6 +16,22 @@ def create_filter_service(self, attribute_filters): filter_service.next = lambda ctx, data: data return filter_service + def create_idp_metadata_conf_with_shibmd_scopes(self, idp_entityid, shibmd_scopes): + idp_conf = { + "entityid": idp_entityid, + "service": { + "idp":{} + } + } + + if shibmd_scopes is not None: + idp_conf["service"]["idp"]["scope"] = shibmd_scopes + + metadata_conf = { + "inline": [create_metadata_from_config_dict(idp_conf)] + } + return metadata_conf + def test_filter_all_attributes_from_all_target_providers_for_all_requesters(self): attribute_filters = { "": { # all providers @@ -158,3 +178,223 @@ 
def test_invalid_filter_type(self): } with pytest.raises(SATOSAError): filtered = filter_service.process(None, resp) + + def test_shibmdscope_match_value_filter_with_no_md_store_in_context(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + ctx = Context() + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + def test_shibmdscope_match_value_filter_with_empty_md_store_in_context(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + ctx = Context() + mdstore = MetadataStore(None, None) + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + def test_shibmdscope_match_value_filter_with_idp_md_with_no_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, None)) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + 
def test_shibmdscope_match_value_filter_with_idp_md_with_single_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo.bar"]} + + def test_shibmdscope_match_value_filter_with_idp_md_with_single_regexp_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["test.foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["[^.]*\.foo\.bar$"])) + mdstore[idp_entityid]['idpsso_descriptor'][0]['extensions']['extension_elements'][0]['regexp'] = 'true' + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["test.foo.bar"]} + + def test_shibmdscope_match_value_filter_with_idp_md_with_multiple_scopes(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = 
InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2", "foo.baz", "foo.baz.com"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar", "foo.baz"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo.bar", "foo.baz"]} + + def test_shibmdscope_match_scope_filter_with_single_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_scope": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "value@foo.bar", "1.foo.bar.2", "value@foo.bar.2", "value@extra@foo.bar"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["value@foo.bar"]} + + def test_multiple_filters_for_single_attribute(self): + attribute_filters = { + "": { + "": { + "a2": { + "regexp": "^value1@", + "shibmdscope_match_scope": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "value1@foo.bar", "value2@foo.bar", "1.foo.bar.2", "value@foo.bar.2", "value@extra@foo.bar"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = 
MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["value1@foo.bar"]} From cfda9cedaa137ae9eb8d0759089cd2f166a8ed87 Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Mon, 20 Mar 2023 10:29:55 +1300 Subject: [PATCH 234/288] new: examples/filter_attributes: add sample rules for saml-subject-id and saml-pairwise-id --- .../microservices/filter_attributes.yaml.example | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/example/plugins/microservices/filter_attributes.yaml.example b/example/plugins/microservices/filter_attributes.yaml.example index 9d445765c..185f2dec0 100644 --- a/example/plugins/microservices/filter_attributes.yaml.example +++ b/example/plugins/microservices/filter_attributes.yaml.example @@ -17,6 +17,16 @@ config: eduPersonPrincipalName: # enforce correct scope shibmdscope_match_scope: + subject-id: + # enforce attribute syntax + regexp: "^[0-9A-Za-z][-=0-9A-Za-z]{0,126}@[0-9A-Za-z][-.0-9A-Za-z]{0,126}\\Z" + # enforce correct scope + shibmdscope_match_scope: + pairwise-id: + # enforce attribute syntax + regexp: "^[0-9A-Za-z][-=0-9A-Za-z]{0,126}@[0-9A-Za-z][-.0-9A-Za-z]{0,126}\\Z" + # enforce correct scope + shibmdscope_match_scope: schacHomeOrganization: # enforce scoping rule on attribute value shibmdscope_match_value: From f8529f158620e49eb9ebc05db8d1205dfc286b2d Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Mon, 20 Mar 2023 10:32:35 +1300 Subject: [PATCH 235/288] nfc: FilterAttributeValues: add clarifying comment to shibmdscope_match_scope test --- tests/satosa/micro_services/test_attribute_modifications.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/satosa/micro_services/test_attribute_modifications.py b/tests/satosa/micro_services/test_attribute_modifications.py index 
2bd1db0fc..aa1fcb8d5 100644 --- a/tests/satosa/micro_services/test_attribute_modifications.py +++ b/tests/satosa/micro_services/test_attribute_modifications.py @@ -304,6 +304,7 @@ def test_shibmdscope_match_value_filter_with_idp_md_with_single_regexp_scope(sel mdstore = MetadataStore(None, Config()) mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["[^.]*\.foo\.bar$"])) + # mark scope as regexp (cannot be done via pysaml2 YAML config) mdstore[idp_entityid]['idpsso_descriptor'][0]['extensions']['extension_elements'][0]['regexp'] = 'true' ctx = Context() ctx.decorate(Context.KEY_METADATA_STORE, mdstore) From 754dcc2099adba680ba6d70690524ed367efbaa5 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 16 May 2023 18:10:33 +0300 Subject: [PATCH 236/288] Improve configuration readability of the primary-identifier plugin Signed-off-by: Ivan Kanakarakis --- .../plugins/microservices/primary_identifier.yaml.example | 6 +++++- src/satosa/micro_services/primary_identifier.py | 1 - 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/example/plugins/microservices/primary_identifier.yaml.example b/example/plugins/microservices/primary_identifier.yaml.example index 0406f578e..0b14d7127 100644 --- a/example/plugins/microservices/primary_identifier.yaml.example +++ b/example/plugins/microservices/primary_identifier.yaml.example @@ -22,20 +22,24 @@ config: - attribute_names: [eppn] - attribute_names: [name_id] name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:persistent - # The line below addes the IdP entityID to the value for the SAML2 + # The line below adds the IdP entityID to the value for the SAML2 # Persistent NameID to ensure the value is fully scoped. add_scope: issuer_entityid - attribute_names: [edupersontargetedid] add_scope: issuer_entityid + # The internal SATOSA attribute into which to place the primary # identifier value once found from the above configured ordered # candidates. 
primary_identifier: uid + # Whether or not to clear the input attributes after setting the # primary identifier value. clear_input_attributes: no + # Whether to replace subject_id with the constructed primary identifier replace_subject_id: no + # If defined redirect to this page if no primary identifier can # be found. on_error: https://my.org/errors/no_primary_identifier diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 2a140a9e4..1df2479eb 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -62,7 +62,6 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): # name_id_format add the value for the NameID of that format if it was asserted by the IdP # or else add the value None. if 'name_id' in candidate['attribute_names']: - candidate_nameid_value = None candidate_nameid_value = None candidate_name_id_format = candidate.get('name_id_format') name_id_value = data.subject_id From f4f55b0d5664953cae38d39700aaab9997cca4f5 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Tue, 16 May 2023 18:53:26 +0300 Subject: [PATCH 237/288] tests: use matchers to mock responses Signed-off-by: Ivan Kanakarakis --- .../micro_services/test_account_linking.py | 19 +++++++++++++------ tests/test_requirements.txt | 2 +- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/tests/satosa/micro_services/test_account_linking.py b/tests/satosa/micro_services/test_account_linking.py index 1c6dad5e4..859f3517d 100644 --- a/tests/satosa/micro_services/test_account_linking.py +++ b/tests/satosa/micro_services/test_account_linking.py @@ -3,7 +3,10 @@ import pytest import requests + import responses +from responses import matchers + from jwkest.jwk import rsa_load, RSAKey from jwkest.jws import JWS @@ -46,13 +49,15 @@ def test_existing_account_linking_with_known_known_uuid(self, account_linking_co } key = 
RSAKey(key=rsa_load(account_linking_config["sign_key"]), use="sig", alg="RS256") jws = JWS(json.dumps(data), alg=key.alg).sign_compact([key]) + url = "%s/get_id" % account_linking_config["api_url"] + params = {"jwt": jws} responses.add( responses.GET, - "%s/get_id?jwt=%s" % (account_linking_config["api_url"], jws), - status=200, + url=url, body=uuid, + match=[matchers.query_param_matcher(params)], content_type="text/html", - match_querystring=True + status=200, ) self.account_linking.process(context, internal_response) @@ -82,13 +87,15 @@ def test_full_flow(self, account_linking_config, internal_response, context): uuid = "uuid" with responses.RequestsMock() as rsps: # account is linked, 200 OK + url = "%s/get_id" % account_linking_config["api_url"] + params = {"jwt": jws} rsps.add( responses.GET, - "%s/get_id?jwt=%s" % (account_linking_config["api_url"], jws), - status=200, + url=url, body=uuid, + match=[matchers.query_param_matcher(params)], content_type="text/html", - match_querystring=True + status=200, ) internal_response = self.account_linking._handle_al_response(context) assert internal_response.subject_id == uuid diff --git a/tests/test_requirements.txt b/tests/test_requirements.txt index 1991e4cac..fa872ab2a 100644 --- a/tests/test_requirements.txt +++ b/tests/test_requirements.txt @@ -1,5 +1,5 @@ pytest -responses +responses >= 0.14 beautifulsoup4 ldap3 mongomock From 501a63af262220a51c390fbd5bbeb906e3100476 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 8 Jun 2023 19:45:58 +0300 Subject: [PATCH 238/288] openid_connect backend: use PyoidcSettings class to configure pyoidc/oic based clients Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/openid_connect.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/satosa/backends/openid_connect.py b/src/satosa/backends/openid_connect.py index 87772f565..cb97154f6 100644 --- a/src/satosa/backends/openid_connect.py +++ b/src/satosa/backends/openid_connect.py @@ 
-12,6 +12,7 @@ from oic.oic.message import RegistrationRequest from oic.utils.authn.authn_context import UNSPECIFIED from oic.utils.authn.client import CLIENT_AUTHN_METHOD +from oic.utils.settings import PyoidcSettings import satosa.logging_util as lu from satosa.internal import AuthenticationInformation @@ -55,10 +56,12 @@ def __init__(self, auth_callback_func, internal_attributes, config, base_url, na super().__init__(auth_callback_func, internal_attributes, base_url, name) self.auth_callback_func = auth_callback_func self.config = config + cfg_verify_ssl = config["client"].get("verify_ssl", True) + oidc_settings = PyoidcSettings(verify_ssl=cfg_verify_ssl) self.client = _create_client( - config["provider_metadata"], - config["client"]["client_metadata"], - config["client"].get("verify_ssl", True), + provider_metadata=config["provider_metadata"], + client_metadata=config["client"]["client_metadata"], + settings=oidc_settings, ) if "scope" not in config["client"]["auth_req_params"]: config["auth_req_params"]["scope"] = "openid" @@ -243,7 +246,7 @@ def get_metadata_desc(self): return get_metadata_desc_for_oauth_backend(self.config["provider_metadata"]["issuer"], self.config) -def _create_client(provider_metadata, client_metadata, verify_ssl=True): +def _create_client(provider_metadata, client_metadata, settings=None): """ Create a pyoidc client instance. 
:param provider_metadata: provider configuration information @@ -254,7 +257,7 @@ def _create_client(provider_metadata, client_metadata, verify_ssl=True): :rtype: oic.oic.Client """ client = oic.Client( - client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl + client_authn_method=CLIENT_AUTHN_METHOD, settings=settings ) # Provider configuration information From 770ad420c26ac604ef9bce6d585c2ef6ca773a3b Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 8 Jun 2023 19:54:13 +0300 Subject: [PATCH 239/288] saml frontend: remove metadata param when applying the set policy This param was deprecated by pysaml2 v6.3.0 Signed-off-by: Ivan Kanakarakis --- src/satosa/frontends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 655e6da68..379635fc2 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -282,7 +282,7 @@ def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state): for aconv in attrconvs: if aconv.name_format == name_format: all_attributes = {v: None for v in aconv._fro.values()} - attribute_filter = list(idp_policy.restrict(all_attributes, sp_entity_id, idp.metadata).keys()) + attribute_filter = list(idp_policy.restrict(all_attributes, sp_entity_id).keys()) break attribute_filter = self.converter.to_internal_filter(self.attribute_profile, attribute_filter) msg = "Filter: {}".format(attribute_filter) From 45651e871eb0445e374e96ae51673ba662aa7cba Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Thu, 8 Jun 2023 18:19:05 +0300 Subject: [PATCH 240/288] Release v8.3.0 - FilterAttributeValues plugin: add new filter types shibmdscope_match_scope and shibmdscope_match_value; add tests - FilterAttributeValues plugin: add example rules for saml-subject-id and saml-pairwise-id - FilterAttributeValues plugin: add example rules enforcing controlled vocabulary for eduPersonAffiliation and eduPersonScopedAffiliation attributes - 
DecideBackendByRequester plugin: add default_backend setting; add tests; minor fixes - openid_connect backend: use PyoidcSettings class to configure pyoidc/oic based clients - ping frontend: minor adjustments and fixes for interface compliance - tests: update code to use matchers API to mock responses - examples: improve configuration readability of the primary-identifier plugin - examples: minor fixes and enhancements for ContactPerson examples for SAML backend and frontend Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 13 +++++++++++++ setup.py | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e1fc6a06a..c1fc9a358 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.2.0 +current_version = 8.3.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 620912c35..2824cc0a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## 8.3.0 (2023-06-08) + +- FilterAttributeValues plugin: add new filter types shibmdscope_match_scope and shibmdscope_match_value; add tests +- FilterAttributeValues plugin: add example rules for saml-subject-id and saml-pairwise-id +- FilterAttributeValues plugin: add example rules enforcing controlled vocabulary for eduPersonAffiliation and eduPersonScopedAffiliation attributes +- DecideBackendByRequester plugin: add default_backend setting; add tests; minor fixes +- openid_connect backend: use PyoidcSettings class to configure pyoidc/oic based clients +- ping frontend: minor adjustments and fixes for interface compliance +- tests: update code to use matchers API to mock responses +- examples: improve configuration readability of the primary-identifier plugin +- examples: minor fixes and enhancements for ContactPerson examples for SAML backend and frontend + + ## 8.2.0 (2022-11-17) - attribute_authorization: new configuration options `force_attributes_presence_on_allow` 
and `force_attributes_presence_on_deny` to enforce attribute presence enforcement diff --git a/setup.py b/setup.py index 727e469ec..b01ef2dc1 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.2.0', + version='8.3.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 00e7ada6f5104f7e67a294de6fb5470c93729650 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 13:58:28 +0300 Subject: [PATCH 241/288] Move away from pkg_resources when deriving the package version at runtime Signed-off-by: Ivan Kanakarakis --- setup.py | 1 + src/satosa/version.py | 11 ++++++----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index b01ef2dc1..644ed1d19 100644 --- a/setup.py +++ b/setup.py @@ -25,6 +25,7 @@ "click", "chevron", "cookies-samesite-compat", + "importlib-metadata >= 1.7.0; python_version <= '3.8'", ], extras_require={ "ldap": ["ldap3"], diff --git a/src/satosa/version.py b/src/satosa/version.py index 8025c9e3c..cac85faf0 100644 --- a/src/satosa/version.py +++ b/src/satosa/version.py @@ -1,11 +1,12 @@ -import pkg_resources as _pkg_resources +try: + from importlib.metadata import version as _resolve_package_version +except ImportError: + from importlib_metadata import version as _resolve_package_version # type: ignore[no-redef] def _parse_version(): - data = _pkg_resources.get_distribution('satosa') - value = _pkg_resources.parse_version(data.version) + value = _resolve_package_version("satosa") return value -version_info = _parse_version() -version = str(version_info) +version = _parse_version() From c9c5ba05902d5c43012ce9d891505444ec952430 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 13:58:47 +0300 Subject: [PATCH 242/288] Update markers of supported Python versions Signed-off-by: Ivan Kanakarakis --- setup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.py b/setup.py index 
644ed1d19..557691df7 100644 --- a/setup.py +++ b/setup.py @@ -38,6 +38,9 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], entry_points={ "console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"] From 58f9381df0b913253ddee94ee616198fc738d40f Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 13:59:03 +0300 Subject: [PATCH 243/288] Use raw strings for regex Signed-off-by: Ivan Kanakarakis --- tests/satosa/micro_services/test_attribute_modifications.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/satosa/micro_services/test_attribute_modifications.py b/tests/satosa/micro_services/test_attribute_modifications.py index aa1fcb8d5..41ce8a7c0 100644 --- a/tests/satosa/micro_services/test_attribute_modifications.py +++ b/tests/satosa/micro_services/test_attribute_modifications.py @@ -303,7 +303,7 @@ def test_shibmdscope_match_value_filter_with_idp_md_with_single_regexp_scope(sel resp.auth_info.issuer = idp_entityid mdstore = MetadataStore(None, Config()) - mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["[^.]*\.foo\.bar$"])) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, [r"[^.]*\.foo\.bar$"])) # mark scope as regexp (cannot be done via pysaml2 YAML config) mdstore[idp_entityid]['idpsso_descriptor'][0]['extensions']['extension_elements'][0]['regexp'] = 'true' ctx = Context() From c86c9c29d2c7d77fbbe06aff9624e2e59e3f362c Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Sat, 14 Jan 2023 22:56:07 +1300 Subject: [PATCH 244/288] fix: metadata_creation: for SAML backend, use sp.config to render metadata ... 
because SAMLBackend modifies the config (adding encryption_keypairs to config) and this modified config is stored under sp.config. Otherwise, metadata created via the metadata-creation scripts (satosa-saml-metadata) would be missing encryption keys (KeyDescriptor use="encryption"). --- src/satosa/metadata_creation/saml_metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/metadata_creation/saml_metadata.py b/src/satosa/metadata_creation/saml_metadata.py index 895de4b98..b1c5087c1 100644 --- a/src/satosa/metadata_creation/saml_metadata.py +++ b/src/satosa/metadata_creation/saml_metadata.py @@ -17,7 +17,7 @@ def _create_entity_descriptor(entity_config): - cnf = Config().load(copy.deepcopy(entity_config)) + cnf = entity_config if isinstance(entity_config, Config) else Config().load(copy.deepcopy(entity_config)) return entity_descriptor(cnf) @@ -28,7 +28,7 @@ def _create_backend_metadata(backend_modules): if isinstance(plugin_module, SAMLBackend): logline = "Generating SAML backend '{}' metadata".format(plugin_module.name) logger.info(logline) - backend_metadata[plugin_module.name] = [_create_entity_descriptor(plugin_module.config["sp_config"])] + backend_metadata[plugin_module.name] = [_create_entity_descriptor(plugin_module.sp.config)] return backend_metadata From 69b532a31c88de19a3ab4b3a1ec1401cd20c99cb Mon Sep 17 00:00:00 2001 From: Vlad Mencl Date: Sat, 14 Jan 2023 23:04:44 +1300 Subject: [PATCH 245/288] new: satosa-saml-metadata: make signing optional Allow skipping signing with --no-sign - and in that case, do not require key+cert. Default to signing enabled (keep existing behaviour). Mark key and cert args as optional in Click and instead check them explicitly when signing is enabled. Add new method create_entity_descriptor_metadata as counterpart to create_signed_entity_descriptor to also apply `valid` option to EntityDescriptor but avoid signing. 
--- src/satosa/metadata_creation/saml_metadata.py | 15 +++++++ src/satosa/scripts/satosa_saml_metadata.py | 45 +++++++++++++------ 2 files changed, 46 insertions(+), 14 deletions(-) diff --git a/src/satosa/metadata_creation/saml_metadata.py b/src/satosa/metadata_creation/saml_metadata.py index b1c5087c1..f88bbaaec 100644 --- a/src/satosa/metadata_creation/saml_metadata.py +++ b/src/satosa/metadata_creation/saml_metadata.py @@ -154,3 +154,18 @@ def create_signed_entity_descriptor(entity_descriptor, security_context, valid_f raise ValueError("Could not construct valid EntityDescriptor tag") return xmldoc + + +def create_entity_descriptor_metadata(entity_descriptor, valid_for=None): + """ + :param entity_descriptor: the entity descriptor to create metadata for + :param valid_for: number of hours the metadata should be valid + :return: the EntityDescriptor metadata + + :type entity_descriptor: saml2.md.EntityDescriptor + :type valid_for: Optional[int] + """ + if valid_for: + entity_descriptor.valid_until = in_a_while(hours=valid_for) + + return str(entity_descriptor) diff --git a/src/satosa/scripts/satosa_saml_metadata.py b/src/satosa/scripts/satosa_saml_metadata.py index 20e4ae4f9..c0638d8b7 100644 --- a/src/satosa/scripts/satosa_saml_metadata.py +++ b/src/satosa/scripts/satosa_saml_metadata.py @@ -5,6 +5,7 @@ from saml2.sigver import security_context from ..metadata_creation.saml_metadata import create_entity_descriptors +from ..metadata_creation.saml_metadata import create_entity_descriptor_metadata from ..metadata_creation.saml_metadata import create_signed_entity_descriptor from ..satosa_config import SATOSAConfig @@ -16,44 +17,58 @@ def _get_security_context(key, cert): return security_context(conf) -def _create_split_entity_descriptors(entities, secc, valid): +def _create_split_entity_descriptors(entities, secc, valid, sign=True): output = [] for module_name, eds in entities.items(): for i, ed in enumerate(eds): - 
output.append((create_signed_entity_descriptor(ed, secc, valid), "{}_{}.xml".format(module_name, i))) + ed_str = ( + create_signed_entity_descriptor(ed, secc, valid) + if sign + else create_entity_descriptor_metadata(ed, valid) + ) + output.append((ed_str, "{}_{}.xml".format(module_name, i))) return output -def _create_merged_entities_descriptors(entities, secc, valid, name): +def _create_merged_entities_descriptors(entities, secc, valid, name, sign=True): output = [] frontend_entity_descriptors = [e for sublist in entities.values() for e in sublist] for frontend in frontend_entity_descriptors: - output.append((create_signed_entity_descriptor(frontend, secc, valid), name)) + ed_str = ( + create_signed_entity_descriptor(frontend, secc, valid) + if sign + else create_entity_descriptor_metadata(frontend, valid) + ) + output.append((ed_str, name)) return output def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend_metadata=False, - split_backend_metadata=False): + split_backend_metadata=False, sign=True): """ Generates SAML metadata for the given PROXY_CONF, signed with the given KEY and associated CERT. 
""" satosa_config = SATOSAConfig(proxy_conf) - secc = _get_security_context(key, cert) + + if sign and (not key or not cert): + raise ValueError("Key and cert are required when signing") + secc = _get_security_context(key, cert) if sign else None + frontend_entities, backend_entities = create_entity_descriptors(satosa_config) output = [] if frontend_entities: if split_frontend_metadata: - output.extend(_create_split_entity_descriptors(frontend_entities, secc, valid)) + output.extend(_create_split_entity_descriptors(frontend_entities, secc, valid, sign)) else: - output.extend(_create_merged_entities_descriptors(frontend_entities, secc, valid, "frontend.xml")) + output.extend(_create_merged_entities_descriptors(frontend_entities, secc, valid, "frontend.xml", sign)) if backend_entities: if split_backend_metadata: - output.extend(_create_split_entity_descriptors(backend_entities, secc, valid)) + output.extend(_create_split_entity_descriptors(backend_entities, secc, valid, sign)) else: - output.extend(_create_merged_entities_descriptors(backend_entities, secc, valid, "backend.xml")) + output.extend(_create_merged_entities_descriptors(backend_entities, secc, valid, "backend.xml", sign)) for metadata, filename in output: path = os.path.join(dir, filename) @@ -64,8 +79,8 @@ def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_fron @click.command() @click.argument("proxy_conf") -@click.argument("key") -@click.argument("cert") +@click.argument("key", required=False) +@click.argument("cert", required=False) @click.option("--dir", type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True, readable=False, resolve_path=False), @@ -75,5 +90,7 @@ def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_fron help="Create one entity descriptor per file for the frontend metadata") @click.option("--split-backend", is_flag=True, type=click.BOOL, default=False, help="Create one entity descriptor per file for the backend 
metadata") -def construct_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend): - create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend) +@click.option("--sign/--no-sign", is_flag=True, type=click.BOOL, default=True, + help="Sign the generated metadata") +def construct_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend, sign): + create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend, sign) From c0a7f2293cecdfda1b8bdc612949e0cd3803340a Mon Sep 17 00:00:00 2001 From: Christos Kanellopoulos Date: Wed, 2 Mar 2022 21:09:08 +0000 Subject: [PATCH 246/288] Completes the support for the mdui:UIInfo element Adds: - keywords - information_url - privacy_statement_url --- src/satosa/backends/oauth.py | 8 +++ src/satosa/backends/saml2.py | 8 +++ src/satosa/metadata_creation/description.py | 59 ++++++++++++++++++- .../metadata_creation/test_description.py | 6 ++ 4 files changed, 79 insertions(+), 2 deletions(-) diff --git a/src/satosa/backends/oauth.py b/src/satosa/backends/oauth.py index 1e584f617..0cfa3a6ff 100644 --- a/src/satosa/backends/oauth.py +++ b/src/satosa/backends/oauth.py @@ -319,6 +319,14 @@ def get_metadata_desc_for_oauth_backend(entity_id, config): ui_description.add_display_name(name[0], name[1]) for logo in ui_info.get("logo", []): ui_description.add_logo(logo["image"], logo["width"], logo["height"], logo["lang"]) + for keywords in ui_info.get("keywords", []): + ui_description.add_keywords(keywords.get("text", []), keywords.get("lang")) + for information_url in ui_info.get("information_url", []): + ui_description.add_information_url(information_url.get("text"), information_url.get("lang")) + for privacy_statement_url in ui_info.get("privacy_statement_url", []): + ui_description.add_privacy_statement_url( + privacy_statement_url.get("text"), privacy_statement_url.get("lang") + ) description.ui_info = ui_description diff --git 
a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index be7a095fb..12a641732 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -623,6 +623,14 @@ def get_metadata_desc(self): ui_info_desc.add_display_name(name["text"], name["lang"]) for logo in ui_info.get("logo", []): ui_info_desc.add_logo(logo["text"], logo["width"], logo["height"], logo.get("lang")) + for keywords in ui_info.get("keywords", []): + ui_info_desc.add_keywords(keywords.get("text", []), keywords.get("lang")) + for information_url in ui_info.get("information_url", []): + ui_info_desc.add_information_url(information_url.get("text"), information_url.get("lang")) + for privacy_statement_url in ui_info.get("privacy_statement_url", []): + ui_info_desc.add_privacy_statement_url( + privacy_statement_url.get("text"), privacy_statement_url.get("lang") + ) description.ui_info = ui_info_desc entity_descriptions.append(description) diff --git a/src/satosa/metadata_creation/description.py b/src/satosa/metadata_creation/description.py index 26abdd555..4aa82fa31 100644 --- a/src/satosa/metadata_creation/description.py +++ b/src/satosa/metadata_creation/description.py @@ -52,6 +52,9 @@ def __init__(self): self._description = [] self._display_name = [] self._logos = [] + self._keywords = [] + self._information_url = [] + self._privacy_statement_url = [] def add_description(self, text, lang): """ @@ -96,6 +99,52 @@ def add_logo(self, text, width, height, lang=None): logo_entry["lang"] = lang self._logos.append(logo_entry) + def add_keywords(self, text, lang): + """ + Binds keywords to the given language + :type text: List + :type lang: str + + :param text: List of keywords + :param lang: language + """ + + if text: + self._keywords.append( + { + "text": [_keyword.replace(" ", "+") for _keyword in text], + "lang": lang if lang else "en", + } + ) + + def add_information_url(self, text, lang): + """ + Binds information_url to the given language + :type text: str + :type lang: str + 
+ :param text: Information URL + :param lang: language + """ + + if text: + self._information_url.append({"text": text, "lang": lang if lang else "en"}) + + def add_privacy_statement_url(self, text, lang): + """ + Binds privacy_statement_url to the given language + :type text: str + :type lang: str + + :param text: Privacy statement URL + :param lang: language + """ + + if text: + self._privacy_statement_url.append( + {"text": text, "lang": lang if lang else "en"} + ) + def to_dict(self): """ Returns a dictionary representation of the UIInfoDesc object. @@ -110,6 +159,12 @@ def to_dict(self): ui_info["display_name"] = self._display_name if self._logos: ui_info["logo"] = self._logos + if self._keywords: + ui_info["keywords"] = self._keywords + if self._information_url: + ui_info["information_url"] = self._information_url + if self._privacy_statement_url: + ui_info["privacy_statement_url"] = self._privacy_statement_url return {"service": {"idp": {"ui_info": ui_info}}} if ui_info else {} @@ -227,9 +282,9 @@ def to_dict(self): if self._organization: description.update(self._organization.to_dict()) if self._contact_person: - description['contact_person'] = [] + description["contact_person"] = [] for person in self._contact_person: - description['contact_person'].append(person.to_dict()) + description["contact_person"].append(person.to_dict()) if self._ui_info: description.update(self._ui_info.to_dict()) return description diff --git a/tests/satosa/metadata_creation/test_description.py b/tests/satosa/metadata_creation/test_description.py index ae8caf166..818d01a03 100644 --- a/tests/satosa/metadata_creation/test_description.py +++ b/tests/satosa/metadata_creation/test_description.py @@ -24,12 +24,18 @@ def test_to_dict(self): desc.add_description("test", "en") desc.add_display_name("my company", "en") desc.add_logo("logo.jpg", 80, 80, "en") + desc.add_keywords(["kw1", "kw2"], "en") + desc.add_information_url("https://test", "en") + 
desc.add_privacy_statement_url("https://test", "en") serialized = desc.to_dict() ui_info = serialized["service"]["idp"]["ui_info"] assert ui_info["description"] == [{"text": "test", "lang": "en"}] assert ui_info["display_name"] == [{"text": "my company", "lang": "en"}] assert ui_info["logo"] == [{"text": "logo.jpg", "width": 80, "height": 80, "lang": "en"}] + assert ui_info["keywords"] == [{"text": ["kw1", "kw2"], "lang": "en"}] + assert ui_info["information_url"] == [{"text": "https://test", "lang": "en"}] + assert ui_info["privacy_statement_url"] == [{"text": "https://test", "lang": "en"}] def test_to_dict_for_logo_without_lang(self): desc = UIInfoDesc() From fd64ece504ef89ce25e69f39d817477288997ad8 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 18:51:57 +0300 Subject: [PATCH 247/288] Avoid setting duplicate set-cookie headers Especially helpful for healthcheck requests that are continuously and with short interval checking an endpoint while never completing a flow thus not having the state cleared. 
Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index 404104920..2a862a969 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -219,8 +219,19 @@ def _save_state(self, resp, context): :param context: Session context """ - cookie = state_to_cookie(context.state, self.config["COOKIE_STATE_NAME"], "/", - self.config["STATE_ENCRYPTION_KEY"]) + cookie_name = self.config["COOKIE_STATE_NAME"] + cookie = state_to_cookie( + context.state, + name=cookie_name, + path="/", + encryption_key=self.config["STATE_ENCRYPTION_KEY"], + ) + resp.headers = [ + (name, value) + for (name, value) in resp.headers + if name != "Set-Cookie" + or not value.startswith(f"{cookie_name}=") + ] resp.headers.append(tuple(cookie.output().split(": ", 1))) def run(self, context): From 4041df2e79129f8e70342909fbfdccf6bb6f722c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 19:58:53 +0300 Subject: [PATCH 248/288] Rearrange class order Signed-off-by: Ivan Kanakarakis --- src/satosa/state.py | 206 ++++++++++++++++++++++---------------------- 1 file changed, 103 insertions(+), 103 deletions(-) diff --git a/src/satosa/state.py b/src/satosa/state.py index 05e343529..37609f4e9 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -25,6 +25,109 @@ _SESSION_ID_KEY = "SESSION_ID" +class State(UserDict): + """ + This class holds a state attribute object. A state object must be able to be converted to + a json string, otherwise will an exception be raised. + """ + + def __init__(self, urlstate_data=None, encryption_key=None): + """ + If urlstate is empty a new empty state instance will be returned. + + If urlstate is not empty the constructor will rebuild the state attribute objects + from the urlstate string. + :type urlstate_data: str + :type encryption_key: str + :rtype: State + + :param encryption_key: The key to be used for encryption. 
+ :param urlstate_data: A string created by the method urlstate in this class. + :return: An instance of this class. + """ + self.delete = False + + urlstate_data = {} if urlstate_data is None else urlstate_data + if urlstate_data and not encryption_key: + raise ValueError("If an 'urlstate_data' is supplied 'encrypt_key' must be specified.") + + if urlstate_data: + try: + urlstate_data_bytes = urlstate_data.encode("utf-8") + urlstate_data_b64decoded = base64.urlsafe_b64decode(urlstate_data_bytes) + lzma = LZMADecompressor() + urlstate_data_decompressed = lzma.decompress(urlstate_data_b64decoded) + urlstate_data_decrypted = _AESCipher(encryption_key).decrypt( + urlstate_data_decompressed + ) + lzma = LZMADecompressor() + urlstate_data_decrypted_decompressed = lzma.decompress(urlstate_data_decrypted) + urlstate_data_obj = json.loads(urlstate_data_decrypted_decompressed) + except Exception as e: + error_context = { + "message": "Failed to load state data. Reinitializing empty state.", + "reason": str(e), + "urlstate_data": urlstate_data, + } + logger.warning(error_context) + urlstate_data = {} + else: + urlstate_data = urlstate_data_obj + + session_id = ( + urlstate_data[_SESSION_ID_KEY] + if urlstate_data and _SESSION_ID_KEY in urlstate_data + else uuid4().urn + ) + urlstate_data[_SESSION_ID_KEY] = session_id + + super().__init__(urlstate_data) + + @property + def session_id(self): + return self.data.get(_SESSION_ID_KEY) + + def urlstate(self, encryption_key): + """ + Will return a url safe representation of the state. + + :type encryption_key: Key used for encryption. + :rtype: str + + :return: Url representation av of the state. 
+ """ + lzma = LZMACompressor() + urlstate_data = json.dumps(self.data) + urlstate_data = lzma.compress(urlstate_data.encode("UTF-8")) + urlstate_data += lzma.flush() + urlstate_data = _AESCipher(encryption_key).encrypt(urlstate_data) + lzma = LZMACompressor() + urlstate_data = lzma.compress(urlstate_data) + urlstate_data += lzma.flush() + urlstate_data = base64.urlsafe_b64encode(urlstate_data) + return urlstate_data.decode("utf-8") + + def copy(self): + """ + Returns a deepcopy of the state + + :rtype: satosa.state.State + + :return: A copy of the state + """ + state_copy = State() + state_copy.data = copy.deepcopy(self.data) + return state_copy + + @property + def state_dict(self): + """ + :rtype: dict[str, any] + :return: A copy of the state as dictionary. + """ + return copy.deepcopy(self.data) + + def state_to_cookie(state, name, path, encryption_key): """ Saves a state to a cookie @@ -156,106 +259,3 @@ def _unpad(b): :rtype: bytes """ return b[:-ord(b[len(b) - 1:])] - - -class State(UserDict): - """ - This class holds a state attribute object. A state object must be able to be converted to - a json string, otherwise will an exception be raised. - """ - - def __init__(self, urlstate_data=None, encryption_key=None): - """ - If urlstate is empty a new empty state instance will be returned. - - If urlstate is not empty the constructor will rebuild the state attribute objects - from the urlstate string. - :type urlstate_data: str - :type encryption_key: str - :rtype: State - - :param encryption_key: The key to be used for encryption. - :param urlstate_data: A string created by the method urlstate in this class. - :return: An instance of this class. 
- """ - self.delete = False - - urlstate_data = {} if urlstate_data is None else urlstate_data - if urlstate_data and not encryption_key: - raise ValueError("If an 'urlstate_data' is supplied 'encrypt_key' must be specified.") - - if urlstate_data: - try: - urlstate_data_bytes = urlstate_data.encode("utf-8") - urlstate_data_b64decoded = base64.urlsafe_b64decode(urlstate_data_bytes) - lzma = LZMADecompressor() - urlstate_data_decompressed = lzma.decompress(urlstate_data_b64decoded) - urlstate_data_decrypted = _AESCipher(encryption_key).decrypt( - urlstate_data_decompressed - ) - lzma = LZMADecompressor() - urlstate_data_decrypted_decompressed = lzma.decompress(urlstate_data_decrypted) - urlstate_data_obj = json.loads(urlstate_data_decrypted_decompressed) - except Exception as e: - error_context = { - "message": "Failed to load state data. Reinitializing empty state.", - "reason": str(e), - "urlstate_data": urlstate_data, - } - logger.warning(error_context) - urlstate_data = {} - else: - urlstate_data = urlstate_data_obj - - session_id = ( - urlstate_data[_SESSION_ID_KEY] - if urlstate_data and _SESSION_ID_KEY in urlstate_data - else uuid4().urn - ) - urlstate_data[_SESSION_ID_KEY] = session_id - - super().__init__(urlstate_data) - - @property - def session_id(self): - return self.data.get(_SESSION_ID_KEY) - - def urlstate(self, encryption_key): - """ - Will return a url safe representation of the state. - - :type encryption_key: Key used for encryption. - :rtype: str - - :return: Url representation av of the state. 
- """ - lzma = LZMACompressor() - urlstate_data = json.dumps(self.data) - urlstate_data = lzma.compress(urlstate_data.encode("UTF-8")) - urlstate_data += lzma.flush() - urlstate_data = _AESCipher(encryption_key).encrypt(urlstate_data) - lzma = LZMACompressor() - urlstate_data = lzma.compress(urlstate_data) - urlstate_data += lzma.flush() - urlstate_data = base64.urlsafe_b64encode(urlstate_data) - return urlstate_data.decode("utf-8") - - def copy(self): - """ - Returns a deepcopy of the state - - :rtype: satosa.state.State - - :return: A copy of the state - """ - state_copy = State() - state_copy.data = copy.deepcopy(self.data) - return state_copy - - @property - def state_dict(self): - """ - :rtype: dict[str, any] - :return: A copy of the state as dictionary. - """ - return copy.deepcopy(self.data) From 1206ea58aff60dedd0f5e1f89488237fa5f947dd Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 20:02:20 +0300 Subject: [PATCH 249/288] feat: make cookie parameters configurable Signed-off-by: Ivan Kanakarakis --- doc/README.md | 4 +++ src/satosa/base.py | 6 +++- src/satosa/satosa_config.py | 8 ++--- src/satosa/state.py | 58 +++++++++++++++++++++++-------------- tests/satosa/test_state.py | 4 +-- 5 files changed, 51 insertions(+), 29 deletions(-) diff --git a/doc/README.md b/doc/README.md index 8d001847e..fd266723b 100644 --- a/doc/README.md +++ b/doc/README.md @@ -80,6 +80,10 @@ bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE | -------------- | --------- | ------------- | ----------- | | `BASE` | string | `https://proxy.example.com` | base url of the proxy | | `COOKIE_STATE_NAME` | string | `satosa_state` | name of the cookie SATOSA uses for preserving state between requests | +| `COOKIE_SECURE` | bool | `True` | whether to include the cookie only when the request is transmitted over a secure channel | +| `COOKIE_HTTPONLY` | bool | `True` | whether the cookie should only be accessed only by the server | +| `COOKIE_SAMESITE` | string | `"None"` | 
whether the cookie should only be sent with requests initiated from the same registrable domain | +| `COOKIE_MAX_AGE` | string | `"1200"` | indicates the maximum lifetime of the cookie represented as the number of seconds until the cookie expires | | `CONTEXT_STATE_DELETE` | bool | `True` | controls whether SATOSA will delete the state cookie after receiving the authentication response from the upstream IdP| | `STATE_ENCRYPTION_KEY` | string | `52fddd3528a44157` | key used for encrypting the state cookie, will be overridden by the environment variable `SATOSA_STATE_ENCRYPTION_KEY` if it is set | | `INTERNAL_ATTRIBUTES` | string | `example/internal_attributes.yaml` | path to attribute mapping diff --git a/src/satosa/base.py b/src/satosa/base.py index 2a862a969..b53b4d8ab 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -204,7 +204,7 @@ def _load_state(self, context): state = State() finally: context.state = state - msg = "Loaded state {state} from cookie {cookie}".format(state=state, cookie=context.cookie) + msg = f"Loaded state {state} from cookie {context.cookie}" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) @@ -225,6 +225,10 @@ def _save_state(self, resp, context): name=cookie_name, path="/", encryption_key=self.config["STATE_ENCRYPTION_KEY"], + secure=self.config.get("COOKIE_SECURE"), + httponly=self.config.get("COOKIE_HTTPONLY"), + samesite=self.config.get("COOKIE_SAMESITE"), + max_age=self.config.get("COOKIE_MAX_AGE"), ) resp.headers = [ (name, value) diff --git a/src/satosa/satosa_config.py b/src/satosa/satosa_config.py index b107e5728..d45280c41 100644 --- a/src/satosa/satosa_config.py +++ b/src/satosa/satosa_config.py @@ -40,7 +40,7 @@ def __init__(self, config): # Load sensitive config from environment variables for key in SATOSAConfig.sensitive_dict_keys: - val = os.environ.get("SATOSA_{key}".format(key=key)) + val = os.environ.get(f"SATOSA_{key}") if val: self._config[key] = val @@ -56,7 
+56,7 @@ def __init__(self, config): plugin_configs.append(plugin_config) break else: - raise SATOSAConfigurationError('Failed to load plugin config \'{}\''.format(config)) + raise SATOSAConfigurationError(f"Failed to load plugin config '{config}'") self._config[key] = plugin_configs for parser in parsers: @@ -86,8 +86,8 @@ def _verify_dict(self, conf): raise SATOSAConfigurationError("Missing key '%s' in config" % key) for key in SATOSAConfig.sensitive_dict_keys: - if key not in conf and "SATOSA_{key}".format(key=key) not in os.environ: - raise SATOSAConfigurationError("Missing key '%s' from config and ENVIRONMENT" % key) + if key not in conf and f"SATOSA_{key}" not in os.environ: + raise SATOSAConfigurationError(f"Missing key '{key}' from config and ENVIRONMENT") def __getitem__(self, item): """ diff --git a/src/satosa/state.py b/src/satosa/state.py index 37609f4e9..1fc768425 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -128,31 +128,46 @@ def state_dict(self): return copy.deepcopy(self.data) -def state_to_cookie(state, name, path, encryption_key): +def state_to_cookie( + state: State, + *, + name: str, + path: str, + encryption_key: str, + secure: bool = None, + httponly: bool = None, + samesite: str = None, + max_age: str = None, +) -> SimpleCookie: """ Saves a state to a cookie - :type state: satosa.state.State - :type name: str - :type path: str - :type encryption_key: str - :rtype: satosa.cookies.SimpleCookie - - :param state: The state to save - :param name: Name identifier of the cookie - :param path: Endpoint path the cookie will be associated to - :param encryption_key: Key to encrypt the state information - :return: A cookie + :param state: the data to save + :param name: identifier of the cookie + :param path: path the cookie will be associated to + :param encryption_key: the key to use to encrypt the state information + :param secure: whether to include the cookie only when the request is transmitted + over a secure channel + :param 
httponly: whether the cookie should only be accessed only by the server + :param samesite: whether the cookie should only be sent with requests + initiated from the same registrable domain + :param max_age: indicates the maximum lifetime of the cookie, + represented as the number of seconds until the cookie expires + :return: A cookie object """ - - cookie_data = "" if state.delete else state.urlstate(encryption_key) - cookie = SimpleCookie() - cookie[name] = cookie_data - cookie[name]["samesite"] = "None" - cookie[name]["secure"] = True + cookie[name] = "" if state.delete else state.urlstate(encryption_key) cookie[name]["path"] = path - cookie[name]["max-age"] = 0 if state.delete else "" + cookie[name]["secure"] = secure if secure is not None else True + cookie[name]["httponly"] = httponly if httponly is not None else "" + cookie[name]["samesite"] = samesite if samesite is not None else "None" + cookie[name]["max-age"] = ( + 0 + if state.delete + else max_age + if max_age is not None + else "" + ) msg = "Saved state in cookie {name} with properties {props}".format( name=name, props=list(cookie[name].items()) @@ -163,7 +178,7 @@ def state_to_cookie(state, name, path, encryption_key): return cookie -def cookie_to_state(cookie_str, name, encryption_key): +def cookie_to_state(cookie_str: str, name: str, encryption_key: str) -> State: """ Loads a state from a cookie @@ -181,8 +196,7 @@ def cookie_to_state(cookie_str, name, encryption_key): cookie = SimpleCookie(cookie_str) state = State(cookie[name].value, encryption_key) except KeyError as e: - msg_tmpl = 'No cookie named {name} in {data}' - msg = msg_tmpl.format(name=name, data=cookie_str) + msg = f'No cookie named {name} in {cookie_str}' raise SATOSAStateError(msg) from e except ValueError as e: msg_tmpl = 'Failed to process {name} from {data}' diff --git a/tests/satosa/test_state.py b/tests/satosa/test_state.py index 76b33d60c..eadee2182 100644 --- a/tests/satosa/test_state.py +++ b/tests/satosa/test_state.py @@ 
-100,7 +100,7 @@ def test_encode_decode_of_state(self): path = "/" encrypt_key = "2781y4hef90" - cookie = state_to_cookie(state, cookie_name, path, encrypt_key) + cookie = state_to_cookie(state, name=cookie_name, path=path, encryption_key=encrypt_key) cookie_str = cookie[cookie_name].OutputString() loaded_state = cookie_to_state(cookie_str, cookie_name, encrypt_key) @@ -117,7 +117,7 @@ def test_state_to_cookie_produces_cookie_without_max_age_for_state_that_should_b path = "/" encrypt_key = "2781y4hef90" - cookie = state_to_cookie(state, cookie_name, path, encrypt_key) + cookie = state_to_cookie(state, name=cookie_name, path=path, encryption_key=encrypt_key) cookie_str = cookie[cookie_name].OutputString() parsed_cookie = SimpleCookie(cookie_str) From 44aa4ac541ec109417a2cd7c76327e254877581a Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 20:17:28 +0300 Subject: [PATCH 250/288] Release v8.4.0 - Make cookie parameters configurable - Avoid setting duplicate set-cookie headers - Complete the support for the mdui:UIInfo element - satosa-saml-metadata: make signing optional - metadata_creation: for SAML backend, use sp.config to render metadata - tests: update markers of supported Python versions - deps: move away from pkg_resources when deriving the package version at runtime Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 11 +++++++++++ setup.py | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index c1fc9a358..35f7a82c6 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.3.0 +current_version = 8.4.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 2824cc0a1..ee782f08f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## 8.4.0 (2023-06-11) + +- Make cookie parameters configurable +- Avoid setting duplicate set-cookie headers +- Complete the support for the mdui:UIInfo 
element +- satosa-saml-metadata: make signing optional +- metadata_creation: for SAML backend, use sp.config to render metadata +- tests: update markers of supported Python versions +- deps: move away from pkg_resources when deriving the package version at runtime + + ## 8.3.0 (2023-06-08) - FilterAttributeValues plugin: add new filter types shibmdscope_match_scope and shibmdscope_match_value; add tests diff --git a/setup.py b/setup.py index 557691df7..59065f6ac 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.3.0', + version='8.4.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 4ba0b2f4fe3d779921ee9c31841f75ebffdcbb18 Mon Sep 17 00:00:00 2001 From: Christos Kanellopoulos Date: Mon, 13 Apr 2020 14:49:01 +0200 Subject: [PATCH 251/288] Handle missing state from cookie and redirect to generic error page --- src/satosa/backends/apple.py | 1 - src/satosa/backends/github.py | 1 - src/satosa/backends/linkedin.py | 1 - src/satosa/backends/oauth.py | 1 - src/satosa/backends/openid_connect.py | 44 +++++++-- src/satosa/backends/orcid.py | 1 - src/satosa/backends/saml2.py | 130 ++++++++++++++++++++------ src/satosa/base.py | 47 +++++++++- src/satosa/context.py | 1 + src/satosa/exception.py | 35 +++++++ src/satosa/frontends/saml2.py | 46 ++++++++- 11 files changed, 261 insertions(+), 47 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index edace8641..37f756a68 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -245,7 +245,6 @@ def response_endpoint(self, context, *args): msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - del context.state[self.name] internal_resp = self._translate_response( all_user_claims, self.client.authorization_endpoint ) diff --git a/src/satosa/backends/github.py b/src/satosa/backends/github.py 
index b04906f56..70944e371 100644 --- a/src/satosa/backends/github.py +++ b/src/satosa/backends/github.py @@ -99,7 +99,6 @@ def _authn_response(self, context): internal_response.attributes = self.converter.to_internal( self.external_type, user_info) internal_response.subject_id = str(user_info[self.user_id_attr]) - del context.state[self.name] return self.auth_callback_func(context, internal_response) def user_information(self, access_token): diff --git a/src/satosa/backends/linkedin.py b/src/satosa/backends/linkedin.py index 06a5cbac8..8d3a85b4c 100644 --- a/src/satosa/backends/linkedin.py +++ b/src/satosa/backends/linkedin.py @@ -110,7 +110,6 @@ def _authn_response(self, context): self.external_type, user_info) internal_response.subject_id = user_info[self.user_id_attr] - del context.state[self.name] return self.auth_callback_func(context, internal_response) def user_information(self, access_token, api): diff --git a/src/satosa/backends/oauth.py b/src/satosa/backends/oauth.py index 0cfa3a6ff..3e2bd041b 100644 --- a/src/satosa/backends/oauth.py +++ b/src/satosa/backends/oauth.py @@ -145,7 +145,6 @@ def _authn_response(self, context): internal_response = InternalData(auth_info=self.auth_info(context.request)) internal_response.attributes = self.converter.to_internal(self.external_type, user_info) internal_response.subject_id = user_info[self.user_id_attr] - del context.state[self.name] return self.auth_callback_func(context, internal_response) def auth_info(self, request): diff --git a/src/satosa/backends/openid_connect.py b/src/satosa/backends/openid_connect.py index cb97154f6..58d47af9b 100644 --- a/src/satosa/backends/openid_connect.py +++ b/src/satosa/backends/openid_connect.py @@ -19,7 +19,9 @@ from satosa.internal import InternalData from .base import BackendModule from .oauth import get_metadata_desc_for_oauth_backend -from ..exception import SATOSAAuthenticationError, SATOSAError +from ..exception import SATOSAAuthenticationError +from ..exception import 
SATOSAError +from ..exception import SATOSAMissingStateError from ..response import Redirect @@ -58,11 +60,24 @@ def __init__(self, auth_callback_func, internal_attributes, config, base_url, na self.config = config cfg_verify_ssl = config["client"].get("verify_ssl", True) oidc_settings = PyoidcSettings(verify_ssl=cfg_verify_ssl) - self.client = _create_client( - provider_metadata=config["provider_metadata"], - client_metadata=config["client"]["client_metadata"], - settings=oidc_settings, - ) + + try: + self.client = _create_client( + provider_metadata=config["provider_metadata"], + client_metadata=config["client"]["client_metadata"], + settings=oidc_settings, + ) + except Exception as exc: + msg = { + "message": f"Failed to initialize client", + "error": str(exc), + "client_metadata": self.config['client']['client_metadata'], + "provider_metadata": self.config['provider_metadata'], + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise SATOSAAuthenticationError(context.state, msg) from exc + if "scope" not in config["client"]["auth_req_params"]: config["auth_req_params"]["scope"] = "openid" if "response_type" not in config["client"]["auth_req_params"]: @@ -185,6 +200,22 @@ def response_endpoint(self, context, *args): :param args: None :return: """ + + if self.name not in context.state: + """ + If we end up here, it means that the user returns to the proxy + without the SATOSA session cookie. 
This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + error = "Received AuthN response without a SATOSA session cookie" + raise SATOSAMissingStateError(error) + backend_state = context.state[self.name] authn_resp = self.client.parse_response(AuthorizationResponse, info=context.request, sformat="dict") if backend_state[STATE_KEY] != authn_resp["state"]: @@ -215,7 +246,6 @@ def response_endpoint(self, context, *args): msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - del context.state[self.name] internal_resp = self._translate_response(all_user_claims, self.client.authorization_endpoint) return self.auth_callback_func(context, internal_resp) diff --git a/src/satosa/backends/orcid.py b/src/satosa/backends/orcid.py index d0ceee9b9..649e72451 100644 --- a/src/satosa/backends/orcid.py +++ b/src/satosa/backends/orcid.py @@ -79,7 +79,6 @@ def _authn_response(self, context): internal_response.attributes = self.converter.to_internal( self.external_type, user_info) internal_response.subject_id = user_info[self.user_id_attr] - del context.state[self.name] return self.auth_callback_func(context, internal_response) def user_information(self, access_token, orcid, name=None): diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 12a641732..3376ab300 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -27,6 +27,8 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.exception import SATOSAAuthenticationError +from satosa.exception import 
SATOSAMissingStateError +from satosa.exception import SATOSAAuthenticationFlowError from satosa.response import SeeOther, Response from satosa.saml_util import make_saml_response from satosa.metadata_creation.description import ( @@ -224,6 +226,14 @@ def disco_query(self, context): loc = self.sp.create_discovery_service_request( disco_url, self.sp.config.entityid, **args ) + + msg = { + "message": "Sending user to the discovery service", + "disco_url": loc + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + return SeeOther(loc) def construct_requested_authn_context(self, entity_id, *, target_accr=None): @@ -268,10 +278,13 @@ def authn_request(self, context, entity_id): with open(self.idp_blacklist_file) as blacklist_file: blacklist_array = json.load(blacklist_file)['blacklist'] if entity_id in blacklist_array: - msg = "IdP with EntityID {} is blacklisted".format(entity_id) + msg = { + "message": "AuthnRequest Failed", + "error": f"Selected IdP with EntityID {entity_id} is blacklisted for this backend", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline, exc_info=False) - raise SATOSAAuthenticationError(context.state, "Selected IdP is blacklisted for this backend") + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) kwargs = {} target_accr = context.state.get(Context.KEY_TARGET_AUTHN_CONTEXT_CLASS_REF) @@ -299,16 +312,22 @@ def authn_request(self, context, entity_id): **kwargs, ) except Exception as e: - msg = "Failed to construct the AuthnRequest for state" + msg = { + "message": "AuthnRequest Failed", + "error": f"Failed to construct the AuthnRequest for state: {e}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=True) - raise SATOSAAuthenticationError(context.state, "Failed to construct the AuthnRequest") from e + logger.info(logline) + raise 
SATOSAAuthenticationError(context.state, msg) from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: if req_id in self.outstanding_queries: - msg = "Request with duplicate id {}".format(req_id) + msg = { + "message": "AuthnRequest Failed", + "error": f"Request with duplicate id {req_id}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + logger.info(logline) raise SATOSAAuthenticationError(context.state, msg) self.outstanding_queries[req_id] = req_id @@ -378,43 +397,78 @@ def authn_response(self, context, binding): :param binding: The saml binding type :return: response """ + + if self.name not in context.state: + """ + If we end up here, it means that the user returns to the proxy + without the SATOSA session cookie. This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + msg = { + "message": "Authentication failed", + "error": "Received AuthN response without a SATOSA session cookie", + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + raise SATOSAMissingStateError(msg) + if not context.request.get("SAMLResponse"): - msg = "Missing Response for state" + msg = { + "message": "Authentication failed", + "error": "SAML Response not found in context.request", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "Missing Response") + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) try: authn_response = self.sp.parse_authn_request_response( context.request["SAMLResponse"], - binding, 
outstanding=self.outstanding_queries) - except Exception as err: - msg = "Failed to parse authn request for state" + binding, + outstanding=self.outstanding_queries, + ) + except Exception as e: + msg = { + "message": "Authentication failed", + "error": f"Failed to parse Authn response: {err}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline, exc_info=True) - raise SATOSAAuthenticationError(context.state, "Failed to parse authn request") from err + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: req_id = authn_response.in_response_to if req_id not in self.outstanding_queries: - msg = "No request with id: {}".format(req_id), + msg = { + "message": "Authentication failed", + "error": f"No corresponding request with id: {req_id}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + logger.info(logline) raise SATOSAAuthenticationError(context.state, msg) del self.outstanding_queries[req_id] # check if the relay_state matches the cookie state if context.state[self.name]["relay_state"] != context.request["RelayState"]: - msg = "State did not match relay state for state" + msg = { + "message": "Authentication failed", + "error": "Response state query param did not match relay state for request", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "State did not match relay state") + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) context.decorate(Context.KEY_METADATA_STORE, self.sp.metadata) if self.config.get(SAMLBackend.KEY_MEMORIZE_IDP): issuer = authn_response.response.issuer.text.strip() context.state[Context.KEY_MEMORIZED_IDP] = issuer - context.state.pop(self.name, None) context.state.pop(Context.KEY_FORCE_AUTHN, None) return 
self.auth_callback_func(context, self._translate_response(authn_response, context.state))
 
@@ -431,13 +485,18 @@ def disco_response(self, context):
         info = context.request
         state = context.state
 
-        try:
-            entity_id = info["entityID"]
-        except KeyError as err:
-            msg = "No IDP chosen for state"
-            logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg)
-            logger.debug(logline, exc_info=True)
-            raise SATOSAAuthenticationError(state, "No IDP chosen") from err
+        if 'SATOSA_BASE' not in state:
+            raise SATOSAAuthenticationFlowError("Discovery response without AuthN request")
+
+        entity_id = info.get("entityID")
+        msg = {
+            "message": "Received response from the discovery service",
+            "entity_id": entity_id,
+        }
+        logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
+        logger.info(logline)
+        if not entity_id:
+            raise SATOSAAuthenticationError(state, msg)
 
         return self.authn_request(context, entity_id)
 
@@ -488,11 +547,20 @@ def _translate_response(self, response, state):
             subject_id=name_id,
         )
 
-        msg = "backend received attributes:\n{}".format(
-            json.dumps(response.ava, indent=4)
-        )
+        msg = "backend received attributes: {}".format(response.ava)
         logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg)
         logger.debug(logline)
+
+        msg = {
+            "message": "Attributes received by the backend",
+            "issuer": issuer,
+            "attributes": " ".join(list(response.ava.keys()))
+        }
+        if name_id_format:
+            msg['name_id'] = name_id_format
+        logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg)
+        logger.info(logline)
+
         return internal_resp
 
     def _metadata_endpoint(self, context):
diff --git a/src/satosa/base.py b/src/satosa/base.py
index b53b4d8ab..388a4c900 100644
--- a/src/satosa/base.py
+++ b/src/satosa/base.py
@@ -8,8 +8,15 @@
 from saml2.s_utils import UnknownSystemEntity
 
 from satosa import util
+from satosa.response import Redirect
+from satosa.response import BadRequest
 from .context import Context
-from .exception import 
SATOSAError, SATOSAAuthenticationError, SATOSAUnknownError +from .exception import SATOSAError +from .exception import SATOSAAuthenticationError +from .exception import SATOSAUnknownError +from .exception import SATOSAMissingStateError +from .exception import SATOSAAuthenticationFlowError +from .exception import SATOSABadRequestError from .plugin_loader import load_backends, load_frontends from .plugin_loader import load_request_microservices, load_response_microservices from .routing import ModuleRouter, SATOSANoBoundEndpointError @@ -253,6 +260,39 @@ def run(self, context): spec = self.module_router.endpoint_routing(context) resp = self._run_bound_endpoint(context, spec) self._save_state(resp, context) + except SATOSABadRequestError as e: + msg = { + "message": "Bad Request", + "error": e.error, + "error_id": uuid.uuid4().urn + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + return BadRequest(e.error) + except SATOSAMissingStateError as e: + msg = { + "message": "Missing SATOSA State", + "error": e.error, + "error_id": uuid.uuid4().urn + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + if self.config.get("ERROR_URL"): + return Redirect(self.config.get("ERROR_URL")) + else: + raise + except SATOSAAuthenticationFlowError as e: + msg = { + "message": "SATOSA Authentication Flow Error", + "error": e.error, + "error_id": uuid.uuid4().urn + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + if self.config.get("ERROR_URL"): + return Redirect(self.config.get("ERROR_URL")) + else: + raise except SATOSANoBoundEndpointError: raise except SATOSAError: @@ -269,7 +309,10 @@ def run(self, context): msg = "Uncaught exception" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline, exc_info=True) - raise SATOSAUnknownError("Unknown error") from err + if 
self.config.get("ERROR_URL"): + return Redirect(self.config.get("ERROR_URL")) + else: + raise SATOSAUnknownError("Unknown error") from err return resp diff --git a/src/satosa/context.py b/src/satosa/context.py index 1cf140586..33c365a51 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -18,6 +18,7 @@ class Context(object): KEY_TARGET_ENTITYID = 'target_entity_id' KEY_FORCE_AUTHN = 'force_authn' KEY_MEMORIZED_IDP = 'memorized_idp' + KEY_REQUESTER_METADATA = 'requester_metadata' KEY_AUTHN_CONTEXT_CLASS_REF = 'authn_context_class_ref' KEY_TARGET_AUTHN_CONTEXT_CLASS_REF = 'target_authn_context_class_ref' diff --git a/src/satosa/exception.py b/src/satosa/exception.py index 02f3c0554..f4fc4bc0c 100644 --- a/src/satosa/exception.py +++ b/src/satosa/exception.py @@ -67,3 +67,38 @@ def message(self): :return: Exception message """ return self._message.format(error_id=self.error_id) + +class SATOSABasicError(SATOSAError): + """ + eduTEAMS error + """ + def __init__(self, error): + self.error = error + +class SATOSAMissingStateError(SATOSABasicError): + """ + SATOSA Missing State error. + + This exception should be raised when SATOSA receives a request as part of + an authentication flow and while the session state cookie is expected for + that step, it is not included in the request + """ + pass + +class SATOSAAuthenticationFlowError(SATOSABasicError): + """ + SATOSA Flow error. + + This exception should be raised when SATOSA receives a request that cannot + be serviced because previous steps in the authentication flow for that session + cannot be found + """ + pass + +class SATOSABadRequestError(SATOSABasicError): + """ + SATOSA Bad Request error. 
+ + This exception should be raised when we want to return an HTTP 400 Bad Request + """ + pass diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 379635fc2..cecd533db 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -34,6 +34,8 @@ from ..response import ServiceError from ..saml_util import make_saml_response from satosa.exception import SATOSAError +from satosa.exception import SATOSABadRequestError +from satosa.exception import SATOSAMissingStateError import satosa.util as util import satosa.logging_util as lu @@ -152,7 +154,23 @@ def load_state(self, state): :param state: The current state :return: The dictionary given by the save_state function """ - state_data = state[self.name] + try: + state_data = state[self.name] + except KeyError: + """ + If we end up here, it means that the user returns to the proxy + without the SATOSA session cookie. This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + error = "Received AuthN response without a SATOSA session cookie" + raise SATOSAMissingStateError(error) + if isinstance(state_data["resp_args"]["name_id_policy"], str): state_data["resp_args"]["name_id_policy"] = name_id_policy_from_string( state_data["resp_args"]["name_id_policy"]) @@ -190,7 +208,16 @@ def _handle_authn_request(self, context, binding_in, idp): :param idp: The saml frontend idp server :return: response """ - req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in) + + try: + req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in) + except KeyError: + """ + HTTP clients that call the SSO endpoint without sending SAML AuthN 
+ request will receive a "400 Bad Request" response + """ + raise SATOSABadRequestError("HTTP request does not include a SAML AuthN request") + authn_req = req_info.message msg = "{}".format(authn_req) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) @@ -444,6 +471,21 @@ def _handle_authn_response(self, context, internal_response, idp): self._set_common_domain_cookie(internal_response, http_args, context) del context.state[self.name] + + msg = { + "message": "Sending SAML AuthN Response", + "issuer": internal_response.auth_info.issuer, + "requester": sp_entity_id, + "signed response": sign_response, + "signed assertion": sign_assertion, + "encrypted": encrypt_assertion, + "attributes": " ".join(list(ava.keys())) + } + if nameid_format: + msg['name_id'] = nameid_format + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + return make_saml_response(resp_args["binding"], http_args) def _handle_backend_error(self, exception, idp): From d7adb92fd259c11b054421e10b18535f2d43b8fa Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 18:20:24 +0300 Subject: [PATCH 252/288] Add debug info for request data Signed-off-by: Ivan Kanakarakis --- src/satosa/proxy_server.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 03305d4ce..b06534b11 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -143,6 +143,18 @@ def __call__(self, environ, start_response, debug=False): environ['wsgi.input'].seek(0) + logline = { + "message": "Proxy server received request", + "request_method": context.request_method, + "request_uri": context.request_uri, + "content_length": content_length, + "request_data": context.request, + "query_params": context.qs_params, + "http_headers": context.http_headers, + "server_headers": context.server, + } + logger.debug(logline) + try: resp = self.run(context) if 
isinstance(resp, Exception): From e66bfcb25e774b53a8d43256c397d81593191bdd Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 18:21:03 +0300 Subject: [PATCH 253/288] Handle more generic error cases Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 55 +++++++++++++++++++++++++------------- src/satosa/proxy_server.py | 17 +++--------- 2 files changed, 41 insertions(+), 31 deletions(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index 388a4c900..9c562b457 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -10,6 +10,7 @@ from satosa import util from satosa.response import Redirect from satosa.response import BadRequest +from satosa.response import NotFound from .context import Context from .exception import SATOSAError from .exception import SATOSAAuthenticationError @@ -268,7 +269,11 @@ def run(self, context): } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) - return BadRequest(e.error) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) + else: + return BadRequest(e.error) except SATOSAMissingStateError as e: msg = { "message": "Missing SATOSA State", @@ -277,8 +282,9 @@ def run(self, context): } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) - if self.config.get("ERROR_URL"): - return Redirect(self.config.get("ERROR_URL")) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) else: raise except SATOSAAuthenticationFlowError as e: @@ -289,30 +295,43 @@ def run(self, context): } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) - if self.config.get("ERROR_URL"): - return Redirect(self.config.get("ERROR_URL")) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) else: raise - except 
SATOSANoBoundEndpointError: - raise + except SATOSANoBoundEndpointError as e: + msg = str(e) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + return NotFound("The Service or Identity Provider you requested could not be found.") except SATOSAError: msg = "Uncaught SATOSA error" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=True) - raise - except UnknownSystemEntity as err: - msg = "configuration error: unknown system entity " + str(err) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) + else: + raise + except UnknownSystemEntity as e: + msg = f"Configuration error: unknown system entity: {e}" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=False) - raise - except Exception as err: + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) + else: + raise + except Exception as e: msg = "Uncaught exception" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=True) - if self.config.get("ERROR_URL"): - return Redirect(self.config.get("ERROR_URL")) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) else: - raise SATOSAUnknownError("Unknown error") from err + raise SATOSAUnknownError("Unknown error") from e return resp diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index b06534b11..7968167ea 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -7,13 +7,12 @@ from cookies_samesite_compat import CookiesSameSiteCompatMiddleware import satosa -import satosa.logging_util as lu from .base import SATOSABase from .context import Context -from .response 
import ServiceError, NotFound -from .routing import SATOSANoBoundEndpointError -from saml2.s_utils import UnknownSystemEntity +from .response import ServiceError +from .response import NotFound + logger = logging.getLogger(__name__) @@ -160,16 +159,8 @@ def __call__(self, environ, start_response, debug=False): if isinstance(resp, Exception): raise resp return resp(environ, start_response) - except SATOSANoBoundEndpointError as e: - msg = str(e) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - resp = NotFound("The Service or Identity Provider you requested could not be found.") - return resp(environ, start_response) except Exception as e: - if type(e) != UnknownSystemEntity: - logline = "{}".format(e) - logger.exception(logline) + logger.exception(str(e)) if debug: raise From 189871d711128f7096b8ff6654d8a8fbb7c99c31 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Sun, 11 Jun 2023 18:35:36 +0300 Subject: [PATCH 254/288] Fix tests after changes in state presence Signed-off-by: Ivan Kanakarakis --- tests/satosa/backends/test_bitbucket.py | 2 -- tests/satosa/backends/test_oauth.py | 2 -- tests/satosa/backends/test_openid_connect.py | 2 -- tests/satosa/backends/test_saml2.py | 6 ++---- 4 files changed, 2 insertions(+), 10 deletions(-) diff --git a/tests/satosa/backends/test_bitbucket.py b/tests/satosa/backends/test_bitbucket.py index 192c55a84..d6cf25bac 100644 --- a/tests/satosa/backends/test_bitbucket.py +++ b/tests/satosa/backends/test_bitbucket.py @@ -159,7 +159,6 @@ def test_authn_response(self, incoming_authn_response): mock_do_access_token_request self.bb_backend._authn_response(incoming_authn_response) - assert self.bb_backend.name not in incoming_authn_response.state self.assert_expected_attributes() self.assert_token_request(**mock_do_access_token_request.call_args[1]) @@ -190,5 +189,4 @@ def test_entire_flow(self, context): "state": mock_get_state.return_value } 
self.bb_backend._authn_response(context) - assert self.bb_backend.name not in context.state self.assert_expected_attributes() diff --git a/tests/satosa/backends/test_oauth.py b/tests/satosa/backends/test_oauth.py index 0100cfaa9..22afc8ee7 100644 --- a/tests/satosa/backends/test_oauth.py +++ b/tests/satosa/backends/test_oauth.py @@ -136,7 +136,6 @@ def test_authn_response(self, incoming_authn_response): self.fb_backend.consumer.do_access_token_request = mock_do_access_token_request self.fb_backend._authn_response(incoming_authn_response) - assert self.fb_backend.name not in incoming_authn_response.state self.assert_expected_attributes() self.assert_token_request(**mock_do_access_token_request.call_args[1]) @@ -164,5 +163,4 @@ def test_entire_flow(self, context): "state": mock_get_state.return_value } self.fb_backend._authn_response(context) - assert self.fb_backend.name not in context.state self.assert_expected_attributes() diff --git a/tests/satosa/backends/test_openid_connect.py b/tests/satosa/backends/test_openid_connect.py index b898e157c..34bac79fe 100644 --- a/tests/satosa/backends/test_openid_connect.py +++ b/tests/satosa/backends/test_openid_connect.py @@ -163,7 +163,6 @@ def test_response_endpoint(self, backend_config, internal_attributes, userinfo, self.setup_userinfo_endpoint(backend_config["provider_metadata"]["userinfo_endpoint"], userinfo) self.oidc_backend.response_endpoint(incoming_authn_response) - assert self.oidc_backend.name not in incoming_authn_response.state args = self.oidc_backend.auth_callback_func.call_args[0] assert isinstance(args[0], Context) @@ -198,7 +197,6 @@ def test_entire_flow(self, context, backend_config, internal_attributes, userinf "token_type": "Bearer", } self.oidc_backend.response_endpoint(context) - assert self.oidc_backend.name not in context.state args = self.oidc_backend.auth_callback_func.call_args[0] self.assert_expected_attributes(internal_attributes, userinfo, args[1].attributes) diff --git 
a/tests/satosa/backends/test_saml2.py b/tests/satosa/backends/test_saml2.py index dcfdb0fa9..de349d9ad 100644 --- a/tests/satosa/backends/test_saml2.py +++ b/tests/satosa/backends/test_saml2.py @@ -132,11 +132,12 @@ def test_full_flow(self, context, idp_conf, sp_conf): disco_resp = parse_qs(urlparse(resp.message).query) info = parse_qs(urlparse(disco_resp["return"][0]).query) info["entityID"] = idp_conf["entityid"] - request_context = Context() + request_context = context request_context.request = info request_context.state = context.state # pass discovery response to backend and check that it redirects to the selected IdP + context.state["SATOSA_BASE"] = {"requester": "the-service-identifier"} resp = self.samlbackend.disco_response(request_context) assert_redirect_to_idp(resp, idp_conf) @@ -155,7 +156,6 @@ def test_full_flow(self, context, idp_conf, sp_conf): # pass auth response to backend and verify behavior self.samlbackend.authn_response(response_context, response_binding) context, internal_resp = self.samlbackend.auth_callback_func.call_args[0] - assert self.samlbackend.name not in context.state assert context.state[test_state_key] == "my_state" assert_authn_response(internal_resp) @@ -254,7 +254,6 @@ def test_authn_response(self, context, idp_conf, sp_conf): context, internal_resp = self.samlbackend.auth_callback_func.call_args[0] assert_authn_response(internal_resp) - assert self.samlbackend.name not in context.state @pytest.mark.skipif( saml2.__version__ < '4.6.1', @@ -290,7 +289,6 @@ def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): context, internal_resp = backend.auth_callback_func.call_args[0] assert_authn_response(internal_resp) - assert backend.name not in context.state def test_authn_response_with_encrypted_assertion(self, sp_conf, context): with open(os.path.join( From 62f8775421734af08a337be18ff208d00a78bc71 Mon Sep 17 00:00:00 2001 From: Kristof Bajnok Date: Tue, 21 Mar 2023 08:42:05 +0100 Subject: [PATCH 255/288] Test for 
missing state and missing relay state Signed-off-by: Ivan Kanakarakis --- src/satosa/backends/saml2.py | 9 ++-- tests/satosa/backends/test_saml2.py | 83 ++++++++++++++++++++--------- 2 files changed, 61 insertions(+), 31 deletions(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 3376ab300..ec99cad06 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -418,7 +418,8 @@ def authn_response(self, context, binding): logger.info(logline) raise SATOSAMissingStateError(msg) - if not context.request.get("SAMLResponse"): + samlresponse = context.request.get("SAMLResponse") + if not samlresponse: msg = { "message": "Authentication failed", "error": "SAML Response not found in context.request", @@ -429,9 +430,7 @@ def authn_response(self, context, binding): try: authn_response = self.sp.parse_authn_request_response( - context.request["SAMLResponse"], - binding, - outstanding=self.outstanding_queries, + samlresponse, binding, outstanding=self.outstanding_queries ) except Exception as e: msg = { @@ -456,7 +455,7 @@ def authn_response(self, context, binding): del self.outstanding_queries[req_id] # check if the relay_state matches the cookie state - if context.state[self.name]["relay_state"] != context.request["RelayState"]: + if context.state[self.name].get("relay_state") != context.request["RelayState"]: msg = { "message": "Authentication failed", "error": "Response state query param did not match relay state for request", diff --git a/tests/satosa/backends/test_saml2.py b/tests/satosa/backends/test_saml2.py index de349d9ad..e1cc96466 100644 --- a/tests/satosa/backends/test_saml2.py +++ b/tests/satosa/backends/test_saml2.py @@ -21,6 +21,8 @@ from satosa.backends.saml2 import SAMLBackend from satosa.context import Context +from satosa.exception import SATOSAAuthenticationError +from satosa.exception import SATOSAMissingStateError from satosa.internal import InternalData from tests.users import USERS from tests.util 
import FakeIdP, create_metadata_from_config_dict, FakeSP @@ -132,7 +134,7 @@ def test_full_flow(self, context, idp_conf, sp_conf): disco_resp = parse_qs(urlparse(resp.message).query) info = parse_qs(urlparse(disco_resp["return"][0]).query) info["entityID"] = idp_conf["entityid"] - request_context = context + request_context = Context() request_context.request = info request_context.state = context.state @@ -241,13 +243,9 @@ def test_unknown_or_no_hostname_selects_first_acs( def test_authn_response(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT - fakesp = FakeSP(SPConfig().load(sp_conf)) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) - destination, request_params = fakesp.make_auth_req(idp_conf["entityid"]) - url, auth_resp = fakeidp.handle_auth_req(request_params["SAMLRequest"], request_params["RelayState"], - BINDING_HTTP_REDIRECT, - "testuser1", response_binding=response_binding) - + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) context.request = auth_resp context.state[self.samlbackend.name] = {"relay_state": request_params["RelayState"]} self.samlbackend.authn_response(context, response_binding) @@ -255,29 +253,62 @@ def test_authn_response(self, context, idp_conf, sp_conf): context, internal_resp = self.samlbackend.auth_callback_func.call_args[0] assert_authn_response(internal_resp) - @pytest.mark.skipif( - saml2.__version__ < '4.6.1', - reason="Optional NameID needs pysaml2 v4.6.1 or higher") - def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): + def _perform_request_response( + self, idp_conf, sp_conf, response_binding, receive_nameid=True + ): + fakesp = FakeSP(SPConfig().load(sp_conf)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + destination, request_params = fakesp.make_auth_req(idp_conf["entityid"]) + auth_resp_func = ( + fakeidp.handle_auth_req + if receive_nameid + else fakeidp.handle_auth_req_no_name_id + ) + url, 
auth_resp = auth_resp_func( + request_params["SAMLRequest"], + request_params["RelayState"], + BINDING_HTTP_REDIRECT, + "testuser1", + response_binding=response_binding, + ) + + return request_params, auth_resp + + def test_no_state_raises_error(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) + context.request = auth_resp + # not setting context.state[self.samlbackend.name] + # to simulate a request with lost state - fakesp_conf = SPConfig().load(sp_conf) - fakesp = FakeSP(fakesp_conf) + with pytest.raises(SATOSAMissingStateError): + self.samlbackend.authn_response(context, response_binding) - fakeidp_conf = IdPConfig().load(idp_conf) - fakeidp = FakeIdP(USERS, config=fakeidp_conf) + def test_no_relay_state_raises_error(self, context, idp_conf, sp_conf): + response_binding = BINDING_HTTP_REDIRECT + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) + context.request = auth_resp + # not setting context.state[self.samlbackend.name]["relay_state"] + # to simulate a request without a relay state + context.state[self.samlbackend.name] = {} - destination, request_params = fakesp.make_auth_req( - idp_conf["entityid"]) + with pytest.raises(SATOSAAuthenticationError): + self.samlbackend.authn_response(context, response_binding) - # Use the fake IdP to mock up an authentication request that has no - # element. 
- url, auth_resp = fakeidp.handle_auth_req_no_name_id( - request_params["SAMLRequest"], - request_params["RelayState"], - BINDING_HTTP_REDIRECT, - "testuser1", - response_binding=response_binding) + @pytest.mark.skipif( + saml2.__version__ < '4.6.1', + reason="Optional NameID needs pysaml2 v4.6.1 or higher" + ) + def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): + response_binding = BINDING_HTTP_REDIRECT + + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding, receive_nameid=False + ) backend = self.samlbackend From 014e12166d0097d251e0e38bd291f730be8969de Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 12 Jun 2023 21:15:08 +0300 Subject: [PATCH 256/288] Restructure fatal error messages Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 97 +++++++++++++++++++++++++++-------------- src/satosa/context.py | 9 +--- src/satosa/exception.py | 18 ++++++++ src/satosa/routing.py | 18 +------- 4 files changed, 86 insertions(+), 56 deletions(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index 9c562b457..1e17c8cbe 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -8,20 +8,26 @@ from saml2.s_utils import UnknownSystemEntity from satosa import util -from satosa.response import Redirect from satosa.response import BadRequest from satosa.response import NotFound +from satosa.response import Redirect from .context import Context -from .exception import SATOSAError from .exception import SATOSAAuthenticationError -from .exception import SATOSAUnknownError -from .exception import SATOSAMissingStateError from .exception import SATOSAAuthenticationFlowError from .exception import SATOSABadRequestError -from .plugin_loader import load_backends, load_frontends -from .plugin_loader import load_request_microservices, load_response_microservices -from .routing import ModuleRouter, SATOSANoBoundEndpointError -from .state import cookie_to_state, SATOSAStateError, State, state_to_cookie +from 
.exception import SATOSAError
+from .exception import SATOSAMissingStateError
+from .exception import SATOSANoBoundEndpointError
+from .exception import SATOSAUnknownError
+from .exception import SATOSAStateError
+from .plugin_loader import load_backends
+from .plugin_loader import load_frontends
+from .plugin_loader import load_request_microservices
+from .plugin_loader import load_response_microservices
+from .routing import ModuleRouter
+from .state import State
+from .state import cookie_to_state
+from .state import state_to_cookie
 
 import satosa.logging_util as lu
 
@@ -262,77 +268,104 @@ def run(self, context):
             resp = self._run_bound_endpoint(context, spec)
             self._save_state(resp, context)
         except SATOSABadRequestError as e:
+            error_id = uuid.uuid4().urn
             msg = {
                 "message": "Bad Request",
-                "error": e.error,
-                "error_id": uuid.uuid4().urn
+                "error": str(e),
+                "error_id": error_id,
             }
             logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
             logger.error(logline)
             generic_error_url = self.config.get("ERROR_URL")
             if generic_error_url:
+                redirect_url = f"{generic_error_url}?errorid={error_id}"
                 return Redirect(generic_error_url)
-            else:
-                return BadRequest(e.error)
+            return BadRequest(e.error)
         except SATOSAMissingStateError as e:
+            error_id = uuid.uuid4().urn
             msg = {
                 "message": "Missing SATOSA State",
-                "error": e.error,
-                "error_id": uuid.uuid4().urn
+                "error": str(e),
+                "error_id": error_id,
             }
             logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
             logger.error(logline)
             generic_error_url = self.config.get("ERROR_URL")
             if generic_error_url:
+                redirect_url = f"{generic_error_url}?errorid={error_id}"
                 return Redirect(generic_error_url)
-            else:
-                raise
+            raise
         except SATOSAAuthenticationFlowError as e:
+            error_id = uuid.uuid4().urn
             msg = {
                 "message": "SATOSA Authentication Flow Error",
-                "error": e.error,
-                "error_id": uuid.uuid4().urn
+                "error": str(e),
+                "error_id": error_id,
             }
             logline = 
lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) generic_error_url = self.config.get("ERROR_URL") if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" return Redirect(generic_error_url) - else: - raise + raise except SATOSANoBoundEndpointError as e: - msg = str(e) + error_id = uuid.uuid4().urn + msg = { + "message": "URL-path is not bound to any endpoint function", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) return NotFound("The Service or Identity Provider you requested could not be found.") - except SATOSAError: - msg = "Uncaught SATOSA error" + except SATOSAError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Uncaught SATOSA error", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) generic_error_url = self.config.get("ERROR_URL") if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" return Redirect(generic_error_url) - else: - raise + raise except UnknownSystemEntity as e: - msg = f"Configuration error: unknown system entity: {e}" + error_id = uuid.uuid4().urn + msg = { + "message": "Configuration error: unknown system entity", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) generic_error_url = self.config.get("ERROR_URL") if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" return Redirect(generic_error_url) - else: - raise + raise except Exception as e: - msg = "Uncaught exception" + error_id = uuid.uuid4().urn + msg = { + "message": "Uncaught exception", + 
"error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) generic_error_url = self.config.get("ERROR_URL") if generic_error_url: return Redirect(generic_error_url) - else: - raise SATOSAUnknownError("Unknown error") from e - return resp + raise SATOSAUnknownError("Unknown error") from e + else: + return resp class SAMLBaseModule(object): diff --git a/src/satosa/context.py b/src/satosa/context.py index 33c365a51..2cd8243ac 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -1,13 +1,6 @@ from warnings import warn as _warn -from satosa.exception import SATOSAError - - -class SATOSABadContextError(SATOSAError): - """ - Raise this exception if validating the Context and failing. - """ - pass +from satosa.exception import SATOSABadContextError class Context(object): diff --git a/src/satosa/exception.py b/src/satosa/exception.py index f4fc4bc0c..770d26283 100644 --- a/src/satosa/exception.py +++ b/src/satosa/exception.py @@ -68,6 +68,7 @@ def message(self): """ return self._message.format(error_id=self.error_id) + class SATOSABasicError(SATOSAError): """ eduTEAMS error @@ -75,6 +76,7 @@ class SATOSABasicError(SATOSAError): def __init__(self, error): self.error = error + class SATOSAMissingStateError(SATOSABasicError): """ SATOSA Missing State error. @@ -85,6 +87,7 @@ class SATOSAMissingStateError(SATOSABasicError): """ pass + class SATOSAAuthenticationFlowError(SATOSABasicError): """ SATOSA Flow error. @@ -95,6 +98,7 @@ class SATOSAAuthenticationFlowError(SATOSABasicError): """ pass + class SATOSABadRequestError(SATOSABasicError): """ SATOSA Bad Request error. @@ -102,3 +106,17 @@ class SATOSABadRequestError(SATOSABasicError): This exception should be raised when we want to return an HTTP 400 Bad Request """ pass + + +class SATOSABadContextError(SATOSAError): + """ + Raise this exception if validating the Context and failing. 
+ """ + pass + + +class SATOSANoBoundEndpointError(SATOSAError): + """ + Raised when a given url path is not bound to any endpoint function + """ + pass diff --git a/src/satosa/routing.py b/src/satosa/routing.py index 317b047f9..015cffb23 100644 --- a/src/satosa/routing.py +++ b/src/satosa/routing.py @@ -4,8 +4,8 @@ import logging import re -from satosa.context import SATOSABadContextError -from satosa.exception import SATOSAError +from satosa.exception import SATOSABadContextError +from satosa.exception import SATOSANoBoundEndpointError import satosa.logging_util as lu @@ -15,20 +15,6 @@ STATE_KEY = "ROUTER" -class SATOSANoBoundEndpointError(SATOSAError): - """ - Raised when a given url path is not bound to any endpoint function - """ - pass - - -class SATOSAUnknownTargetBackend(SATOSAError): - """ - Raised when targeting an unknown backend - """ - pass - - class ModuleRouter(object): class UnknownEndpoint(ValueError): pass From ee913b21327806ca175902ad49df95a1b90d0efe Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Sat, 11 Dec 2021 15:41:03 +0100 Subject: [PATCH 257/288] Add option pool_lifetime option to ldap This patch adds another option to the ldap connection. Next to the other pool connections, it is now possible to set the `pool_lifetime`. 
--- .../plugins/microservices/ldap_attribute_store.yaml.example | 3 +++ src/satosa/micro_services/ldap_attribute_store.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 4efe85072..35e1bf264 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -27,6 +27,9 @@ config: # pool_keepalive: seconds to wait between calls to server to keep the # connection alive; default: 10 pool_keepalive: 10 + # pool_lifetime: number of seconds before recreating a new connection + # in a pooled connection strategy. + pool_lifetime: None # Attributes to return from LDAP query. query_return_attributes: diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 6d61559b1..fa0cb422f 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -61,6 +61,7 @@ class LdapAttributeStore(ResponseMicroService): "client_strategy": "REUSABLE", "pool_size": 10, "pool_keepalive": 10, + "pool_lifetime": None, } def __init__(self, config, *args, **kwargs): @@ -307,6 +308,7 @@ def _ldap_connection_factory(self, config): pool_size = config["pool_size"] pool_keepalive = config["pool_keepalive"] + pool_lifetime = config["pool_lifetime"] pool_name = ''.join(random.sample(string.ascii_lowercase, 6)) if client_strategy == ldap3.REUSABLE: @@ -314,6 +316,9 @@ def _ldap_connection_factory(self, config): logger.debug(msg) msg = "Using pool keep alive {}".format(pool_keepalive) logger.debug(msg) + if pool_lifetime: + msg = "Using pool lifetime {}".format(pool_lifetime) + logger.debug(msg) try: connection = ldap3.Connection( @@ -327,6 +332,7 @@ def _ldap_connection_factory(self, config): pool_name=pool_name, pool_size=pool_size, pool_keepalive=pool_keepalive, 
+ pool_lifetime=pool_lifetime, ) msg = "Successfully connected to LDAP server" logger.debug(msg) From 97cbdf814dd7405ddc3a5ad372b3c9e81f1f12dd Mon Sep 17 00:00:00 2001 From: Sven Haardiek Date: Fri, 16 Jun 2023 15:56:37 +0200 Subject: [PATCH 258/288] =?UTF-8?q?Add=20tests=20f=C3=BCr=20ldap=20connect?= =?UTF-8?q?ion=20configuration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This patch adds tests to check the configuration of the ldap connection. Signed-off-by: Sven Haardiek --- .../test_ldap_attribute_store.py | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/tests/satosa/micro_services/test_ldap_attribute_store.py b/tests/satosa/micro_services/test_ldap_attribute_store.py index e3af1a7f5..26dc3b9fb 100644 --- a/tests/satosa/micro_services/test_ldap_attribute_store.py +++ b/tests/satosa/micro_services/test_ldap_attribute_store.py @@ -2,6 +2,8 @@ from copy import deepcopy +from ldap3 import AUTO_BIND_NO_TLS, MOCK_SYNC + from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.ldap_attribute_store import LdapAttributeStore @@ -107,3 +109,60 @@ def test_attributes_general(self, ldap_attribute_store): internal_attr = ldap_to_internal_map[ldap_attr] response_attr = response.attributes[internal_attr] assert(ldap_value in response_attr) + + @pytest.mark.parametrize( + 'config,connection_attributes', + [ + ( + { + 'auto_bind': 'AUTO_BIND_NO_TLS', + 'client_strategy': 'MOCK_SYNC', + 'ldap_url': 'ldap://satosa.example.com', + 'bind_dn': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'bind_password': 'password', + }, + { + 'user': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'password': 'password', + 'auto_bind': AUTO_BIND_NO_TLS, + 'strategy_type': MOCK_SYNC, + 'read_only': True, + 'version': 3, + 'pool_size': 10, + 'pool_keepalive': 10, + 'pool_lifetime': None, + }, + ), + ( + { + 'auto_bind': 'AUTO_BIND_NO_TLS', + 
'client_strategy': 'MOCK_SYNC', + 'ldap_url': 'ldap://satosa.example.com', + 'bind_dn': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'bind_password': 'password', + 'pool_size': 40, + 'pool_keepalive': 41, + 'pool_lifetime': 42, + }, + { + 'user': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'password': 'password', + 'auto_bind': AUTO_BIND_NO_TLS, + 'strategy_type': MOCK_SYNC, + 'read_only': True, + 'version': 3, + 'pool_size': 40, + 'pool_keepalive': 41, + 'pool_lifetime': 42, + }, + ), + ] + ) + def test_connection_config(self, config, connection_attributes): + ldapAttributeStore = LdapAttributeStore({'default': config}, + name="test_ldap_attribute_store", + base_url="https://satosa.example.com") + connection = ldapAttributeStore.config['default']['connection'] + + for k, v in connection_attributes.items(): + assert getattr(connection, k) == v From 47638a79834a37ef6e2062d370f97efc1137aba3 Mon Sep 17 00:00:00 2001 From: roland Date: Thu, 8 Jun 2023 09:40:43 +0200 Subject: [PATCH 259/288] New idpyoidc based OAuth2/OIDC backend --- src/satosa/backends/idpy_oidc.py | 124 +++++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 src/satosa/backends/idpy_oidc.py diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py new file mode 100644 index 000000000..a0aa20f72 --- /dev/null +++ b/src/satosa/backends/idpy_oidc.py @@ -0,0 +1,124 @@ +""" +OIDC backend module. +""" +import logging +from datetime import datetime + +from idpyoidc.server.user_authn.authn_context import UNSPECIFIED + +from satosa.backends.base import BackendModule +from satosa.internal import AuthenticationInformation +from satosa.internal import InternalData + +logger = logging.getLogger(__name__) + +""" +OIDC/OAuth2 backend module. +""" +from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient + + +class IdpyOIDCBackend(BackendModule): + """ + Backend module for OIDC and OAuth 2.0, can be directly used. 
+ """ + + def __init__(self, + outgoing, + internal_attributes, + config, + base_url, + name, + external_type, + user_id_attr + ): + """ + :param outgoing: Callback should be called by the module after the authorization in the + backend is done. + :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and + the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and + RP's expects namevice. + :param config: Configuration parameters for the module. + :param base_url: base url of the service + :param name: name of the plugin + :param external_type: The name for this module in the internal attributes. + + :type outgoing: + (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response + :type internal_attributes: dict[string, dict[str, str | list[str]]] + :type config: dict[str, dict[str, str] | list[str]] + :type base_url: str + :type name: str + :type external_type: str + """ + super().__init__(outgoing, internal_attributes, base_url, name) + self.name = name + self.external_type = external_type + self.user_id_attr = user_id_attr + + self.client = StandAloneClient(config=config["client_config"], + client_type=config["client_config"]['client_type']) + # Deal with provider discovery and client registration + self.client.do_provider_info() + self.client.do_client_registration() + + def start_auth(self, context, internal_request): + """ + See super class method satosa.backends.base#start_auth + + :type context: satosa.context.Context + :type internal_request: satosa.internal.InternalData + :rtype satosa.response.Redirect + """ + return self.client.init_authorization() + + def register_endpoints(self): + """ + Creates a list of all the endpoints this backend module needs to listen to. In this case + it's the authentication response from the underlying OP that is redirected from the OP to + the proxy. 
+ :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] + :return: A list that can be used to map the request to SATOSA to this endpoint. + """ + + return self.client.context.claims.get_usage('authorization_endpoint') + + def _authn_response(self, context): + """ + Handles the authentication response from the AS. + + :type context: satosa.context.Context + :rtype: satosa.response.Response + :param context: The context in SATOSA + :return: A SATOSA response. This method is only responsible to call the callback function + which generates the Response object. + """ + + _info = self.client.finalize(context.request) + + try: + auth_info = self.auth_info(context.request) + except NotImplementedError: + auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), _info["issuer"]) + + internal_response = InternalData(auth_info=auth_info) + internal_response.attributes = self.converter.to_internal(self.external_type, + _info['userinfo']) + internal_response.subject_id = _info['userinfo'][self.user_id_attr] + del context.state[self.name] + # return self.auth_callback_func(context, internal_response) + if 'error' in _info: + return _info + else: + return _info['userinfo'] + + def auth_info(self, request): + """ + Creates the SATOSA authentication information object. + :type request: dict[str, str] + :rtype: AuthenticationInformation + + :param request: The request parameters in the authentication response sent by the AS. + :return: How, who and when the authentication took place. + """ + raise NotImplementedError("Method 'auth_info' must be implemented in the subclass!") From dba92f80141cfeb6112d05983697be55b5ae5cf5 Mon Sep 17 00:00:00 2001 From: roland Date: Thu, 8 Jun 2023 12:17:09 +0200 Subject: [PATCH 260/288] Added error message handling. 
--- src/satosa/backends/idpy_oidc.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index a0aa20f72..f9c18826f 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -6,7 +6,9 @@ from idpyoidc.server.user_authn.authn_context import UNSPECIFIED +import satosa.logging_util as lu from satosa.backends.base import BackendModule +from satosa.exception import SATOSAAuthenticationError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData @@ -83,6 +85,23 @@ def register_endpoints(self): return self.client.context.claims.get_usage('authorization_endpoint') + def _check_error_response(self, response, context): + """ + Check if the response is an error response. + :param response: the response from finalize() + :type response: oic.oic.message + :raise SATOSAAuthenticationError: if the response is an OAuth error response + """ + if "error" in response: + msg = "{name} error: {error} {description}".format( + name=type(response).__name__, + error=response["error"], + description=response.get("error_description", ""), + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + raise SATOSAAuthenticationError(context.state, "Access denied") + def _authn_response(self, context): """ Handles the authentication response from the AS. @@ -95,6 +114,7 @@ def _authn_response(self, context): """ _info = self.client.finalize(context.request) + self._check_error_response(_info, context) try: auth_info = self.auth_info(context.request) From 7ca9a801337299301965e46f3d090cb660ef4a02 Mon Sep 17 00:00:00 2001 From: roland Date: Thu, 8 Jun 2023 21:34:35 +0200 Subject: [PATCH 261/288] Updated init attributes. 
--- src/satosa/backends/idpy_oidc.py | 61 +++++++++++++------------------- 1 file changed, 24 insertions(+), 37 deletions(-) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index f9c18826f..cec28fe80 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -25,38 +25,23 @@ class IdpyOIDCBackend(BackendModule): Backend module for OIDC and OAuth 2.0, can be directly used. """ - def __init__(self, - outgoing, - internal_attributes, - config, - base_url, - name, - external_type, - user_id_attr - ): + def __init__(self, outgoing, internal_attributes, config, base_url, name): """ - :param outgoing: Callback should be called by the module after the authorization in the - backend is done. - :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and - the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and - RP's expects namevice. - :param config: Configuration parameters for the module. - :param base_url: base url of the service - :param name: name of the plugin - :param external_type: The name for this module in the internal attributes. - :type outgoing: (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response - :type internal_attributes: dict[string, dict[str, str | list[str]]] - :type config: dict[str, dict[str, str] | list[str]] + :type internal_attributes: dict[str, dict[str, list[str] | str]] + :type config: dict[str, Any] :type base_url: str :type name: str - :type external_type: str + + :param outgoing: Callback should be called by the module after + the authorization in the backend is done. 
+ :param internal_attributes: Internal attribute map + :param config: The module config + :param base_url: base url of the service + :param name: name of the plugin """ super().__init__(outgoing, internal_attributes, base_url, name) - self.name = name - self.external_type = external_type - self.user_id_attr = user_id_attr self.client = StandAloneClient(config=config["client_config"], client_type=config["client_config"]['client_type']) @@ -119,18 +104,20 @@ def _authn_response(self, context): try: auth_info = self.auth_info(context.request) except NotImplementedError: - auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), _info["issuer"]) - - internal_response = InternalData(auth_info=auth_info) - internal_response.attributes = self.converter.to_internal(self.external_type, - _info['userinfo']) - internal_response.subject_id = _info['userinfo'][self.user_id_attr] - del context.state[self.name] - # return self.auth_callback_func(context, internal_response) - if 'error' in _info: - return _info - else: - return _info['userinfo'] + auth_info = AuthenticationInformation(auth_class_ref=UNSPECIFIED, + timestamp=str(datetime.now()), + issuer=_info["issuer"]) + + attributes = self.converter.to_internal( + self.client.client_type, _info['userinfo'], + ) + + internal_response = InternalData( + auth_info=auth_info, + attributes=attributes, + subject_id=_info['userinfo']['sub'] + ) + return internal_response def auth_info(self, request): """ From f0f38af3fd1bee7d24f055a798b6c5065bb25373 Mon Sep 17 00:00:00 2001 From: roland Date: Fri, 9 Jun 2023 16:33:12 +0200 Subject: [PATCH 262/288] Changes as a result of Ali's testing. 
--- src/satosa/backends/idpy_oidc.py | 141 ++++++++++++++++--------------- 1 file changed, 73 insertions(+), 68 deletions(-) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index cec28fe80..825ba9f72 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -1,51 +1,50 @@ """ -OIDC backend module. +OIDC/OAuth2 backend module. """ import logging from datetime import datetime +from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient from idpyoidc.server.user_authn.authn_context import UNSPECIFIED import satosa.logging_util as lu from satosa.backends.base import BackendModule -from satosa.exception import SATOSAAuthenticationError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData +from ..exception import SATOSAAuthenticationError +from ..response import Redirect logger = logging.getLogger(__name__) -""" -OIDC/OAuth2 backend module. -""" -from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient - class IdpyOIDCBackend(BackendModule): """ Backend module for OIDC and OAuth 2.0, can be directly used. """ - def __init__(self, outgoing, internal_attributes, config, base_url, name): + def __init__(self, auth_callback_func, internal_attributes, config, base_url, name): """ - :type outgoing: + OIDC backend module. + :param auth_callback_func: Callback should be called by the module after the authorization + in the backend is done. + :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and + the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and + RP's expects namevice. + :param config: Configuration parameters for the module. 
+ :param base_url: base url of the service + :param name: name of the plugin + + :type auth_callback_func: (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response - :type internal_attributes: dict[str, dict[str, list[str] | str]] - :type config: dict[str, Any] + :type internal_attributes: dict[string, dict[str, str | list[str]]] + :type config: dict[str, dict[str, str] | list[str]] :type base_url: str :type name: str - - :param outgoing: Callback should be called by the module after - the authorization in the backend is done. - :param internal_attributes: Internal attribute map - :param config: The module config - :param base_url: base url of the service - :param name: name of the plugin """ - super().__init__(outgoing, internal_attributes, base_url, name) - - self.client = StandAloneClient(config=config["client_config"], - client_type=config["client_config"]['client_type']) - # Deal with provider discovery and client registration + super().__init__(auth_callback_func, internal_attributes, base_url, name) + # self.auth_callback_func = auth_callback_func + # self.config = config + self.client = StandAloneClient(config=config["client"], client_type="oidc") self.client.do_provider_info() self.client.do_client_registration() @@ -57,7 +56,8 @@ def start_auth(self, context, internal_request): :type internal_request: satosa.internal.InternalData :rtype satosa.response.Redirect """ - return self.client.init_authorization() + login_url = self.client.init_authorization() + return Redirect(login_url) def register_endpoints(self): """ @@ -67,8 +67,56 @@ def register_endpoints(self): :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] :return: A list that can be used to map the request to SATOSA to this endpoint. """ + return self.client.context.claims.get_usage('redirect_uris') + + def response_endpoint(self, context, *args): + """ + Handles the authentication response from the OP. 
+ :type context: satosa.context.Context + :type args: Any + :rtype: satosa.response.Response - return self.client.context.claims.get_usage('authorization_endpoint') + :param context: SATOSA context + :param args: None + :return: + """ + + _info = self.client.finalize(context.request) + self._check_error_response(_info, context) + userinfo = _info.get('userinfo') + id_token = _info.get('id_token') + + if not id_token and not userinfo: + msg = "No id_token or userinfo, nothing to do.." + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise SATOSAAuthenticationError(context.state, "No user info available.") + + all_user_claims = dict(list(userinfo.items()) + list(id_token.items())) + msg = "UserInfo: {}".format(all_user_claims) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + internal_resp = self._translate_response(all_user_claims, _info["issuer"]) + return self.auth_callback_func(context, internal_resp) + + def _translate_response(self, response, issuer): + """ + Translates oidc response to SATOSA internal response. + :type response: dict[str, str] + :type issuer: str + :type subject_type: str + :rtype: InternalData + + :param response: Dictioary with attribute name as key. + :param issuer: The oidc op that gave the repsonse. + :param subject_type: public or pairwise according to oidc standard. + :return: A SATOSA internal response. 
+ """ + auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), issuer) + internal_resp = InternalData(auth_info=auth_info) + internal_resp.attributes = self.converter.to_internal("openid", response) + internal_resp.subject_id = response["sub"] + return internal_resp def _check_error_response(self, response, context): """ @@ -86,46 +134,3 @@ def _check_error_response(self, response, context): logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) raise SATOSAAuthenticationError(context.state, "Access denied") - - def _authn_response(self, context): - """ - Handles the authentication response from the AS. - - :type context: satosa.context.Context - :rtype: satosa.response.Response - :param context: The context in SATOSA - :return: A SATOSA response. This method is only responsible to call the callback function - which generates the Response object. - """ - - _info = self.client.finalize(context.request) - self._check_error_response(_info, context) - - try: - auth_info = self.auth_info(context.request) - except NotImplementedError: - auth_info = AuthenticationInformation(auth_class_ref=UNSPECIFIED, - timestamp=str(datetime.now()), - issuer=_info["issuer"]) - - attributes = self.converter.to_internal( - self.client.client_type, _info['userinfo'], - ) - - internal_response = InternalData( - auth_info=auth_info, - attributes=attributes, - subject_id=_info['userinfo']['sub'] - ) - return internal_response - - def auth_info(self, request): - """ - Creates the SATOSA authentication information object. - :type request: dict[str, str] - :rtype: AuthenticationInformation - - :param request: The request parameters in the authentication response sent by the AS. - :return: How, who and when the authentication took place. 
- """ - raise NotImplementedError("Method 'auth_info' must be implemented in the subclass!") From b175d0ee156d2314e68aefcd4fa229973558f8b6 Mon Sep 17 00:00:00 2001 From: roland Date: Wed, 14 Jun 2023 09:10:11 +0200 Subject: [PATCH 263/288] More changes as a result of Ali Haider's testing. --- src/satosa/backends/idpy_oidc.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index 825ba9f72..06eb3c8c4 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -3,6 +3,7 @@ """ import logging from datetime import datetime +from urllib.parse import urlparse from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient from idpyoidc.server.user_authn.authn_context import UNSPECIFIED @@ -12,6 +13,7 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from ..exception import SATOSAAuthenticationError +from ..exception import SATOSAError from ..response import Redirect logger = logging.getLogger(__name__) @@ -67,7 +69,13 @@ def register_endpoints(self): :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] :return: A list that can be used to map the request to SATOSA to this endpoint. """ - return self.client.context.claims.get_usage('redirect_uris') + url_map = [] + redirect_path = self.client.context.claims.get_usage('redirect_uris') + if not redirect_path: + raise SATOSAError("Missing path in redirect uri") + redirect_path = urlparse(redirect_path[0]).path + url_map.append(("^%s$" % redirect_path.lstrip("/"), self.response_endpoint)) + return url_map def response_endpoint(self, context, *args): """ From a56db954385ca683164f99b946ba25472c7c5a96 Mon Sep 17 00:00:00 2001 From: roland Date: Tue, 20 Jun 2023 13:19:11 +0200 Subject: [PATCH 264/288] Example backend used by Ali Haider. 
--- .../plugins/backends/idpyoidc_backend.yaml.example | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 example/plugins/backends/idpyoidc_backend.yaml.example diff --git a/example/plugins/backends/idpyoidc_backend.yaml.example b/example/plugins/backends/idpyoidc_backend.yaml.example new file mode 100644 index 000000000..45d011b21 --- /dev/null +++ b/example/plugins/backends/idpyoidc_backend.yaml.example @@ -0,0 +1,12 @@ +module: satosa.backends.idpy_oidc.IdpyOIDCBackend +name: oidc +config: + client_type: oidc + redirect_uris: [/] + client_id: !ENV SATOSA_OIDC_BACKEND_CLIENTID + client_secret: !ENV SATOSA_OIDC_BACKEND_CLIENTSECRET + response_types_supported: ["code"] + scopes_supported: ["openid", "profile", "email"] + subject_type_supported: ["public"] + provider_info: + issuer: !ENV SATOSA_OIDC_BACKEND_ISSUER \ No newline at end of file From b3860b83a26690cd39285dc3c497a1ad91bf5d38 Mon Sep 17 00:00:00 2001 From: Roland Hedberg Date: Mon, 26 Jun 2023 09:39:39 +0200 Subject: [PATCH 265/288] Added tests --- src/satosa/backends/idpy_oidc.py | 6 +- tests/satosa/backends/test_idpy_oidc.py | 207 ++++++++++++++++++++++++ 2 files changed, 210 insertions(+), 3 deletions(-) create mode 100644 tests/satosa/backends/test_idpy_oidc.py diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index 06eb3c8c4..0f259ea1f 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -1,17 +1,17 @@ """ OIDC/OAuth2 backend module. 
""" -import logging from datetime import datetime +import logging from urllib.parse import urlparse from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient from idpyoidc.server.user_authn.authn_context import UNSPECIFIED -import satosa.logging_util as lu from satosa.backends.base import BackendModule from satosa.internal import AuthenticationInformation from satosa.internal import InternalData +import satosa.logging_util as lu from ..exception import SATOSAAuthenticationError from ..exception import SATOSAError from ..response import Redirect @@ -74,7 +74,7 @@ def register_endpoints(self): if not redirect_path: raise SATOSAError("Missing path in redirect uri") redirect_path = urlparse(redirect_path[0]).path - url_map.append(("^%s$" % redirect_path.lstrip("/"), self.response_endpoint)) + url_map.append((f"^{redirect_path.lstrip('/')}$", self.response_endpoint)) return url_map def response_endpoint(self, context, *args): diff --git a/tests/satosa/backends/test_idpy_oidc.py b/tests/satosa/backends/test_idpy_oidc.py new file mode 100644 index 000000000..067118c5d --- /dev/null +++ b/tests/satosa/backends/test_idpy_oidc.py @@ -0,0 +1,207 @@ +import json +import re +import time +from unittest.mock import Mock +from urllib.parse import parse_qsl +from urllib.parse import urlparse + +from cryptojwt.key_jar import build_keyjar +from idpyoidc.client.defaults import DEFAULT_KEY_DEFS +from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient +from idpyoidc.message.oidc import AuthorizationResponse +from idpyoidc.message.oidc import IdToken +from oic.oic import AuthorizationRequest +import pytest +import responses + +from satosa.backends.idpy_oidc import IdpyOIDCBackend +from satosa.context import Context +from satosa.internal import InternalData +from satosa.response import Response + +ISSUER = "https://provider.example.com" +CLIENT_ID = "test_client" +CLIENT_BASE_URL = "https://client.test.com" +NONCE = "the nonce" + + +class 
TestIdpyOIDCBackend(object): + @pytest.fixture + def backend_config(self): + return { + "client": { + "base_url": CLIENT_BASE_URL, + "client_id": CLIENT_ID, + "client_type": "oidc", + "client_secret": "ZJYCqe3GGRvdrudKyZS0XhGv_Z45DuKhCUk0gBR1vZk", + "application_type": "web", + "application_name": "SATOSA Test", + "contacts": ["ops@example.com"], + "response_types_supported": ["code"], + "response_type": "code id_token token", + "scope": "openid foo", + "key_conf": {"key_defs": DEFAULT_KEY_DEFS}, + "jwks_uri": f"{CLIENT_BASE_URL}/jwks.json", + "provider_info": { + "issuer": ISSUER, + "authorization_endpoint": f"{ISSUER}/authn", + "token_endpoint": f"{ISSUER}/token", + "userinfo_endpoint": f"{ISSUER}/user", + "jwks_uri": f"{ISSUER}/static/jwks" + } + } + } + + @pytest.fixture + def internal_attributes(self): + return { + "attributes": { + "givenname": {"openid": ["given_name"]}, + "mail": {"openid": ["email"]}, + "edupersontargetedid": {"openid": ["sub"]}, + "surname": {"openid": ["family_name"]} + } + } + + @pytest.fixture(autouse=True) + @responses.activate + def create_backend(self, internal_attributes, backend_config): + base_url = backend_config['client']['base_url'] + self.issuer_keys = build_keyjar(DEFAULT_KEY_DEFS) + with responses.RequestsMock() as rsps: + rsps.add( + responses.GET, + backend_config['client']['provider_info']['jwks_uri'], + body=self.issuer_keys.export_jwks_as_json(), + status=200, + content_type="application/json") + + self.oidc_backend = IdpyOIDCBackend(Mock(), internal_attributes, backend_config, + base_url, "oidc") + + @pytest.fixture + def userinfo(self): + return { + "given_name": "Test", + "family_name": "Devsson", + "email": "test_dev@example.com", + "sub": "username" + } + + def test_client(self, backend_config): + assert isinstance(self.oidc_backend.client, StandAloneClient) + # 3 signing keys. 
One RSA, one EC and one symmetric + assert len(self.oidc_backend.client.context.keyjar.get_signing_key()) == 3 + assert self.oidc_backend.client.context.jwks_uri == backend_config['client']['jwks_uri'] + + def assert_expected_attributes(self, attr_map, user_claims, actual_attributes): + expected_attributes = {} + for out_attr, in_mapping in attr_map["attributes"].items(): + expected_attributes[out_attr] = [user_claims[in_mapping["openid"][0]]] + + assert actual_attributes == expected_attributes + + def setup_token_endpoint(self, userinfo): + _client = self.oidc_backend.client + signing_key = self.issuer_keys.get_signing_key(key_type='RSA')[0] + signing_key.alg = "RS256" + id_token_claims = { + "iss": ISSUER, + "sub": userinfo["sub"], + "aud": CLIENT_ID, + "nonce": NONCE, + "exp": time.time() + 3600, + "iat": time.time() + } + id_token = IdToken(**id_token_claims).to_jwt([signing_key], algorithm=signing_key.alg) + token_response = { + "access_token": "SlAV32hkKG", + "token_type": "Bearer", + "refresh_token": "8xLOxBtZp8", + "expires_in": 3600, + "id_token": id_token + } + responses.add(responses.POST, + _client.context.provider_info['token_endpoint'], + body=json.dumps(token_response), + status=200, + content_type="application/json") + + def setup_userinfo_endpoint(self, userinfo): + responses.add(responses.GET, + self.oidc_backend.client.context.provider_info['userinfo_endpoint'], + body=json.dumps(userinfo), + status=200, + content_type="application/json") + + @pytest.fixture + def incoming_authn_response(self): + _context = self.oidc_backend.client.context + oidc_state = "my state" + _uri = _context.claims.get_usage("redirect_uris")[0] + _request = AuthorizationRequest( + redirect_uri=_uri, + response_type="code", + client_id=_context.get_client_id(), + scope=_context.claims.get_usage("scope"), + nonce=NONCE + ) + _context.cstate.set(oidc_state, {"iss": _context.issuer}) + _context.cstate.bind_key(NONCE, oidc_state) + _context.cstate.update(oidc_state, _request) 
+ + response = AuthorizationResponse( + code="F+R4uWbN46U+Bq9moQPC4lEvRd2De4o=", + state=oidc_state, + iss=_context.issuer, + nonce=NONCE + ) + return response.to_dict() + + def test_register_endpoints(self): + _uri = self.oidc_backend.client.context.claims.get_usage("redirect_uris")[0] + redirect_uri_path = urlparse(_uri).path.lstrip('/') + url_map = self.oidc_backend.register_endpoints() + regex, callback = url_map[0] + assert re.search(regex, redirect_uri_path) + assert callback == self.oidc_backend.response_endpoint + + def test_translate_response_to_internal_response(self, userinfo): + internal_response = self.oidc_backend._translate_response(userinfo, ISSUER) + assert internal_response.subject_id == userinfo["sub"] + self.assert_expected_attributes(self.oidc_backend.internal_attributes, userinfo, + internal_response.attributes) + + @responses.activate + def test_response_endpoint(self, context, userinfo, incoming_authn_response): + self.setup_token_endpoint(userinfo) + self.setup_userinfo_endpoint(userinfo) + + response_context = Context() + response_context.request = incoming_authn_response + response_context.state = context.state + + self.oidc_backend.response_endpoint(response_context) + + args = self.oidc_backend.auth_callback_func.call_args[0] + assert isinstance(args[0], Context) + assert isinstance(args[1], InternalData) + self.assert_expected_attributes(self.oidc_backend.internal_attributes, userinfo, + args[1].attributes) + + def test_start_auth_redirects_to_provider_authorization_endpoint(self, context): + _client = self.oidc_backend.client + auth_response = self.oidc_backend.start_auth(context, None) + assert isinstance(auth_response, Response) + + login_url = auth_response.message + parsed = urlparse(login_url) + assert login_url.startswith(_client.context.provider_info["authorization_endpoint"]) + auth_params = dict(parse_qsl(parsed.query)) + assert auth_params["scope"] == " ".join(_client.context.claims.get_usage("scope")) + assert 
auth_params["response_type"] == _client.context.claims.get_usage("response_types")[0] + assert auth_params["client_id"] == _client.client_id + assert auth_params["redirect_uri"] == _client.context.claims.get_usage("redirect_uris")[0] + assert "state" in auth_params + assert "nonce" in auth_params + From 34d85971a79880a9a74fe594d1f9fd6588ff796c Mon Sep 17 00:00:00 2001 From: Roland Hedberg Date: Thu, 6 Jul 2023 09:49:58 +0200 Subject: [PATCH 266/288] Changes after comments from Ivan. --- src/satosa/backends/idpy_oidc.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index 0f259ea1f..fbab4c272 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -50,6 +50,11 @@ def __init__(self, auth_callback_func, internal_attributes, config, base_url, na self.client.do_provider_info() self.client.do_client_registration() + _redirect_uris = self.client.context.claims.get_usage('redirect_uris') + if not _redirect_uris: + raise SATOSAError("Missing path in redirect uri") + self.redirect_path = urlparse(_redirect_uris[0]).path + def start_auth(self, context, internal_request): """ See super class method satosa.backends.base#start_auth @@ -70,11 +75,7 @@ def register_endpoints(self): :return: A list that can be used to map the request to SATOSA to this endpoint. """ url_map = [] - redirect_path = self.client.context.claims.get_usage('redirect_uris') - if not redirect_path: - raise SATOSAError("Missing path in redirect uri") - redirect_path = urlparse(redirect_path[0]).path - url_map.append((f"^{redirect_path.lstrip('/')}$", self.response_endpoint)) + url_map.append((f"^{self.redirect_path.lstrip('/')}$", self.response_endpoint)) return url_map def response_endpoint(self, context, *args): @@ -120,7 +121,10 @@ def _translate_response(self, response, issuer): :param subject_type: public or pairwise according to oidc standard. 
:return: A SATOSA internal response. """ - auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), issuer) + timestamp = response["auth_time"] + auth_class_ref = response.get("amr", response.get("acr", UNSPECIFIED)) + auth_info = AuthenticationInformation(auth_class_ref, timestamp, issuer) + internal_resp = InternalData(auth_info=auth_info) internal_resp.attributes = self.converter.to_internal("openid", response) internal_resp.subject_id = response["sub"] From a8a446ad12dec0ea96c096ff7a196daa14e42de6 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 10 Jul 2023 21:59:53 +0300 Subject: [PATCH 267/288] Prepare the right datetime format --- src/satosa/backends/idpy_oidc.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py index fbab4c272..f3ea43f61 100644 --- a/src/satosa/backends/idpy_oidc.py +++ b/src/satosa/backends/idpy_oidc.py @@ -1,7 +1,7 @@ """ OIDC/OAuth2 backend module. """ -from datetime import datetime +import datetime import logging from urllib.parse import urlparse @@ -16,6 +16,8 @@ from ..exception import SATOSAError from ..response import Redirect + +UTC = datetime.timezone.utc logger = logging.getLogger(__name__) @@ -121,9 +123,15 @@ def _translate_response(self, response, issuer): :param subject_type: public or pairwise according to oidc standard. :return: A SATOSA internal response. 
""" - timestamp = response["auth_time"] - auth_class_ref = response.get("amr", response.get("acr", UNSPECIFIED)) - auth_info = AuthenticationInformation(auth_class_ref, timestamp, issuer) + timestamp_epoch = ( + response.get("auth_time") + or response.get("iat") + or int(datetime.datetime.now(UTC).timestamp()) + ) + timestamp_dt = datetime.datetime.fromtimestamp(timestamp_epoch, UTC) + timestamp_iso = timestamp_dt.isoformat().replace("+00:00", "Z") + auth_class_ref = response.get("acr") or response.get("amr") or UNSPECIFIED + auth_info = AuthenticationInformation(auth_class_ref, timestamp_iso, issuer) internal_resp = InternalData(auth_info=auth_info) internal_resp.attributes = self.converter.to_internal("openid", response) From aeaea946c1679387c8223f2f0d94649433afbc8c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 10 Jul 2023 22:46:11 +0300 Subject: [PATCH 268/288] Fix tests Signed-off-by: Ivan Kanakarakis --- tests/satosa/backends/test_idpy_oidc.py | 56 ++++++++++++++++++------- 1 file changed, 42 insertions(+), 14 deletions(-) diff --git a/tests/satosa/backends/test_idpy_oidc.py b/tests/satosa/backends/test_idpy_oidc.py index 067118c5d..95e8b427c 100644 --- a/tests/satosa/backends/test_idpy_oidc.py +++ b/tests/satosa/backends/test_idpy_oidc.py @@ -1,6 +1,7 @@ import json import re import time +from datetime import datetime from unittest.mock import Mock from urllib.parse import parse_qsl from urllib.parse import urlparse @@ -88,6 +89,29 @@ def userinfo(self): "sub": "username" } + @pytest.fixture + def id_token(self, userinfo): + issuer_keys = build_keyjar(DEFAULT_KEY_DEFS) + signing_key = issuer_keys.get_signing_key(key_type='RSA')[0] + signing_key.alg = "RS256" + auth_time = int(datetime.utcnow().timestamp()) + id_token_claims = { + "auth_time": auth_time, + "iss": ISSUER, + "sub": userinfo["sub"], + "aud": CLIENT_ID, + "nonce": NONCE, + "exp": auth_time + 3600, + "iat": auth_time, + } + id_token = IdToken(**id_token_claims) + return id_token + + 
@pytest.fixture + def all_user_claims(self, userinfo, id_token): + all_user_claims = {**userinfo, **id_token} + return all_user_claims + def test_client(self, backend_config): assert isinstance(self.oidc_backend.client, StandAloneClient) # 3 signing keys. One RSA, one EC and one symmetric @@ -95,10 +119,10 @@ def test_client(self, backend_config): assert self.oidc_backend.client.context.jwks_uri == backend_config['client']['jwks_uri'] def assert_expected_attributes(self, attr_map, user_claims, actual_attributes): - expected_attributes = {} - for out_attr, in_mapping in attr_map["attributes"].items(): - expected_attributes[out_attr] = [user_claims[in_mapping["openid"][0]]] - + expected_attributes = { + out_attr: [user_claims[in_mapping["openid"][0]]] + for out_attr, in_mapping in attr_map["attributes"].items() + } assert actual_attributes == expected_attributes def setup_token_endpoint(self, userinfo): @@ -166,16 +190,19 @@ def test_register_endpoints(self): assert re.search(regex, redirect_uri_path) assert callback == self.oidc_backend.response_endpoint - def test_translate_response_to_internal_response(self, userinfo): - internal_response = self.oidc_backend._translate_response(userinfo, ISSUER) - assert internal_response.subject_id == userinfo["sub"] - self.assert_expected_attributes(self.oidc_backend.internal_attributes, userinfo, - internal_response.attributes) + def test_translate_response_to_internal_response(self, all_user_claims): + internal_response = self.oidc_backend._translate_response(all_user_claims, ISSUER) + assert internal_response.subject_id == all_user_claims["sub"] + self.assert_expected_attributes( + self.oidc_backend.internal_attributes, + all_user_claims, + internal_response.attributes, + ) @responses.activate - def test_response_endpoint(self, context, userinfo, incoming_authn_response): - self.setup_token_endpoint(userinfo) - self.setup_userinfo_endpoint(userinfo) + def test_response_endpoint(self, context, all_user_claims, 
incoming_authn_response): + self.setup_token_endpoint(all_user_claims) + self.setup_userinfo_endpoint(all_user_claims) response_context = Context() response_context.request = incoming_authn_response @@ -186,8 +213,9 @@ def test_response_endpoint(self, context, userinfo, incoming_authn_response): args = self.oidc_backend.auth_callback_func.call_args[0] assert isinstance(args[0], Context) assert isinstance(args[1], InternalData) - self.assert_expected_attributes(self.oidc_backend.internal_attributes, userinfo, - args[1].attributes) + self.assert_expected_attributes( + self.oidc_backend.internal_attributes, all_user_claims, args[1].attributes + ) def test_start_auth_redirects_to_provider_authorization_endpoint(self, context): _client = self.oidc_backend.client From 628ee94f507d9923b1ed6b20dd831c84860d753c Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 10 Jul 2023 22:46:36 +0300 Subject: [PATCH 269/288] Add extra requirement for the new idpy-oidc based backend Signed-off-by: Ivan Kanakarakis --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 59065f6ac..51bb389ea 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ "ldap": ["ldap3"], "pyop_mongo": ["pyop[mongo]"], "pyop_redis": ["pyop[redis]"], + "idpy_oidc_backend": ["idpyoidc >= 2.1.0"], }, zip_safe=False, classifiers=[ From 468cc87e9eca5c4d4c3422df4014119a4ffb5474 Mon Sep 17 00:00:00 2001 From: Rastislav Krutak <492918@mail.muni.cz> Date: Mon, 17 Jul 2023 11:19:08 +0200 Subject: [PATCH 270/288] feat: treat resource param as list --- src/satosa/proxy_server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index 7968167ea..e23be1418 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -20,6 +20,8 @@ def parse_query_string(data): query_param_pairs = _parse_query_string(data) query_param_dict = dict(query_param_pairs) + if "resource" in query_param_dict: + query_param_dict["resource"] = 
[t[1] for t in query_param_pairs if t[0] == "resource"] return query_param_dict From 6ea06d662f33b739c7239c5c4272f19d8c264e1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 31 Jul 2023 08:59:33 +0200 Subject: [PATCH 271/288] feat: allow loading of tuples from YAML configs --- src/satosa/yaml.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py index 2f8d51f1b..ad74e2e9e 100644 --- a/src/satosa/yaml.py +++ b/src/satosa/yaml.py @@ -43,9 +43,21 @@ def _constructor_envfile_variables(loader, node): return new_value +def _constructor_tuple_variables(loader, node): + """ + Extracts the tuple variable from the node's value. + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value of the tuple + """ + return tuple(loader.construct_sequence(node)) + + TAG_ENV = "!ENV" TAG_ENVFILE = "!ENVFILE" +TAG_TUPLE = u'tag:yaml.org,2002:python/tuple' _safe_loader.add_constructor(TAG_ENV, _constructor_env_variables) _safe_loader.add_constructor(TAG_ENVFILE, _constructor_envfile_variables) +_safe_loader.add_constructor(TAG_TUPLE, _constructor_tuple_variables) From 5debe486fe6d832fdbe880aba378c8cc0afcb133 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Wed, 23 Aug 2023 11:25:21 +0200 Subject: [PATCH 272/288] chore: quotes Co-authored-by: Ivan Kanakarakis --- src/satosa/yaml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py index ad74e2e9e..d45b12116 100644 --- a/src/satosa/yaml.py +++ b/src/satosa/yaml.py @@ -55,7 +55,7 @@ def _constructor_tuple_variables(loader, node): TAG_ENV = "!ENV" TAG_ENVFILE = "!ENVFILE" -TAG_TUPLE = u'tag:yaml.org,2002:python/tuple' +TAG_TUPLE = "tag:yaml.org,2002:python/tuple" _safe_loader.add_constructor(TAG_ENV, _constructor_env_variables) From 80210b364cf8f3a127a8d8eb7046c360704ad8d9 Mon Sep 17 00:00:00 2001 From: war Date: Wed, 8 Nov 2023 
14:05:10 +0100 Subject: [PATCH 273/288] fix: prevent endless loop from accessing self --- src/satosa/internal.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/internal.py b/src/satosa/internal.py index 24de31890..a96b19b1f 100644 --- a/src/satosa/internal.py +++ b/src/satosa/internal.py @@ -35,7 +35,7 @@ def __setattr__(self, key, value): def __getattr__(self, key): if key == "data": - return self.data + return super().data try: value = self.__getitem__(key) From 48bd453426e946f3201e3c7270a5e089e8f8bfcd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 28 Nov 2022 23:43:29 +0100 Subject: [PATCH 274/288] refactor: base AppleBackend on OpenIDConnectBackend common parts are not duplicated --- src/satosa/backends/apple.py | 207 +---------------------------------- 1 file changed, 3 insertions(+), 204 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 37f756a68..870f5d157 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -2,135 +2,21 @@ Apple backend module. 
""" import logging -from datetime import datetime -from urllib.parse import urlparse - +from .openid_connect import OpenIDConnectBackend, STATE_KEY from oic.oauth2.message import Message -from oic import oic -from oic import rndstr from oic.oic.message import AuthorizationResponse -from oic.oic.message import ProviderConfigurationResponse -from oic.oic.message import RegistrationRequest -from oic.utils.authn.authn_context import UNSPECIFIED -from oic.utils.authn.client import CLIENT_AUTHN_METHOD - import satosa.logging_util as lu -from satosa.internal import AuthenticationInformation -from satosa.internal import InternalData -from .base import BackendModule -from .oauth import get_metadata_desc_for_oauth_backend -from ..exception import SATOSAAuthenticationError, SATOSAError -from ..response import Redirect - +from ..exception import SATOSAAuthenticationError import json import requests logger = logging.getLogger(__name__) -NONCE_KEY = "oidc_nonce" -STATE_KEY = "oidc_state" - # https://developer.okta.com/blog/2019/06/04/what-the-heck-is-sign-in-with-apple -class AppleBackend(BackendModule): +class AppleBackend(OpenIDConnectBackend): """Sign in with Apple backend""" - def __init__(self, auth_callback_func, internal_attributes, config, base_url, name): - """ - Sign in with Apple backend module. - :param auth_callback_func: Callback should be called by the module after the authorization - in the backend is done. - :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and - the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and - RP's expects namevice. - :param config: Configuration parameters for the module. 
- :param base_url: base url of the service - :param name: name of the plugin - - :type auth_callback_func: - (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response - :type internal_attributes: dict[string, dict[str, str | list[str]]] - :type config: dict[str, dict[str, str] | list[str]] - :type base_url: str - :type name: str - """ - super().__init__(auth_callback_func, internal_attributes, base_url, name) - self.auth_callback_func = auth_callback_func - self.config = config - self.client = _create_client( - config["provider_metadata"], - config["client"]["client_metadata"], - config["client"].get("verify_ssl", True), - ) - if "scope" not in config["client"]["auth_req_params"]: - config["auth_req_params"]["scope"] = "openid" - if "response_type" not in config["client"]["auth_req_params"]: - config["auth_req_params"]["response_type"] = "code" - - def start_auth(self, context, request_info): - """ - See super class method satosa.backends.base#start_auth - :type context: satosa.context.Context - :type request_info: satosa.internal.InternalData - """ - oidc_nonce = rndstr() - oidc_state = rndstr() - state_data = {NONCE_KEY: oidc_nonce, STATE_KEY: oidc_state} - context.state[self.name] = state_data - - args = { - "scope": self.config["client"]["auth_req_params"]["scope"], - "response_type": self.config["client"]["auth_req_params"]["response_type"], - "client_id": self.client.client_id, - "redirect_uri": self.client.registration_response["redirect_uris"][0], - "state": oidc_state, - "nonce": oidc_nonce, - } - args.update(self.config["client"]["auth_req_params"]) - auth_req = self.client.construct_AuthorizationRequest(request_args=args) - login_url = auth_req.request(self.client.authorization_endpoint) - return Redirect(login_url) - - def register_endpoints(self): - """ - Creates a list of all the endpoints this backend module needs to listen to. 
In this case - it's the authentication response from the underlying OP that is redirected from the OP to - the proxy. - :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] - :return: A list that can be used to map the request to SATOSA to this endpoint. - """ - url_map = [] - redirect_path = urlparse( - self.config["client"]["client_metadata"]["redirect_uris"][0] - ).path - if not redirect_path: - raise SATOSAError("Missing path in redirect uri") - - url_map.append(("^%s$" % redirect_path.lstrip("/"), self.response_endpoint)) - return url_map - - def _verify_nonce(self, nonce, context): - """ - Verify the received OIDC 'nonce' from the ID Token. - :param nonce: OIDC nonce - :type nonce: str - :param context: current request context - :type context: satosa.context.Context - :raise SATOSAAuthenticationError: if the nonce is incorrect - """ - backend_state = context.state[self.name] - if nonce != backend_state[NONCE_KEY]: - msg = "Missing or invalid nonce in authn response for state: {}".format( - backend_state - ) - logline = lu.LOG_FMT.format( - id=lu.get_session_id(context.state), message=msg - ) - logger.debug(logline) - raise SATOSAAuthenticationError( - context.state, "Missing or invalid nonce in authn response" - ) - def _get_tokens(self, authn_response, context): """ :param authn_response: authentication response from OP @@ -169,25 +55,6 @@ def _get_tokens(self, authn_response, context): return authn_response.get("access_token"), authn_response.get("id_token") - def _check_error_response(self, response, context): - """ - Check if the response is an OAuth error response. 
- :param response: the OIDC response - :type response: oic.oic.message - :raise SATOSAAuthenticationError: if the response is an OAuth error response - """ - if "error" in response: - msg = "{name} error: {error} {description}".format( - name=type(response).__name__, - error=response["error"], - description=response.get("error_description", ""), - ) - logline = lu.LOG_FMT.format( - id=lu.get_session_id(context.state), message=msg - ) - logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "Access denied") - def response_endpoint(self, context, *args): """ Handles the authentication response from the OP. @@ -249,71 +116,3 @@ def response_endpoint(self, context, *args): all_user_claims, self.client.authorization_endpoint ) return self.auth_callback_func(context, internal_resp) - - def _translate_response(self, response, issuer): - """ - Translates oidc response to SATOSA internal response. - :type response: dict[str, str] - :type issuer: str - :type subject_type: str - :rtype: InternalData - - :param response: Dictioary with attribute name as key. - :param issuer: The oidc op that gave the repsonse. - :param subject_type: public or pairwise according to oidc standard. - :return: A SATOSA internal response. - """ - auth_info = AuthenticationInformation(UNSPECIFIED, str(datetime.now()), issuer) - internal_resp = InternalData(auth_info=auth_info) - internal_resp.attributes = self.converter.to_internal("openid", response) - internal_resp.subject_id = response["sub"] - return internal_resp - - def get_metadata_desc(self): - """ - See satosa.backends.oauth.get_metadata_desc - :rtype: satosa.metadata_creation.description.MetadataDescription - """ - return get_metadata_desc_for_oauth_backend( - self.config["provider_metadata"]["issuer"], self.config - ) - - -def _create_client(provider_metadata, client_metadata, verify_ssl=True): - """ - Create a pyoidc client instance. 
- :param provider_metadata: provider configuration information - :type provider_metadata: Mapping[str, Union[str, Sequence[str]]] - :param client_metadata: client metadata - :type client_metadata: Mapping[str, Union[str, Sequence[str]]] - :return: client instance to use for communicating with the configured provider - :rtype: oic.oic.Client - """ - client = oic.Client(client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl) - - # Provider configuration information - if "authorization_endpoint" in provider_metadata: - # no dynamic discovery necessary - client.handle_provider_config( - ProviderConfigurationResponse(**provider_metadata), - provider_metadata["issuer"], - ) - else: - # do dynamic discovery - client.provider_config(provider_metadata["issuer"]) - - # Client information - if "client_id" in client_metadata: - # static client info provided - client.store_registration_info(RegistrationRequest(**client_metadata)) - else: - # do dynamic registration - client.register( - client.provider_info["registration_endpoint"], **client_metadata - ) - - client.subject_type = ( - client.registration_response.get("subject_type") - or client.provider_info["subject_types_supported"][0] - ) - return client From f4464c4019c9e0f29f59957e0c18c933605448ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Nov 2022 12:06:06 +0100 Subject: [PATCH 275/288] fix: correct user info loading in apple backend incorrect function was used for parsing json (load is for files, loads for strings) and the error was masked because of too broad except clause --- src/satosa/backends/apple.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 870f5d157..0415f9b14 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -76,8 +76,8 @@ def response_endpoint(self, context, *args): # - https://developer.apple.com/documentation/sign_in_with_apple/namei try: 
userdata = context.request.get("user", "{}") - userinfo = json.load(userdata) - except Exception: + userinfo = json.loads(userdata) + except json.JSONDecodeError: userinfo = {} authn_resp = self.client.parse_response( From 2e3a78238d7da18b92a9ac18d4bfe76a8fe7455f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 29 Nov 2022 12:49:37 +0100 Subject: [PATCH 276/288] fix: prevent exception in attribute mapping when internal_attributes contain nested attribute but the actual value is not nested --- src/satosa/attribute_mapping.py | 8 +++-- tests/satosa/test_attribute_mapping.py | 50 ++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/src/satosa/attribute_mapping.py b/src/satosa/attribute_mapping.py index e8729561c..d5745864c 100644 --- a/src/satosa/attribute_mapping.py +++ b/src/satosa/attribute_mapping.py @@ -1,6 +1,7 @@ import logging from collections import defaultdict from itertools import chain +from typing import Mapping from mako.template import Template @@ -97,8 +98,9 @@ def to_internal(self, attribute_profile, external_dict): continue external_attribute_name = mapping[attribute_profile] - attribute_values = self._collate_attribute_values_by_priority_order(external_attribute_name, - external_dict) + attribute_values = self._collate_attribute_values_by_priority_order( + external_attribute_name, external_dict + ) if attribute_values: # Only insert key if it has some values logline = "backend attribute {external} mapped to {internal} ({value})".format( external=external_attribute_name, internal=internal_attribute_name, value=attribute_values @@ -157,6 +159,8 @@ def _get_nested_attribute_value(self, nested_key, data): d = data for key in keys: + if not isinstance(d, Mapping): + return None d = d.get(key) if d is None: return None diff --git a/tests/satosa/test_attribute_mapping.py b/tests/satosa/test_attribute_mapping.py index c109ab717..93a3dff78 100644 --- a/tests/satosa/test_attribute_mapping.py 
+++ b/tests/satosa/test_attribute_mapping.py @@ -5,6 +5,56 @@ from satosa.attribute_mapping import AttributeMapper +class TestAttributeMapperNestedDataDifferentAttrProfile: + def test_nested_mapping_nested_data_to_internal(self): + mapping = { + "attributes": { + "name": { + "openid": ["name"] + }, + "givenname": { + "openid": ["given_name", "name.firstName"] + }, + }, + } + + data = { + "name": { + "firstName": "value-first", + "lastName": "value-last", + }, + "email": "someuser@apple.com", + } + + converter = AttributeMapper(mapping) + internal_repr = converter.to_internal("openid", data) + assert internal_repr["name"] == [data["name"]] + assert internal_repr["givenname"] == [data["name"]["firstName"]] + + + def test_nested_mapping_simple_data_to_internal(self): + mapping = { + "attributes": { + "name": { + "openid": ["name"] + }, + "givenname": { + "openid": ["given_name", "name.firstName"] + }, + }, + } + + data = { + "name": "value-first", + "email": "someuser@google.com", + } + + converter = AttributeMapper(mapping) + internal_repr = converter.to_internal("openid", data) + assert internal_repr["name"] == [data["name"]] + assert internal_repr.get("givenname") is None + + class TestAttributeMapper: def test_nested_attribute_to_internal(self): mapping = { From 45c4aa15a5f8b47f46e4be3d5829b7d8a905993c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Mon, 13 Nov 2023 13:38:16 +0100 Subject: [PATCH 277/288] fix: convert strings to booleans in Apple backend --- src/satosa/backends/apple.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 0415f9b14..3a7c4290d 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -13,6 +13,7 @@ logger = logging.getLogger(__name__) + # https://developer.okta.com/blog/2019/06/04/what-the-heck-is-sign-in-with-apple class AppleBackend(OpenIDConnectBackend): """Sign in with Apple backend""" @@ -109,6 +110,12 @@ def 
response_endpoint(self, context, *args): raise SATOSAAuthenticationError(context.state, "No user info available.") all_user_claims = dict(list(userinfo.items()) + list(id_token_claims.items())) + + # convert "string or Boolean" claims to actual booleans + for bool_claim_name in ["email_verified", "is_private_email"]: + if type(userinfo.get(bool_claim_name)) == str: + userinfo[bool_claim_name] = userinfo[bool_claim_name] == "true" + msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) From 0d6615aa3357b1752855c2f3667ab498b3fe154e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 14 Nov 2023 00:23:59 +0100 Subject: [PATCH 278/288] refactor: easier to read boolean expression Co-authored-by: Ivan Kanakarakis --- src/satosa/backends/apple.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py index 3a7c4290d..b17308c48 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -113,8 +113,11 @@ def response_endpoint(self, context, *args): # convert "string or Boolean" claims to actual booleans for bool_claim_name in ["email_verified", "is_private_email"]: - if type(userinfo.get(bool_claim_name)) == str: - userinfo[bool_claim_name] = userinfo[bool_claim_name] == "true" + userinfo[bool_claim_name] = ( + True + if userinfo[bool_claim_name] == "true" + else False + ) msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) From ef4c10d0edf5f3188ae887044bd96eb81b67e73b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Tue, 14 Nov 2023 00:48:36 +0100 Subject: [PATCH 279/288] fix: only modify existing string booleans in Apple backend --- src/satosa/backends/apple.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/satosa/backends/apple.py 
b/src/satosa/backends/apple.py index b17308c48..f7c1189ea 100644 --- a/src/satosa/backends/apple.py +++ b/src/satosa/backends/apple.py @@ -113,11 +113,10 @@ def response_endpoint(self, context, *args): # convert "string or Boolean" claims to actual booleans for bool_claim_name in ["email_verified", "is_private_email"]: - userinfo[bool_claim_name] = ( - True - if userinfo[bool_claim_name] == "true" - else False - ) + if type(all_user_claims.get(bool_claim_name)) == str: + all_user_claims[bool_claim_name] = ( + True if all_user_claims[bool_claim_name] == "true" else False + ) msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) From 355eb05f0d8754978dd121abcdf9247c861b5c92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pavel=20B=C5=99ou=C5=A1ek?= Date: Wed, 10 Jan 2024 11:32:17 +0100 Subject: [PATCH 280/288] fix: correct typo in saml2 exception is thrown during error handling --- src/satosa/backends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index ec99cad06..8be4572d4 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -435,7 +435,7 @@ def authn_response(self, context, binding): except Exception as e: msg = { "message": "Authentication failed", - "error": f"Failed to parse Authn response: {err}", + "error": f"Failed to parse Authn response: {e}", } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline, exc_info=True) From 1b7236822017ab5cc699e845e633e69c7fbdfe00 Mon Sep 17 00:00:00 2001 From: Jonathan Perret Date: Thu, 25 Apr 2024 14:00:42 +0200 Subject: [PATCH 281/288] BaseProcessor: add missing 'self' --- src/satosa/micro_services/processors/base_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/micro_services/processors/base_processor.py b/src/satosa/micro_services/processors/base_processor.py index 
ad5eb10b5..b29b7f294 100644 --- a/src/satosa/micro_services/processors/base_processor.py +++ b/src/satosa/micro_services/processors/base_processor.py @@ -2,5 +2,5 @@ class BaseProcessor(object): def __init__(self): pass - def process(internal_data, attribute, **kwargs): + def process(self, internal_data, attribute, **kwargs): pass From af6ff771ad2a1a16e00d355ad5b7249cc64319fb Mon Sep 17 00:00:00 2001 From: Dave Lafferty Date: Thu, 23 May 2024 09:52:32 -0400 Subject: [PATCH 282/288] ACR documentation changes. --- doc/README.md | 4 ++-- example/plugins/backends/saml2_backend.yaml.example | 4 ++-- example/plugins/frontends/saml2_frontend.yaml.example | 4 ++-- .../plugins/frontends/saml2_virtualcofrontend.yaml.example | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/README.md b/doc/README.md index fd266723b..8b6416eda 100644 --- a/doc/README.md +++ b/doc/README.md @@ -241,8 +241,8 @@ provider will be preserved, and when using a OAuth or OpenID Connect backend, th config: [...] 
acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 + "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" ``` ### SAML2 Frontend diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index 3d3f25c0d..76f9406ee 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -4,8 +4,8 @@ config: idp_blacklist_file: /path/to/blacklist.json acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 + "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" # disco_srv must be defined if there is more than one IdP in the metadata specified above disco_srv: http://disco.example.com diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index a527ab652..342ae03f5 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ b/example/plugins/frontends/saml2_frontend.yaml.example @@ -2,8 +2,8 @@ module: satosa.frontends.saml2.SAMLFrontend name: Saml2IDP config: #acr_mapping: - # "": default-LoA - # "https://accounts.google.com": LoA1 + # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" endpoints: single_sign_on_service: diff --git a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index a1ed8ad8f..f5a87e9f2 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -91,8 +91,8 @@ config: lifetime: {minutes: 15} name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:uri acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 + "": 
"urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" endpoints: single_sign_on_service: From f6155f4825a40397417839dba5ff2e70de17b69c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gyula=20Szab=C3=B3?= Date: Tue, 4 Jun 2024 09:40:55 +0200 Subject: [PATCH 283/288] typo in README.md emailAdress -> emailAddress --- doc/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/README.md b/doc/README.md index fd266723b..15ce71115 100644 --- a/doc/README.md +++ b/doc/README.md @@ -129,7 +129,7 @@ attribute to use, e.g. `address.formatted` will access the attribute value attributes: mail: openid: [email] - saml: [mail, emailAdress, email] + saml: [mail, emailAddress, email] address: openid: [address.formatted] saml: [postaladdress] From 14c64d570076316ac396a935f29120f705e74cea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gyula=20Szab=C3=B3?= Date: Wed, 5 Jun 2024 08:21:11 +0200 Subject: [PATCH 284/288] Update one-to-many.md --- doc/one-to-many.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/one-to-many.md b/doc/one-to-many.md index c9b08851f..c370db9d8 100644 --- a/doc/one-to-many.md +++ b/doc/one-to-many.md @@ -58,7 +58,7 @@ be configured with a SAML2 frontend and an SAML2 backend. mv internal_attributes.yaml.example internal_attributes.yaml ``` - 1. Map the necessary attributes, see the [Attribute mapping configuration](README.md#attr_map) + 1. Map the necessary attributes, see the [Attribute mapping configuration](README.md#attribute-mapping-configuration-internal_attributesyaml) section of the proxy configuration instructions for more information. 
From 75c325b3d0c4a73e18994189cacda364804a709b Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 6 Nov 2024 14:06:45 +0200 Subject: [PATCH 285/288] Fix typo in error handler for BadRequest Signed-off-by: Ivan Kanakarakis --- src/satosa/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/base.py b/src/satosa/base.py index 1e17c8cbe..40af19979 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -280,7 +280,7 @@ def run(self, context): if generic_error_url: redirect_url = f"{generic_error_url}?errorid={error_id}" return Redirect(generic_error_url) - return BadRequest(error) + return BadRequest(e.error) except SATOSAMissingStateError as e: error_id = uuid.uuid4().urn msg = { From 2644c73cc41774f6dbae16055c2ab0aaa8ff5e66 Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Wed, 8 Jan 2025 13:50:41 +0200 Subject: [PATCH 286/288] Release v8.5.0 - openid connect backend: Add OAuth2/OIDC backend based on idpy-oidc (new extra requirement `idpy_oidc_backend` to pull the library dependency) - apple backend: Rework the Apple backend to be based on the generic OpenIDConnectBackend and fix the userinfo loading - Restructure fatal error messages to redirect to generic error page when errors occur - Allow multiple values for the "resource" query param - Fix checks for missing state from cookie and missing relay state - Allow loading of tuples from YAML configs - docs: minor fixes Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 11 +++++++++++ setup.py | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 35f7a82c6..1a8457bbb 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.4.0 +current_version = 8.5.0 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index ee782f08f..380f8bda0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## 8.5.0 (2025-01-08) +- 
openid connect backend: Add OAuth2/OIDC backend based on idpy-oidc (new extra requirement `idpy_oidc_backend` to pull the library dependecy) +- apple backend: Rework the Apple backend to be based on the generic OpenIDConnectBackend and fix the userinfo loading +- Restructure fatal error messages to redirect to generic error page when an errors occur +- Allow multiple values for the "resource" query param +- Fix checks for missing state from cookie and missing relay state +- Allow loading of tuples from YAML configs +- docs: minor fixes + + ## 8.4.0 (2023-06-11) - Make cookie parameters configurable diff --git a/setup.py b/setup.py index 51bb389ea..5e802545c 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.4.0', + version='8.5.0', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', From 382094bb6b8869fa5631dd0b13d907a6df018da6 Mon Sep 17 00:00:00 2001 From: Vasco Fernandes Date: Tue, 14 Jan 2025 01:15:41 +0000 Subject: [PATCH 287/288] Update saml2.py avoid repeated work --- src/satosa/frontends/saml2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index cecd533db..22f43376d 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -235,7 +235,7 @@ def _handle_authn_request(self, context, binding_in, idp): return ServiceError("Incorrect request from requester: %s" % e) requester = resp_args["sp_entity_id"] - context.state[self.name] = self._create_state_data(context, idp.response_args(authn_req), + context.state[self.name] = self._create_state_data(context, resp_args, context.request.get("RelayState")) subject = authn_req.subject From 2cdf670ce12f3ae44e9735d1eff7cf82a10ddafe Mon Sep 17 00:00:00 2001 From: Ivan Kanakarakis Date: Mon, 10 Feb 2025 17:55:07 +0200 Subject: [PATCH 288/288] Release version 8.5.1 ## 8.5.1 (2025-02-10) - ldap_attribute_store plugin: Add configuration 
option `use_all_results` to specify whether all LDAP results should be processed. - ldap_attribute_store plugin: Add configuration option `provider_attribute` to define the extracted attribute (ie, domain) that will be used to select the LDAP configuration. - ldap_attribute_store plugin: Add configuration option search_filter to define complex LDAP queries, when the default search based on an identifier is not good enough. - ldap_attribute_store plugin: Add configuration option pool_lifetime. The LDAP Server may abandon connections after some time without notifying the client. The new option allows to set the maximum pool lifetime, so that connections close on the client side. Signed-off-by: Ivan Kanakarakis --- .bumpversion.cfg | 2 +- CHANGELOG.md | 16 ++++++++++++++++ setup.py | 2 +- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 1a8457bbb..f9133d54b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 8.5.0 +current_version = 8.5.1 commit = True tag = True diff --git a/CHANGELOG.md b/CHANGELOG.md index 380f8bda0..8287887df 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## 8.5.1 (2025-02-10) + +- ldap_attribute_store plugin: Add configuration option `use_all_results` to + specify whether all LDAP results should be processed. +- ldap_attribute_store plugin: Add configuration option `provider_attribute` to + define the extracted attribute (ie, domain) that will be used to select the LDAP + configuration. +- ldap_attribute_store plugin: Add configuration option search_filter to define + complex LDAP queries, when the default search based on an identifier is not + good enough. +- ldap_attribute_store plugin: Add configuration option pool_lifetime. The LDAP + Server may abandon connections after some time without notifying the client. 
+ The new option allows to set the maximum pool lifetime, so that connections + close on the client side. + + ## 8.5.0 (2025-01-08) - openid connect backend: Add OAuth2/OIDC backend based on idpy-oidc (new extra requirement `idpy_oidc_backend` to pull the library dependecy) diff --git a/setup.py b/setup.py index 5e802545c..70d1e51ab 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='8.5.0', + version='8.5.1', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se',