diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b29a4f3fa..f9133d54b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 6.1.0 +current_version = 8.5.1 commit = True tag = True diff --git a/.gitignore b/.gitignore index ac511933f..17b270187 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,10 @@ **/.DS_Store _build .idea +*.iml *.pyc *.log* -example/* -!example/plugins - *.xml *.db *static/ @@ -16,3 +14,4 @@ example/* build/ dist/ .coverage +venv/ diff --git a/.travis.yml b/.travis.yml index 77214ff2c..f63586ac0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,62 +1,171 @@ -dist: xenial -addons: - apt: - packages: - - xmlsec1 +os: linux +dist: bionic +language: python services: - docker - mongodb -language: python +before_install: + - sudo apt-get install -y xmlsec1 + install: - pip install tox - pip install tox-travis script: - - tox + - tox -r jobs: + allow_failures: + - python: 3.9-dev include: - python: 3.6 - python: 3.7 + - python: 3.8 - python: pypy3 - - stage: Deploy latest version - script: skip - deploy: - - provider: script - script: scripts/travis_create_docker_image_branch.sh - on: - repo: IdentityPython/SATOSA - branch: master + - stage: Expose env-var information + script: | + cat </reload-metadata`) that allows external + triggers to reload the frontend's metadata. This setting is disabled by + default. It is up to the user to protect the endpoint if enabled. This + feature requires pysaml2 > 7.0.1 +- backends: the saml2 backend derives the encryption keys based on the + `encryption_keypairs` configuration option, otherwise falling back to + the `key_file` and `cert_file` pair. This is now reflected in the internal + pysaml2 configuration. +- backends: the saml2 backend `sp` property is now of type + `saml2.client::Saml2Client` instead of `saml2.client_base::Base`. 
This allows + us to call the higher level method + `saml2.client::Saml2Client::prepare_for_negotiated_authenticate` instead of + `saml2.client_base::Base::create_authn_request` to properly behave when + needing to sign the AuthnRequest using the Redirect binding. +- backends: the saml2 backend has a new option `enable_metadata_reload` to + expose an endpoint (`/<backend_name>/reload-metadata`) that allows external + triggers to reload the backend's metadata. This setting is disabled by + default. It is up to the user to protect the endpoint if enabled. This + feature requires pysaml2 > 7.0.1 +- backends: new ReflectorBackend to help make frontend debugging easier and + development quicker. +- backends: the saml2 backend has a new configuration option + `send_requester_id` to specify whether Scoping/RequesterID element should be + part of the AuthnRequest. +- micro-services: new DecideBackendByTargetIssuer micro-service, to select + a target backend based on the target issuer. +- micro-services: new DiscoToTargetIssuer micro-service, to set the discovery + protocol response to be the target issuer. +- micro-services: new IdpHinting micro-service, to detect if an idp-hinting + feature has been requested and set the target entityID. Enabling this + micro-service will result in skipping the discovery service and using the + specified entityID as the IdP to be used. The IdP entityID is expected to be + specified as a query-param value on the authentication request. +- micro-services: new AttributePolicy micro-service, which is able to force + attribute policies for requesters by limiting results to a predefined set of + allowed attributes. +- micro-services: the PrimaryIdentifier micro-service has a new option + `replace_subject_id` to specify whether to replace the `subject_id` with the + constructed primary identifier. +- micro-services: PrimaryIdentifier is set only if there is a value. +- micro-services: AddSyntheticAttributes has various small fixes. 
+- micro-services: ScopeExtractorProcessor can handle string values. +- dependencies: the `pystache` package has been replaced by `chevron`, as + `pystache` seems to be abandoned and will not work with python v3.10 and + `setuptools` v58 or newer. This package is a dependency of the + `satosa.micro_services.attribute_generation.AddSyntheticAttributes` + micro-service. +- tests: MongoDB flags have been updated to cater for deprecated flags. +- docs: updated with information about the newly added micro-services. +- docs: various typo fixes. +- docs: various example configuration fixes. + + +## 7.0.3 (2021-01-21) + +- dependencies: Set minimum pysaml2 version to v6.5.1 to fix internal XML + parser issues around the xs and xsd namespace prefixes declarations + + +## 7.0.2 (2021-01-20) - Security release for pySAML2 dependency + +- Add RegexSubProcessor attribute processor +- Fix SAMLVirtualCoFrontend metadata generation +- frontends: Deprecate the sign_alg and digest_alg configuration options on the + saml2 frontend. 
Instead, use the signing_algorithm and digest_algorithm + configuration options under the service/idp configuration path (not under + service/idp/policy/default) +- backends: New backend to login with Apple ID +- dependencies: Set minimum pysaml2 version to v6.5.0 to make sure we get a + version patched for CVE-2021-21238 and CVE-2021-21239 +- build: Fix the CI base image +- tests: Fix entity-category checks +- docs: Document the sub_hash_salt configuration for the OIDC frontend +- examples: Add entityid_endpoint to the saml backend and frontend + configuration +- examples: Fix the SAMLVirtualCoFrontend example configuration + + +## 7.0.1 (2020-06-09) + +- build: fix the CI release process + + +## 7.0.0 (2020-06-09) + +- Make the AuthnContextClassRefs available through the context +- Extend YAML parsing to understand the `!ENV` and `!ENVFILE` tags, that read + values or file contents from the environment +- Add `satosa.yaml` module to handle YAML parsing +- BREAKING: Remove previously deprecated configuration options: + - `hash`: use the hasher micro-service instead + - `USER_ID_HASH_SALT`: use the hasher micro-service instead +- BREAKING: Remove previously deprecated classes: + - `SAMLInternalResponse`: use `satosa.internal.InternalData` instead + - `InternalRequest`: use `satosa.internal.InternalData` instead + - `InternalResponse`: use `satosa.internal.InternalData` instead + - `UserIdHashType`: use the hasher micro-service instead + - `UserIdHasher`: use the hasher micro-service instead +- BREAKING: Remove previously deprecated functions: + - `hash_attributes`: use the hasher micro-service instead + - `oidc_subject_type_to_hash_type`: use `satosa.internal.InternalData.subject_type` directly + - `saml_name_id_format_to_hash_type`: use `satosa.internal.InternalData.subject_type` directly + - `hash_type_to_saml_name_id_format`: use `satosa.internal.InternalData.subject_type` directly +- BREAKING: Remove previously deprecated modules: + - 
`src/satosa/internal_data.py` +- BREAKING: Remove previously deprecated properties of the `saml2.internal.InternalData` class: + - `name_id`: use `subject_id` instead, + - `user_id`: use `subject_id` instead, + - `user_id_hash_type`: use `subject_type` instead, + - `approved_attributes`: use `attributes` instead, +- The cookie is now a session-cookie; to have the cookie removed + immediately after use, the CONTEXT_STATE_DELETE configuration option should + be set to `True` +- Create dedicated module to handle the proxy version +- Set the logger to log to stdout on DEBUG level by default +- Cleanup code around the wsgi calls +- micro-services: separate core from micro-services; drop checks for + micro-services order; drop references to the Consent and AccountLinking + micro-services +- micro-services: generate a random name for the pool name when REUSABLE client + strategy is used for the ldap-attribute-store micro-service. +- docs: improve example proxy configuration +- docs: minor fixes/typos/etc +- build: update CI to use Travis-CI stages +- build: run tests for Python3.8 +- build: tag docker image by commit, branch, PR number, version and "latest" + + +## 6.1.0 (2020-02-28) - Security release for pySAML2 dependency - Set the SameSite cookie attribute to "None" - Add compatibility support for the SameSite attribute for incompatible @@ -112,7 +380,6 @@ Trigger new version build to automatically upload to PyPI and docker hub. 
- Add initial eIDAS support - Support memoization of IdP selection when using MDQ - plugins: Warn when AssertionConsumerService binding is HTTP-Redirect in the saml2 backend -- plugins: Add SAMLUnsolicitedFrontend frontend - plugins: Add SAMLVirtualCoFrontend frontend - plugins: Add extra_scopes configuration to support multiple scopes - plugins: Use the latest pyop version diff --git a/README.md b/README.md index cfa4ae5dd..4a8d757eb 100644 --- a/README.md +++ b/README.md @@ -1,62 +1,84 @@ # SATOSA -[![Build Status](https://travis-ci.org/IdentityPython/SATOSA.svg?branch=travis)](https://travis-ci.org/IdentityPython/SATOSA) + [![PyPI](https://img.shields.io/pypi/v/SATOSA.svg)](https://pypi.python.org/pypi/SATOSA) -A configurable proxy for translating between different authentication protocols such as SAML2, -OpenID Connect and OAuth2. +A configurable proxy for translating between different authentication protocols +such as SAML2, OpenID Connect and OAuth2. + # Table of Contents - [Installation](doc/README.md#installation) - - [Docker](doc/README.md#docker) - - [Manual installation](doc/README.md#manual_installation) - - [Dependencies](doc/README.md#dependencies) - - [Instructions](doc/README.md#install_instructions) + - [Docker](doc/README.md#docker) + - [Manual installation](doc/README.md#manual-installation) + - [Dependencies](doc/README.md#dependencies) + - [Instructions](doc/README.md#instructions) - [Configuration](doc/README.md#configuration) - - [SATOSA proxy configuration: proxy_conf.yaml.example](doc/README.md#proxy_conf) - - [Additional services](doc/README.md#additional_service) - - [Attribute mapping configuration: internal_attributes.yaml](doc/README.md#attr_map) - - [attributes](doc/README.md#attributes) - - [user_id_from_attrs](doc/README.md#user_id_from_attrs) - - [user_id_to_attr](doc/README.md#user_id_to_attr) - - [hash](doc/README.md#hash) + - [SATOSA proxy configuration: 
proxy_conf.yaml.example](doc/README.md#satosa-proxy-configuration-proxy_confyamlexample) + - [Attribute mapping configuration: internal_attributes.yaml](doc/README.md#attribute-mapping-configuration-internal_attributesyaml) + - [attributes](doc/README.md#attributes) + - [user_id_from_attrs](doc/README.md#user_id_from_attrs) + - [user_id_to_attr](doc/README.md#user_id_to_attr) - [Plugins](doc/README.md#plugins) - - [SAML2 plugins](doc/README.md#saml_plugin) - - [Metadata](doc/README.md#metadata) - - [Frontend](doc/README.md#frontend) - - [Backend](doc/README.md#backend) - - [Name ID Format](doc/README.md#name_id) - - [OpenID Connect plugins](doc/README.md#openid_plugin) - - [Backend](doc/README.md#backend) - - [Social login plugins](doc/README.md#social_plugins) - - [Google](doc/README.md#google) - - [Facebook](doc/README.md#facebook) -- [SAML metadata](doc/README.md#saml_metadata) -- [Running the proxy application](doc/README.md#run) + - [SAML2 plugins](doc/README.md#saml2-plugins) + - [Metadata](doc/README.md#metadata) + - [AuthnContextClassRef](doc/README.md#providing-authncontextclassref) + - [Frontend](doc/README.md#saml2-frontend) + - [Custom attribute release](doc/README.md#custom-attribute-release) + - [Policy](doc/README.md#policy) + - [Backend](doc/README.md#saml2-backend) + - [Name ID Format](doc/README.md#name-id-format) + - [Discovery service](doc/README.md#use-a-discovery-service) + - [ForceAuthn option](doc/README.md#mirror-the-saml-forceauthn-option) + - [Memorize IdP](doc/README.md#memorize-the-idp-selected-through-the-discovery-service) + - [OpenID Connect plugins](doc/README.md#openid-connect-plugins) + - [Frontend](doc/README.md#oidc-frontend) + - [Backend](doc/README.md#oidc-backend) + - [Social login plugins](doc/README.md#social-login-plugins) + - [Google](doc/README.md#google) + - [Facebook](doc/README.md#facebook) + - [Dummy adapters](doc/README.md#dummy-adapters) + - [Micro-services](doc/README.md#micro-services) +- [Generating proxy 
metadata](doc/README.md#generate-proxy-metadata) +- [Running the proxy application](doc/README.md#running-the-proxy-application) +- [External contributions](doc/README.md#external-contributions) # Use cases -In this section a set of use cases for the proxy is presented. + +In this section a set of use cases for the proxy is presented. + ## SAML2<->SAML2 -There are SAML2 service providers for example Box which is not able to handle multiple identity -providers. For more information about how to set up, configure and run such a proxy instance -please visit [Single Service Provider<->Multiple Identity providers](doc/one-to-many.md) -If an identity provider can not communicate with service providers in for example a federation the -can convert request and make the communication possible. +There are SAML2 service providers for example Box which is not able to handle +multiple identity providers. For more information about how to set up, +configure and run such a proxy instance please visit [Single Service +Provider<->Multiple Identity providers](doc/one-to-many.md) + +If an identity provider can not communicate with service providers in for +example a federation, they can convert requests and make the communication +possible. + ## SAML2<->Social logins -This setup makes it possible to connect a SAML2 service provider to multiple social media identity -providers such as Google and Facebook. The proxy makes it possible to mirror a identity provider by -generating SAML2 metadata corresponding that provider and create dynamic endpoint which -are connected to a single identity provider. -For more information about how to set up, configure and run such a proxy instance please visit -[SAML2<->Social logins](doc/SAML2-to-Social_logins.md) + +This setup makes it possible to connect a SAML2 service provider to multiple +social media identity providers such as Google and Facebook. 
The proxy makes it +possible to mirror an identity provider by generating SAML2 metadata +corresponding to that provider and create dynamic endpoints which are connected to +a single identity provider. + +For more information about how to set up, configure and run such a proxy +instance please read [SAML2<->Social logins](doc/SAML2-to-Social_logins.md) + ## SAML2<->OIDC -The proxy is able to act as a proxy between a SAML2 service provider and a OpenID connect provider -[SAML2<->OIDC](doc/saml2-to-oidc.md) + +The proxy is able to act as a proxy between a SAML2 service provider and a +OpenID connect provider [SAML2<->OIDC](doc/saml2-to-oidc.md) # Contact -If you have any questions regarding operations/deployment of SATOSA please use the satosa-users [mailing list](https://lists.sunet.se/listinfo/satosa-users). + +If you have any questions regarding operations/deployment of SATOSA please use +the satosa-users [mailing list](https://lists.sunet.se/postorius/lists/idpy-discuss.lists.sunet.se/). diff --git a/doc/README.md b/doc/README.md index 6722fa413..a20458774 100644 --- a/doc/README.md +++ b/doc/README.md @@ -4,39 +4,86 @@ This document describes how to install and configure the SATOSA proxy. # Installation -## Docker -A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/r/satosa/), and is the +## Docker + +A pre-built Docker image is accessible at the [Docker Hub](https://hub.docker.com/_/satosa), and is the recommended ways of running the proxy. -## Manual installation +## Manual installation + +### Dependencies -### Dependencies SATOSA requires Python 3.4 (or above), and the following packages on Ubuntu: -``` + +```bash apt-get install libffi-dev libssl-dev xmlsec1 ```` -### Instructions -1. Download the SATOSA proxy project as a [compressed archive](https://github.com/SUNET/SATOSA/releases) +### Instructions + +1. 
Download the SATOSA proxy project as a [compressed archive](https://github.com/IdentityPython/SATOSA/releases) and unpack it to ``. -1. Install the application: +2. Install the application: ```bash pip install ``` -Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/r/satosa/) can be used. +Alternatively the application can be installed directly from PyPI (`pip install satosa`), or the [Docker image](https://hub.docker.com/_/satosa) can be used. + # Configuration + +SATOSA is configured using YAML. + All default configuration files, as well as an example WSGI application for the proxy, can be found in the [example directory](../example). -## SATOSA proxy configuration: `proxy_conf.yaml.example` +The default YAML syntax is extended to include the capability to resolve +environment variables. The following tags are used to achieve this: + +* The `!ENV` tag + +The `!ENV` tag is followed by a string that denotes the environment variable +name. It will be replaced by the value of the environment variable with the +same name. + +In the example below `LDAP_BIND_PASSWORD` will, at runtime, be replaced with +the value from the process environment variable of the same name. If the +process environment has been set with `LDAP_BIND_PASSWORD=secret_password` then +the configuration value for `bind_password` will be `secret_password`. + +```yaml +bind_password: !ENV LDAP_BIND_PASSWORD +``` + +* The `!ENVFILE` tag + +The `!ENVFILE` tag is followed by a string that denotes the environment +variable name. It will be replaced by the value of the environment variable +with the same name. + +In the example below `LDAP_BIND_PASSWORD_FILE` will, at runtime, be replaced +with the value from the process environment variable of the same name. 
If the +process environment has been set with +`LDAP_BIND_PASSWORD_FILE=/etc/satosa/secrets/ldap.txt` then the configuration +value for `bind_password` will be `secret_password`. + +```yaml +bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE +``` + +## SATOSA proxy configuration: `proxy_conf.yaml.example` + | Parameter name | Data type | Example value | Description | | -------------- | --------- | ------------- | ----------- | | `BASE` | string | `https://proxy.example.com` | base url of the proxy | | `COOKIE_STATE_NAME` | string | `satosa_state` | name of the cookie SATOSA uses for preserving state between requests | +| `COOKIE_SECURE` | bool | `True` | whether to include the cookie only when the request is transmitted over a secure channel | +| `COOKIE_HTTPONLY` | bool | `True` | whether the cookie should only be accessed only by the server | +| `COOKIE_SAMESITE` | string | `"None"` | whether the cookie should only be sent with requests initiated from the same registrable domain | +| `COOKIE_MAX_AGE` | string | `"1200"` | indicates the maximum lifetime of the cookie represented as the number of seconds until the cookie expires | | `CONTEXT_STATE_DELETE` | bool | `True` | controls whether SATOSA will delete the state cookie after receiving the authentication response from the upstream IdP| | `STATE_ENCRYPTION_KEY` | string | `52fddd3528a44157` | key used for encrypting the state cookie, will be overridden by the environment variable `SATOSA_STATE_ENCRYPTION_KEY` if it is set | | `INTERNAL_ATTRIBUTES` | string | `example/internal_attributes.yaml` | path to attribute mapping @@ -44,13 +91,12 @@ in the [example directory](../example). 
| `BACKEND_MODULES` | string[] | `[openid_connect_backend.yaml, saml2_backend.yaml]` | list of plugin configuration file paths, describing enabled backends | | `FRONTEND_MODULES` | string[] | `[saml2_frontend.yaml, openid_connect_frontend.yaml]` | list of plugin configuration file paths, describing enabled frontends | | `MICRO_SERVICES` | string[] | `[statistics_service.yaml]` | list of plugin configuration file paths, describing enabled microservices | -| `USER_ID_HASH_SALT` | string | `61a89d2db0b9e1e2` | **DEPRECATED - use the hasher micro-service** salt used when creating the persistent user identifier, will be overridden by the environment variable `SATOSA_USER_ID_HASH_SALT` if it is set | | `LOGGING` | dict | see [Python logging.conf](https://docs.python.org/3/library/logging.config.html) | optional configuration of application logging | - -## Attribute mapping configuration: `internal_attributes.yaml` +## Attribute mapping configuration: `internal_attributes.yaml` ### attributes + The values directly under the `attributes` key are the internal attribute names. Every internal attribute has a map of profiles, which in turn has a list of external attributes names which should be mapped to the internal attributes. @@ -62,6 +108,7 @@ internal attribute. Sometimes the external attributes are nested/complex structures. One example is the [address claim in OpenID connect](http://openid.net/specs/openid-connect-core-1_0.html#AddressClaim) which consists of multiple sub-fields, e.g.: + ```json "address": { "formatted": "100 Universal City Plaza, Hollywood CA 91608, USA", @@ -69,7 +116,7 @@ which consists of multiple sub-fields, e.g.: "locality": "Hollywood", "region": "CA", "postal_code": "91608", - "country": "USA", + "country": "USA" } ``` @@ -82,7 +129,7 @@ attribute to use, e.g. 
`address.formatted` will access the attribute value attributes: mail: openid: [email] - saml: [mail, emailAdress, email] + saml: [mail, emailAddress, email] address: openid: [address.formatted] saml: [postaladdress] @@ -104,40 +151,34 @@ attributes (in the proxy backend) <-> internal <-> returned attributes (from the * Any plugin using the `saml` profile will use the attribute value from `postaladdress` delivered from the target provider as the value for `address`. - ### user_id_from_attrs -The user identifier generated by the backend module can be overridden by + +The subject identifier generated by the backend module can be overridden by specifying a list of internal attribute names under the `user_id_from_attrs` key. The attribute values of the attributes specified in this list will be -concatenated and hashed to be used as the user identifier. - +concatenated and used as the subject identifier. ### user_id_to_attr -To store the user identifier in a specific internal attribute, the internal + +To store the subject identifier in a specific internal attribute, the internal attribute name can be specified in `user_id_to_attr`. When the [ALService](https://github.com/its-dirg/ALservice) is used for account linking, the `user_id_to_attr` configuration parameter should be set, since that -service will overwrite the user identifier generated by the proxy. +service will overwrite the subject identifier generated by the proxy. +# Plugins -### hash **DEPRECATED - use the hasher micro-service** -The proxy can hash any attribute value (e.g., for obfuscation) before passing -it on to the client. The `hash` key should contain a list of all attribute names -for which the corresponding attribute values should be hashed before being -returned to the client. - - -## Plugins The authentication protocol specific communication is handled by different plugins, divided into frontends (receiving requests from clients) and backends (sending requests to target providers). 
-### Common plugin configuration parameters +## Common plugin configuration parameters + Both `name` and `module` must be specified in all plugin configurations (frontends, backends, and micro services). The `name` must be unique to ensure correct functionality, and the `module` must be the fully qualified name of an importable Python module. -### SAML2 plugins +## SAML2 plugins Common configuration parameters: @@ -152,6 +193,8 @@ Common configuration parameters: | `entityid_endpoint` | bool | `true` | whether `entityid` should be used as a URL that serves the metadata xml document | `acr_mapping` | dict | `None` | custom Authentication Context Class Reference +### Metadata + The metadata could be loaded in multiple ways in the table above it's loaded from a static file by using the key "local". It's also possible to load read the metadata from a remote URL. @@ -164,18 +207,16 @@ Metadata from local file: Metadata from remote URL: - "metadata": { - "remote": - - url:https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2 - cert:null - } + "metadata": + remote: + - url: "https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2" + cert: null For more detailed information on how you could customize the SAML entities, see the [documentation of the underlying library pysaml2](https://github.com/rohe/pysaml2/blob/master/docs/howto/config.rst). - -##### Providing `AuthnContextClassRef` +### Providing `AuthnContextClassRef` SAML2 frontends and backends can provide a custom (configurable) *Authentication Context Class Reference*. For the frontend this is defined in the `AuthnStatement` of the authentication response, while, @@ -196,14 +237,15 @@ provider will be preserved, and when using a OAuth or OpenID Connect backend, th **Example** - config: - [...] - acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 - +```yaml +config: + [...] 
+ acr_mapping: + "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" +``` -#### Frontend +### SAML2 Frontend The SAML2 frontend act as a SAML Identity Provider (IdP), accepting authentication requests from SAML Service Providers (SP). The default @@ -240,8 +282,8 @@ An example configuration can be found [here](../example/plugins/frontends/saml2_ `SP -> Virtual CO SAMLFrontend -> SAMLBackend -> optional discovery service -> target IdP` +#### Custom attribute release -##### Custom attribute release In addition to respecting for example entity categories from the SAML metadata, the SAML frontend can also further restrict the attribute release with the `custom_attribute_release` configuration parameter based on the SP entity id. @@ -278,52 +320,62 @@ basis. This example summarizes the most common settings (hopefully self-explanat ```yaml config: - idp_config: - service: - idp: - policy: - default: - sign_response: True - sign_assertion: False - sign_alg: "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" - digest_alg: "http://www.w3.org/2001/04/xmlenc#sha256" - : - ... + idp_config: + service: + idp: + policy: + default: + sign_response: True + sign_assertion: False + : + ... ``` Overrides per SP entityID is possible by using the entityID as a key instead of the "default" key in the yaml structure. The most specific key takes presedence. If no policy overrides are provided the defaults above are used. +### SAML2 Backend -#### Backend The SAML2 backend act as a SAML Service Provider (SP), making authentication requests to SAML Identity Providers (IdP). The default configuration file can be found [here](../example/plugins/backends/saml2_backend.yaml.example). 
-##### Name ID Format -The SAML backend can indicate which *Name ID* format it wants by specifying the key -`name_id_format` in the SP entity configuration in the backend plugin configuration: +#### Name ID Format + +The SAML backend has two ways to indicate which *Name ID* format it wants: +* `name_id_format`: is a list of strings to set the `` element in + SP metadata +* `name_id_policy_format`: is a string to set the `Format` attribute in the + `` element in the authentication request. + +The default is to not set any of the above. Note that if the IdP can not +provide the NameID in a format, which is requested in the ``, it +must return an error. ```yaml config: sp_config: service: sp: - name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:transient + name_id_format: + - urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress + - urn:oasis:names:tc:SAML:2.0:nameid-format:transient + name_id_policy_format: urn:oasis:names:tc:SAML:2.0:nameid-format:transient ``` -##### Use a discovery service +#### Use a discovery service + To allow the user to choose which target provider they want to authenticate with, the configuration parameter `disco_srv`, must be specified if the metadata given to the backend module contains more than one IdP: ```yaml config: + disco_srv: http://disco.example.com sp_config: [...] - disco_srv: http://disco.example.com ``` -##### Mirror the SAML ForceAuthn option +#### Mirror the SAML ForceAuthn option By default when the SAML frontend receives a SAML authentication request with `ForceAuthn` set to `True`, this information is not mirrored in the SAML @@ -341,7 +393,7 @@ config: [...] ``` -##### Memorize the IdP selected through the discovery service +#### Memorize the IdP selected through the discovery service In the classic flow, the user is asked to select their home organization to authenticate to. The `memorize_idp` configuration option controls whether @@ -376,53 +428,81 @@ config: [...] 
``` -### OpenID Connect plugins +#### Assertion Consumer Service selection -#### Backend -The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making -authentication requests to OpenID Connect Provider (OP). The default -configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). +When SATOSA sends the SAML2 authentication request to the IDP, it always +specifies the AssertionConsumerServiceURL and binding. When +`acs_selection_strategy` configuration option is set to `use_first_acs` (the +default), then the first element of the `assertion_consumer_service` list will +be selected. If `acs_selection_strategy` is `prefer_matching_host`, then SATOSA +will try to select the `assertion_consumer_service`, which matches the host in +the HTTP request (in simple words, it tries to select an ACS that matches the +URL in the user's browser). If there is no match, it will fall back to using the +first assertion consumer service. -The example configuration assumes the OP supports [discovery](http://openid.net/specs/openid-connect-discovery-1_0.html) -and [dynamic client registration](https://openid.net/specs/openid-connect-registration-1_0.html). -When using an OP that only supports statically registered clients, see the -[default configuration for using Google as the OP](../example/plugins/backends/google_backend.yaml.example) -and make sure to provide the redirect URI, constructed as described in the -section about Google configuration below, in the static registration. +Default value: `use_first_acs`. +```yaml +config: + acs_selection_strategy: prefer_matching_host + [...] +``` + +## OpenID Connect plugins + +### OIDC Frontend -#### Frontend The OpenID Connect frontend acts as and OpenID Connect Provider (OP), accepting requests from OpenID Connect Relying Parties (RPs). The default configuration file can be found [here](../example/plugins/frontends/openid_connect_frontend.yaml.example). 
As opposed to the other plugins, this plugin is NOT stateless (due to the nature of OpenID Connect using any other -flow than "Implicit Flow"). However, the frontend supports using a MongoDB instance as its backend storage, so as long +flow than "Implicit Flow"). However, the frontend supports using a MongoDB or Redis instance as its backend storage, so as long that's reachable from all machines it should not be a problem. The configuration parameters available: * `signing_key_path`: path to a RSA Private Key file (PKCS#1). MUST be configured. -* `db_uri`: connection URI to MongoDB instance where the data will be persisted, if it's not specified all data will only +* `db_uri`: connection URI to MongoDB or Redis instance where the data will be persisted, if it's not specified all data will only be stored in-memory (not suitable for production use). +* `client_db_uri`: connection URI to MongoDB or Redis instance where the client data will be persistent, if it's not specified the clients list will be received from the `client_db_path`. +* `client_db_path`: path to a file containing the client database in json format. It will only be used if `client_db_uri` is not set. If `client_db_uri` and `client_db_path` are not set, clients will only be stored in-memory (not suitable for production use). +* `sub_hash_salt`: salt which is hashed into the `sub` claim. If it's not specified, SATOSA will generate a random salt on each startup, which means that users will get new `sub` value after every restart. +* `sub_mirror_subject` (default: `No`): if this is set to `Yes` and SATOSA releases a public `sub` claim to the client, then the subject identifier received from the backend will be mirrored to the client. The default is to hash the public subject identifier with `sub_hash_salt`. Pairwise `sub` claims are always hashed. * `provider`: provider configuration information. 
MUST be configured, the following configuration are supported: * `response_types_supported` (default: `[id_token]`): list of all supported response types, see [Section 3 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#Authentication). * `subject_types_supported` (default: `[pairwise]`): list of all supported subject identifier types, see [Section 8 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#SubjectIDTypes) * `scopes_supported` (default: `[openid]`): list of all supported scopes, see [Section 5.4 of OIDC Core](http://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims) * `client_registration_supported` (default: `No`): boolean whether [dynamic client registration is supported](https://openid.net/specs/openid-connect-registration-1_0.html). - If dynamic client registration is not supported all clients must exist in the MongoDB instance configured by the `db_uri` in the `"clients"` collection of the `"satosa"` database. + If dynamic client registration is not supported all clients must exist in the MongoDB or Redis instance configured by the `db_uri` in the `"clients"` collection of the `"satosa"` database. The registration info must be stored using the client id as a key, and use the parameter names of a [OIDC Registration Response](https://openid.net/specs/openid-connect-registration-1_0.html#RegistrationResponse). 
- * `authorization_code_lifetime`: how long authorization codes should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) - * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/SUNET/pyop#token-lifetimes) - * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) - * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/SUNET/pyop#token-lifetimes)) + * `authorization_code_lifetime`: how long authorization codes should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) + * `access_token_lifetime`: how long access tokens should be valid, see [default](https://github.com/IdentityPython/pyop#token-lifetimes) + * `refresh_token_lifetime`: how long refresh tokens should be valid, if not specified no refresh tokens will be issued (which is [default](https://github.com/IdentityPython/pyop#token-lifetimes)) + * `refresh_token_threshold`: how long before expiration refresh tokens should be refreshed, if not specified refresh tokens will never be refreshed (which is [default](https://github.com/IdentityPython/pyop#token-lifetimes)) + * `id_token_lifetime`: the lifetime of the ID token in seconds - the default is set to 1hr (3600 seconds) (see [default](https://github.com/IdentityPython/pyop#token-lifetimes)) The other parameters should be left with their default values. -### Social login plugins +### OIDC Backend + +The OpenID Connect backend acts as an OpenID Connect Relying Party (RP), making +authentication requests to OpenID Connect Provider (OP). The default +configuration file can be found [here](../example/plugins/backends/openid_backend.yaml.example). 
+ +The example configuration assumes the OP supports [discovery](http://openid.net/specs/openid-connect-discovery-1_0.html) +and [dynamic client registration](https://openid.net/specs/openid-connect-registration-1_0.html). +When using an OP that only supports statically registered clients, see the +[default configuration for using Google as the OP](../example/plugins/backends/google_backend.yaml.example) +and make sure to provide the redirect URI, constructed as described in the +section about Google configuration below, in the static registration. + +### Social login plugins + The social login plugins can be used as backends for the proxy, allowing the proxy to act as a client to the social login services. #### Google + The default configuration file can be found [here](../example/plugins/backends/google_backend.yaml.example). @@ -437,7 +517,7 @@ It should use the available variables, `` and ``, where: 1. `` is the base url of the proxy as specified in the `BASE` configuration parameter in `proxy_conf.yaml`, e.g. "https://proxy.example.com". -1. `` is the plugin name specified in the `name` configuration parameter defined in the plugin configuration file. +2. `` is the plugin name specified in the `name` configuration parameter defined in the plugin configuration file. The example config in `google_backend.yaml.example`: @@ -449,14 +529,15 @@ config: redirect_uris: [/] [...] ``` + together with `BASE: "https://proxy.example.com"` in `proxy_conf.yaml` would yield the redirect URI `https://proxy.example.com/google` to register with Google. A list of all claims possibly released by Google can be found [here](https://developers.google.com/identity/protocols/OpenIDConnect#obtainuserinfo), which should be used when configuring the attribute mapping (see above). - #### Facebook + The default configuration file can be found [here](../example/plugins/backends/facebook_backend.yaml.example). @@ -468,6 +549,9 @@ for information on how to obtain them. 
A list of all user attributes released by Facebook can be found [here](https://developers.facebook.com/docs/graph-api/reference/v2.5/user), which should be used when configuring the attribute mapping (see above). + +## Dummy adapters + ### Ping frontend for simple heartbeat monitoring The ping frontend responds to a query with a simple @@ -475,15 +559,17 @@ The ping frontend responds to a query with a simple for example by a load balancer. The default configuration file can be found [here](../example/plugins/frontends/ping_frontend.yaml.example). -### Micro services -Additional behaviour can be configured in the proxy through so called *micro services*. There are two different types -of micro services: *request micro services* which are applied to the incoming request, and *response micro services* -which are applied to the incoming response from the target provider. +## Micro-services + +Additional behaviour can be configured in the proxy through so called *micro +services*. There are two different types of micro services: *request micro +services* which are applied to the incoming request, and *response micro +services* which are applied to the incoming response from the target provider. The following micro services are bundled with SATOSA. -#### Adding static attributes to all responses +### Adding static attributes to all responses To add a set of static attributes, use the `AddStaticAttributes` class which will add pre-configured (static) attributes, see the @@ -491,14 +577,14 @@ pre-configured (static) attributes, see the The static attributes are described as key-value pairs in the YAML file, e.g: -``` +```yaml organisation: Example Org. country: Sweden ``` where the keys are the internal attribute names defined in `internal_attributes.yaml`. 
-#### Filtering attribute values +### Filtering attribute values Attribute values delivered from the target provider can be filtered based on a per target provider per requester basis using the `FilterAttributeValues` class. See the [example configuration](../example/plugins/microservices/filter_attributes.yaml.example). @@ -515,9 +601,11 @@ where the empty string (`""`) can be used as a key on any level to describe a de The filters are applied such that all attribute values matched by the regular expression are preserved, while any non-matching attribute values will be discarded. -##### Examples +#### Examples + Filter attributes from the target provider `https://provider.example.com`, to only preserve values starting with the string `"foo:bar"`: + ```yaml "https://provider.example.com": "": @@ -525,6 +613,7 @@ string `"foo:bar"`: ``` Filter the attribute `attr1` to only preserve values ending with the string `"foo:bar"`: + ```yaml "": "": @@ -533,18 +622,46 @@ Filter the attribute `attr1` to only preserve values ending with the string `"fo Filter the attribute `attr1` to the requester `https://provider.example.com`, to only preserver values containing the string `"foo:bar"`: + ```yaml "": "https://client.example.com": "attr1": "foo:bar" ``` -#### Route to a specific backend based on the requester +### Apply an Attribute Policy + +Attributes delivered from the target provider can be filtered based on a list of allowed attributes per requester +using the `AttributePolicy` class: + +```yaml +attribute_policy: + : + allowed: + - attr1 + - attr2 +``` + +### Route to a specific backend based on the requester + To choose which backend (essentially choosing target provider) to use based on the requester, use the `DecideBackendByRequester` class which implements that special routing behavior. See the [example configuration](../example/plugins/microservices/requester_based_routing.yaml.example). 
-#### Filter authentication requests to target SAML entities +### Route to a specific backend based on the target entity id + +Use the `DecideBackendByTargetIssuer` class which implements that special routing behavior. See the +[example configuration](../example/plugins/microservices/target_based_routing.yaml.example). + +### Route to a specific backend based on the discovery service response + +If a Discovery Service is in use and a target entity id is selected by users, you may want to use the +`DiscoToTargetIssuer` class together with `DecideBackendByTargetIssuer` to be able to select a +backend (essentially choosing target provider) based on the response from the discovery service. +See the [example configuration](../example/plugins/microservices/disco_to_target_issuer.yaml.example). + +### Filter authentication requests to target SAML entities + If using the `SAMLMirrorFrontend` module and some of the target providers should support some additional SP's, the `DecideIfRequesterIsAllowed` micro service can be used. It provides a rules mechanism to describe which SP's are allowed to send requests to which IdP's. See the [example configuration](../example/plugins/microservices/allowed_requesters.yaml.example). @@ -555,6 +672,7 @@ Metadata containing all SP's (any SP that might be allowed by a target IdP) must The rules are described using `allow` and `deny` directives under the `rules` configuration parameter. In the following example, the target IdP `target_entity_id1` only allows requests from `requester1` and `requester2`. + ```yaml rules: target_entity_id1: @@ -565,6 +683,7 @@ SP's are by default denied if the IdP has any rules associated with it (i.e, the However, if the IdP does not have any rules associated with its entity id, all SP's are by default allowed. 
Deny all but one SP: + ```yaml rules: target_entity_id1: @@ -573,6 +692,7 @@ rules: ``` Allow all but one SP: + ```yaml rules: target_entity_id1: @@ -580,7 +700,7 @@ rules: deny: ["requester1"] ``` -#### Account linking +### Account linking To allow account linking (multiple accounts at possibly different target providers are linked together as belonging to the same user), an external service can be used. See the [example config](../example/plugins/microservices/account_linking.yaml.example) @@ -590,16 +710,17 @@ the same REST API). This micro service must be the first in the list of configured micro services in the `proxy_conf.yaml` to ensure correct functionality. -#### User consent management +### User consent management -To handle user consent of released information, an external service can be used. See the [example config](../example/plugins/microservices/consent.yaml.example) -which is intended to work with the [CMService](https://github.com/its-dirg/CMservice) (or any other service providing -the same REST API). +To handle user consent of released information, an external service can be +used. See the [example config](../example/plugins/microservices/consent.yaml.example) +which is intended to work with the [CMService](https://github.com/its-dirg/CMservice) +(or any other service providing the same RESTish API). -This micro service must be the last in the list of configured micro services in the `proxy_conf.yaml` to ensure -correct functionality. +This micro service must be the last in the list of configured micro services in +the `proxy_conf.yaml` to ensure correct functionality. -#### LDAP attribute store +### LDAP attribute store An identifier such as eduPersonPrincipalName asserted by an IdP can be used to look up a person record in an LDAP directory to find attributes to assert about the authenticated user to the SP. 
The identifier @@ -609,6 +730,16 @@ persistent NameID may also be obtained from attributes returned from the LDAP di LDAP microservice install the extra necessary dependencies with `pip install satosa[ldap]` and then see the [example config](../example/plugins/microservices/ldap_attribute_store.yaml.example). +### Support for IdP Hinting + +It's possible to hint an IdP to SaToSa using the `IdpHinting` micro-service. + +With this feature an SP can send a hint about the IdP that should be used, in order to skip the discovery service. +The hint is sent as a parameter in the query string of the request. +The hint query parameter value must be the entityID of the IdP. +The hint query parameter name is specified in the micro-service configuration. +See the [example configuration](../example/plugins/microservices/idp_hinting.yaml.example). + ### Custom plugins It's possible to write custom plugins which can be loaded by SATOSA. They have to be contained in a Python module, @@ -619,9 +750,10 @@ methods: * Frontends must inherit `satosa.frontends.base.FrontendModule`. * Backends must inherit `satosa.backends.base.BackendModule`. * Request micro services must inherit `satosa.micro_services.base.RequestMicroService`. -* Request micro services must inherit `satosa.micro_services.base.ResponseMicroService`. +* Response micro services must inherit `satosa.micro_services.base.ResponseMicroService`. + -# Generate proxy metadata +# Generate proxy metadata The proxy metadata is generated based on the front-/backend plugins listed in `proxy_conf.yaml` using the `satosa-saml-metadata` (installed globally by SATOSA installation). @@ -634,6 +766,7 @@ satosa-saml-metadata satosa.wsgi:app --keyfile= --certfile= ``` + where * `socket address` is the socket address that `gunicorn` should bind to for incoming requests, e.g. `0.0.0.0:8080` * `https key` is the path to the private key to use for HTTPS, e.g. 
`pki/key.pem` @@ -666,3 +801,49 @@ set SATOSA_CONFIG=/home/user/proxy_conf.yaml See the [auxiliary documentation for running using mod\_wsgi](mod_wsgi.md). +# External contributions + +Backends and Frontends act like adapters, while micro-services act like plugins +and all of them can be developed by anyone and shared with everyone. + +Other people that have been working with the SaToSa proxy have built +extensions mainly in the form of additional micro-services that are shared to +be used by anyone. + +- SUNET maintains a small collection of extensions that focus around the SWAMID + policies. + The extensions are licensed under the Apache2.0 license. + You can find the code using the following URL: + + - https://github.com/SUNET/swamid-satosa/ + +- A new OIDC frontend, based on the oidc-op library, has been built for + Università della Calabria, by Giuseppe De Marco (@peppelinux); a long-time + contributor to the IdPy projects. + The frontend is licensed under the AGPL-3.0 license. + You can find the code using the following URL: + + - https://github.com/UniversitaDellaCalabria/SATOSA-oidcop + +- A spin-off of SATOSA that adds compatibility with the Italian Digital + Identity System (SPID) is maintained by Giuseppe De Marco. + You can find the code using the following URL: + + - https://github.com/italia/Satosa-Saml2Spid + +- DAASI International have been a long-time user of this software and have made + their extensions available. + The extensions are licensed under the Apache2.0 license. 
+ You can find the code using the following URL: + + - https://gitlab.daasi.de/didmos2/didmos2-auth/-/tree/master/src/didmos_oidc/satosa/micro_services + + The extensions include: + + - SCIM attribute store to fetch attributes via SCIM API (instead of LDAP) + - Authorization module for blocking services if necessary group memberships or + attributes are missing in the identity (for service providers that do not + evaluate attributes themselves) + - Backend chooser with Django UI for letting the user choose between any + existing SATOSA backend + - Integration of MFA via PrivacyIDEA, and more. diff --git a/doc/images/middlewares.png b/doc/images/middlewares.png new file mode 100644 index 000000000..2aca071ad Binary files /dev/null and b/doc/images/middlewares.png differ diff --git a/doc/mod_wsgi.md b/doc/mod_wsgi.md index 8605c7abb..e739028dc 100644 --- a/doc/mod_wsgi.md +++ b/doc/mod_wsgi.md @@ -110,8 +110,6 @@ BASE: https://some.host.org STATE_ENCRYPTION_KEY: fazmC8yELv38f9PF0kbS -USER_ID_HASH_SALT: i7tmt34rzb2QRDgN1Ggy - INTERNAL_ATTRIBUTES: "/etc/satosa/internal_attributes.yaml" COOKIE_STATE_NAME: "SATOSA_STATE" diff --git a/doc/one-to-many.md b/doc/one-to-many.md index c9b08851f..c370db9d8 100644 --- a/doc/one-to-many.md +++ b/doc/one-to-many.md @@ -58,7 +58,7 @@ be configured with a SAML2 frontend and an SAML2 backend. mv internal_attributes.yaml.example internal_attributes.yaml ``` - 1. Map the necessary attributes, see the [Attribute mapping configuration](README.md#attr_map) + 1. Map the necessary attributes, see the [Attribute mapping configuration](README.md#attribute-mapping-configuration-internal_attributesyaml) section of the proxy configuration instructions for more information. 
diff --git a/docker/attributemaps/__init__.py b/docker/attributemaps/__init__.py deleted file mode 100644 index d041d3f13..000000000 --- a/docker/attributemaps/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -__author__ = 'rohe0002' -__all__ = ["adfs_v1x", "adfs_v20", "basic", "saml_uri", "shibboleth_uri"] diff --git a/docker/attributemaps/adfs_v1x.py b/docker/attributemaps/adfs_v1x.py deleted file mode 100644 index 0f8d01a5d..000000000 --- a/docker/attributemaps/adfs_v1x.py +++ /dev/null @@ -1,18 +0,0 @@ -CLAIMS = 'http://schemas.xmlsoap.org/claims/' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified", - 'fro': { - CLAIMS+'commonname': 'commonName', - CLAIMS+'emailaddress': 'emailAddress', - CLAIMS+'group': 'group', - CLAIMS+'upn': 'upn', - }, - 'to': { - 'commonName': CLAIMS+'commonname', - 'emailAddress': CLAIMS+'emailaddress', - 'group': CLAIMS+'group', - 'upn': CLAIMS+'upn', - } -} diff --git a/docker/attributemaps/adfs_v20.py b/docker/attributemaps/adfs_v20.py deleted file mode 100644 index 94150d077..000000000 --- a/docker/attributemaps/adfs_v20.py +++ /dev/null @@ -1,49 +0,0 @@ -CLAIMS = 'http://schemas.xmlsoap.org/claims/' -COM_WS_CLAIMS = 'http://schemas.xmlsoap.com/ws/2005/05/identity/claims/' -MS_CLAIMS = 'http://schemas.microsoft.com/ws/2008/06/identity/claims/' -ORG_WS_CLAIMS = 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified", - 'fro': { - CLAIMS+'commonname': 'commonName', - CLAIMS+'group': 'group', - COM_WS_CLAIMS+'denyonlysid': 'denyOnlySid', - MS_CLAIMS+'authenticationmethod': 'authenticationMethod', - MS_CLAIMS+'denyonlyprimarygroupsid': 'denyOnlyPrimaryGroupSid', - MS_CLAIMS+'denyonlyprimarysid': 'denyOnlyPrimarySid', - MS_CLAIMS+'groupsid': 'groupSid', - MS_CLAIMS+'primarygroupsid': 'primaryGroupSid', - MS_CLAIMS+'primarysid': 'primarySid', - MS_CLAIMS+'role': 'role', - MS_CLAIMS+'windowsaccountname': 'windowsAccountName', 
- ORG_WS_CLAIMS+'emailaddress': 'emailAddress', - ORG_WS_CLAIMS+'givenname': 'givenName', - ORG_WS_CLAIMS+'name': 'name', - ORG_WS_CLAIMS+'nameidentifier': 'nameId', - ORG_WS_CLAIMS+'privatepersonalidentifier': 'privatePersonalId', - ORG_WS_CLAIMS+'surname': 'surname', - ORG_WS_CLAIMS+'upn': 'upn', - }, - 'to': { - 'authenticationMethod': MS_CLAIMS+'authenticationmethod', - 'commonName': CLAIMS+'commonname', - 'denyOnlyPrimaryGroupSid': MS_CLAIMS+'denyonlyprimarygroupsid', - 'denyOnlyPrimarySid': MS_CLAIMS+'denyonlyprimarysid', - 'denyOnlySid': COM_WS_CLAIMS+'denyonlysid', - 'emailAddress': ORG_WS_CLAIMS+'emailaddress', - 'givenName': ORG_WS_CLAIMS+'givenname', - 'group': CLAIMS+'group', - 'groupSid': MS_CLAIMS+'groupsid', - 'name': ORG_WS_CLAIMS+'name', - 'nameId': ORG_WS_CLAIMS+'nameidentifier', - 'primaryGroupSid': MS_CLAIMS+'primarygroupsid', - 'primarySid': MS_CLAIMS+'primarysid', - 'privatePersonalId': ORG_WS_CLAIMS+'privatepersonalidentifier', - 'role': MS_CLAIMS+'role', - 'surname': ORG_WS_CLAIMS+'surname', - 'upn': ORG_WS_CLAIMS+'upn', - 'windowsAccountName': MS_CLAIMS+'windowsaccountname', - } -} diff --git a/docker/attributemaps/basic.py b/docker/attributemaps/basic.py deleted file mode 100644 index c05b6e98b..000000000 --- a/docker/attributemaps/basic.py +++ /dev/null @@ -1,340 +0,0 @@ -DEF = 'urn:mace:dir:attribute-def:' - - -MAP = { - "identifier": "urn:oasis:names:tc:SAML:2.0:attrname-format:basic", - 'fro': { - DEF+'aRecord': 'aRecord', - DEF+'aliasedEntryName': 'aliasedEntryName', - DEF+'aliasedObjectName': 'aliasedObjectName', - DEF+'associatedDomain': 'associatedDomain', - DEF+'associatedName': 'associatedName', - DEF+'audio': 'audio', - DEF+'authorityRevocationList': 'authorityRevocationList', - DEF+'buildingName': 'buildingName', - DEF+'businessCategory': 'businessCategory', - DEF+'c': 'c', - DEF+'cACertificate': 'cACertificate', - DEF+'cNAMERecord': 'cNAMERecord', - DEF+'carLicense': 'carLicense', - DEF+'certificateRevocationList': 
'certificateRevocationList', - DEF+'cn': 'cn', - DEF+'co': 'co', - DEF+'commonName': 'commonName', - DEF+'countryName': 'countryName', - DEF+'crossCertificatePair': 'crossCertificatePair', - DEF+'dITRedirect': 'dITRedirect', - DEF+'dSAQuality': 'dSAQuality', - DEF+'dc': 'dc', - DEF+'deltaRevocationList': 'deltaRevocationList', - DEF+'departmentNumber': 'departmentNumber', - DEF+'description': 'description', - DEF+'destinationIndicator': 'destinationIndicator', - DEF+'displayName': 'displayName', - DEF+'distinguishedName': 'distinguishedName', - DEF+'dmdName': 'dmdName', - DEF+'dnQualifier': 'dnQualifier', - DEF+'documentAuthor': 'documentAuthor', - DEF+'documentIdentifier': 'documentIdentifier', - DEF+'documentLocation': 'documentLocation', - DEF+'documentPublisher': 'documentPublisher', - DEF+'documentTitle': 'documentTitle', - DEF+'documentVersion': 'documentVersion', - DEF+'domainComponent': 'domainComponent', - DEF+'drink': 'drink', - DEF+'eduOrgHomePageURI': 'eduOrgHomePageURI', - DEF+'eduOrgIdentityAuthNPolicyURI': 'eduOrgIdentityAuthNPolicyURI', - DEF+'eduOrgLegalName': 'eduOrgLegalName', - DEF+'eduOrgSuperiorURI': 'eduOrgSuperiorURI', - DEF+'eduOrgWhitePagesURI': 'eduOrgWhitePagesURI', - DEF+'eduCourseOffering': 'eduCourseOffering', - DEF+'eduCourseMember': 'eduCourseMember', - DEF+'eduPersonAffiliation': 'eduPersonAffiliation', - DEF+'eduPersonEntitlement': 'eduPersonEntitlement', - DEF+'eduPersonNickname': 'eduPersonNickname', - DEF+'eduPersonOrgDN': 'eduPersonOrgDN', - DEF+'eduPersonOrgUnitDN': 'eduPersonOrgUnitDN', - DEF+'eduPersonPrimaryAffiliation': 'eduPersonPrimaryAffiliation', - DEF+'eduPersonPrimaryOrgUnitDN': 'eduPersonPrimaryOrgUnitDN', - DEF+'eduPersonPrincipalName': 'eduPersonPrincipalName', - DEF+'eduPersonPrincipalNamePrior': 'eduPersonPrincipalNamePrior', - DEF+'eduPersonScopedAffiliation': 'eduPersonScopedAffiliation', - DEF+'eduPersonTargetedID': 'eduPersonTargetedID', - DEF+'eduPersonAssurance': 'eduPersonAssurance', - 
DEF+'eduPersonUniqueId': 'eduPersonUniqueId', - DEF+'eduPersonOrcid': 'eduPersonOrcid', - DEF+'email': 'email', - DEF+'emailAddress': 'emailAddress', - DEF+'employeeNumber': 'employeeNumber', - DEF+'employeeType': 'employeeType', - DEF+'enhancedSearchGuide': 'enhancedSearchGuide', - DEF+'facsimileTelephoneNumber': 'facsimileTelephoneNumber', - DEF+'favouriteDrink': 'favouriteDrink', - DEF+'fax': 'fax', - DEF+'federationFeideSchemaVersion': 'federationFeideSchemaVersion', - DEF+'friendlyCountryName': 'friendlyCountryName', - DEF+'generationQualifier': 'generationQualifier', - DEF+'givenName': 'givenName', - DEF+'gn': 'gn', - DEF+'homePhone': 'homePhone', - DEF+'homePostalAddress': 'homePostalAddress', - DEF+'homeTelephoneNumber': 'homeTelephoneNumber', - DEF+'host': 'host', - DEF+'houseIdentifier': 'houseIdentifier', - DEF+'info': 'info', - DEF+'initials': 'initials', - DEF+'internationaliSDNNumber': 'internationaliSDNNumber', - DEF+'janetMailbox': 'janetMailbox', - DEF+'jpegPhoto': 'jpegPhoto', - DEF+'knowledgeInformation': 'knowledgeInformation', - DEF+'l': 'l', - DEF+'labeledURI': 'labeledURI', - DEF+'localityName': 'localityName', - DEF+'mDRecord': 'mDRecord', - DEF+'mXRecord': 'mXRecord', - DEF+'mail': 'mail', - DEF+'mailPreferenceOption': 'mailPreferenceOption', - DEF+'manager': 'manager', - DEF+'member': 'member', - DEF+'mobile': 'mobile', - DEF+'mobileTelephoneNumber': 'mobileTelephoneNumber', - DEF+'nSRecord': 'nSRecord', - DEF+'name': 'name', - DEF+'norEduOrgAcronym': 'norEduOrgAcronym', - DEF+'norEduOrgNIN': 'norEduOrgNIN', - DEF+'norEduOrgSchemaVersion': 'norEduOrgSchemaVersion', - DEF+'norEduOrgUniqueIdentifier': 'norEduOrgUniqueIdentifier', - DEF+'norEduOrgUniqueNumber': 'norEduOrgUniqueNumber', - DEF+'norEduOrgUnitUniqueIdentifier': 'norEduOrgUnitUniqueIdentifier', - DEF+'norEduOrgUnitUniqueNumber': 'norEduOrgUnitUniqueNumber', - DEF+'norEduPersonBirthDate': 'norEduPersonBirthDate', - DEF+'norEduPersonLIN': 'norEduPersonLIN', - DEF+'norEduPersonNIN': 
'norEduPersonNIN', - DEF+'o': 'o', - DEF+'objectClass': 'objectClass', - DEF+'organizationName': 'organizationName', - DEF+'organizationalStatus': 'organizationalStatus', - DEF+'organizationalUnitName': 'organizationalUnitName', - DEF+'otherMailbox': 'otherMailbox', - DEF+'ou': 'ou', - DEF+'owner': 'owner', - DEF+'pager': 'pager', - DEF+'pagerTelephoneNumber': 'pagerTelephoneNumber', - DEF+'personalSignature': 'personalSignature', - DEF+'personalTitle': 'personalTitle', - DEF+'photo': 'photo', - DEF+'physicalDeliveryOfficeName': 'physicalDeliveryOfficeName', - DEF+'pkcs9email': 'pkcs9email', - DEF+'postOfficeBox': 'postOfficeBox', - DEF+'postalAddress': 'postalAddress', - DEF+'postalCode': 'postalCode', - DEF+'preferredDeliveryMethod': 'preferredDeliveryMethod', - DEF+'preferredLanguage': 'preferredLanguage', - DEF+'presentationAddress': 'presentationAddress', - DEF+'protocolInformation': 'protocolInformation', - DEF+'pseudonym': 'pseudonym', - DEF+'registeredAddress': 'registeredAddress', - DEF+'rfc822Mailbox': 'rfc822Mailbox', - DEF+'roleOccupant': 'roleOccupant', - DEF+'roomNumber': 'roomNumber', - DEF+'sOARecord': 'sOARecord', - DEF+'searchGuide': 'searchGuide', - DEF+'secretary': 'secretary', - DEF+'seeAlso': 'seeAlso', - DEF+'serialNumber': 'serialNumber', - DEF+'singleLevelQuality': 'singleLevelQuality', - DEF+'sn': 'sn', - DEF+'st': 'st', - DEF+'stateOrProvinceName': 'stateOrProvinceName', - DEF+'street': 'street', - DEF+'streetAddress': 'streetAddress', - DEF+'subtreeMaximumQuality': 'subtreeMaximumQuality', - DEF+'subtreeMinimumQuality': 'subtreeMinimumQuality', - DEF+'supportedAlgorithms': 'supportedAlgorithms', - DEF+'supportedApplicationContext': 'supportedApplicationContext', - DEF+'surname': 'surname', - DEF+'telephoneNumber': 'telephoneNumber', - DEF+'teletexTerminalIdentifier': 'teletexTerminalIdentifier', - DEF+'telexNumber': 'telexNumber', - DEF+'textEncodedORAddress': 'textEncodedORAddress', - DEF+'title': 'title', - DEF+'uid': 'uid', - 
DEF+'uniqueIdentifier': 'uniqueIdentifier', - DEF+'uniqueMember': 'uniqueMember', - DEF+'userCertificate': 'userCertificate', - DEF+'userClass': 'userClass', - DEF+'userPKCS12': 'userPKCS12', - DEF+'userPassword': 'userPassword', - DEF+'userSMIMECertificate': 'userSMIMECertificate', - DEF+'userid': 'userid', - DEF+'x121Address': 'x121Address', - DEF+'x500UniqueIdentifier': 'x500UniqueIdentifier', - }, - 'to': { - 'aRecord': DEF+'aRecord', - 'aliasedEntryName': DEF+'aliasedEntryName', - 'aliasedObjectName': DEF+'aliasedObjectName', - 'associatedDomain': DEF+'associatedDomain', - 'associatedName': DEF+'associatedName', - 'audio': DEF+'audio', - 'authorityRevocationList': DEF+'authorityRevocationList', - 'buildingName': DEF+'buildingName', - 'businessCategory': DEF+'businessCategory', - 'c': DEF+'c', - 'cACertificate': DEF+'cACertificate', - 'cNAMERecord': DEF+'cNAMERecord', - 'carLicense': DEF+'carLicense', - 'certificateRevocationList': DEF+'certificateRevocationList', - 'cn': DEF+'cn', - 'co': DEF+'co', - 'commonName': DEF+'commonName', - 'countryName': DEF+'countryName', - 'crossCertificatePair': DEF+'crossCertificatePair', - 'dITRedirect': DEF+'dITRedirect', - 'dSAQuality': DEF+'dSAQuality', - 'dc': DEF+'dc', - 'deltaRevocationList': DEF+'deltaRevocationList', - 'departmentNumber': DEF+'departmentNumber', - 'description': DEF+'description', - 'destinationIndicator': DEF+'destinationIndicator', - 'displayName': DEF+'displayName', - 'distinguishedName': DEF+'distinguishedName', - 'dmdName': DEF+'dmdName', - 'dnQualifier': DEF+'dnQualifier', - 'documentAuthor': DEF+'documentAuthor', - 'documentIdentifier': DEF+'documentIdentifier', - 'documentLocation': DEF+'documentLocation', - 'documentPublisher': DEF+'documentPublisher', - 'documentTitle': DEF+'documentTitle', - 'documentVersion': DEF+'documentVersion', - 'domainComponent': DEF+'domainComponent', - 'drink': DEF+'drink', - 'eduOrgHomePageURI': DEF+'eduOrgHomePageURI', - 'eduOrgIdentityAuthNPolicyURI': 
DEF+'eduOrgIdentityAuthNPolicyURI', - 'eduOrgLegalName': DEF+'eduOrgLegalName', - 'eduOrgSuperiorURI': DEF+'eduOrgSuperiorURI', - 'eduOrgWhitePagesURI': DEF+'eduOrgWhitePagesURI', - 'eduCourseMember': DEF+'eduCourseMember', - 'eduCourseOffering': DEF+'eduCourseOffering', - 'eduPersonAffiliation': DEF+'eduPersonAffiliation', - 'eduPersonEntitlement': DEF+'eduPersonEntitlement', - 'eduPersonNickname': DEF+'eduPersonNickname', - 'eduPersonOrgDN': DEF+'eduPersonOrgDN', - 'eduPersonOrgUnitDN': DEF+'eduPersonOrgUnitDN', - 'eduPersonPrimaryAffiliation': DEF+'eduPersonPrimaryAffiliation', - 'eduPersonPrimaryOrgUnitDN': DEF+'eduPersonPrimaryOrgUnitDN', - 'eduPersonPrincipalName': DEF+'eduPersonPrincipalName', - 'eduPersonPrincipalNamePrior': DEF+'eduPersonPrincipalNamePrior', - 'eduPersonScopedAffiliation': DEF+'eduPersonScopedAffiliation', - 'eduPersonTargetedID': DEF+'eduPersonTargetedID', - 'eduPersonAssurance': DEF+'eduPersonAssurance', - 'eduPersonUniqueId': DEF+'eduPersonUniqueId', - 'eduPersonOrcid': DEF+'eduPersonOrcid', - 'email': DEF+'email', - 'emailAddress': DEF+'emailAddress', - 'employeeNumber': DEF+'employeeNumber', - 'employeeType': DEF+'employeeType', - 'enhancedSearchGuide': DEF+'enhancedSearchGuide', - 'facsimileTelephoneNumber': DEF+'facsimileTelephoneNumber', - 'favouriteDrink': DEF+'favouriteDrink', - 'fax': DEF+'fax', - 'federationFeideSchemaVersion': DEF+'federationFeideSchemaVersion', - 'friendlyCountryName': DEF+'friendlyCountryName', - 'generationQualifier': DEF+'generationQualifier', - 'givenName': DEF+'givenName', - 'gn': DEF+'gn', - 'homePhone': DEF+'homePhone', - 'homePostalAddress': DEF+'homePostalAddress', - 'homeTelephoneNumber': DEF+'homeTelephoneNumber', - 'host': DEF+'host', - 'houseIdentifier': DEF+'houseIdentifier', - 'info': DEF+'info', - 'initials': DEF+'initials', - 'internationaliSDNNumber': DEF+'internationaliSDNNumber', - 'janetMailbox': DEF+'janetMailbox', - 'jpegPhoto': DEF+'jpegPhoto', - 'knowledgeInformation': 
DEF+'knowledgeInformation', - 'l': DEF+'l', - 'labeledURI': DEF+'labeledURI', - 'localityName': DEF+'localityName', - 'mDRecord': DEF+'mDRecord', - 'mXRecord': DEF+'mXRecord', - 'mail': DEF+'mail', - 'mailPreferenceOption': DEF+'mailPreferenceOption', - 'manager': DEF+'manager', - 'member': DEF+'member', - 'mobile': DEF+'mobile', - 'mobileTelephoneNumber': DEF+'mobileTelephoneNumber', - 'nSRecord': DEF+'nSRecord', - 'name': DEF+'name', - 'norEduOrgAcronym': DEF+'norEduOrgAcronym', - 'norEduOrgNIN': DEF+'norEduOrgNIN', - 'norEduOrgSchemaVersion': DEF+'norEduOrgSchemaVersion', - 'norEduOrgUniqueIdentifier': DEF+'norEduOrgUniqueIdentifier', - 'norEduOrgUniqueNumber': DEF+'norEduOrgUniqueNumber', - 'norEduOrgUnitUniqueIdentifier': DEF+'norEduOrgUnitUniqueIdentifier', - 'norEduOrgUnitUniqueNumber': DEF+'norEduOrgUnitUniqueNumber', - 'norEduPersonBirthDate': DEF+'norEduPersonBirthDate', - 'norEduPersonLIN': DEF+'norEduPersonLIN', - 'norEduPersonNIN': DEF+'norEduPersonNIN', - 'o': DEF+'o', - 'objectClass': DEF+'objectClass', - 'organizationName': DEF+'organizationName', - 'organizationalStatus': DEF+'organizationalStatus', - 'organizationalUnitName': DEF+'organizationalUnitName', - 'otherMailbox': DEF+'otherMailbox', - 'ou': DEF+'ou', - 'owner': DEF+'owner', - 'pager': DEF+'pager', - 'pagerTelephoneNumber': DEF+'pagerTelephoneNumber', - 'personalSignature': DEF+'personalSignature', - 'personalTitle': DEF+'personalTitle', - 'photo': DEF+'photo', - 'physicalDeliveryOfficeName': DEF+'physicalDeliveryOfficeName', - 'pkcs9email': DEF+'pkcs9email', - 'postOfficeBox': DEF+'postOfficeBox', - 'postalAddress': DEF+'postalAddress', - 'postalCode': DEF+'postalCode', - 'preferredDeliveryMethod': DEF+'preferredDeliveryMethod', - 'preferredLanguage': DEF+'preferredLanguage', - 'presentationAddress': DEF+'presentationAddress', - 'protocolInformation': DEF+'protocolInformation', - 'pseudonym': DEF+'pseudonym', - 'registeredAddress': DEF+'registeredAddress', - 'rfc822Mailbox': 
DEF+'rfc822Mailbox', - 'roleOccupant': DEF+'roleOccupant', - 'roomNumber': DEF+'roomNumber', - 'sOARecord': DEF+'sOARecord', - 'searchGuide': DEF+'searchGuide', - 'secretary': DEF+'secretary', - 'seeAlso': DEF+'seeAlso', - 'serialNumber': DEF+'serialNumber', - 'singleLevelQuality': DEF+'singleLevelQuality', - 'sn': DEF+'sn', - 'st': DEF+'st', - 'stateOrProvinceName': DEF+'stateOrProvinceName', - 'street': DEF+'street', - 'streetAddress': DEF+'streetAddress', - 'subtreeMaximumQuality': DEF+'subtreeMaximumQuality', - 'subtreeMinimumQuality': DEF+'subtreeMinimumQuality', - 'supportedAlgorithms': DEF+'supportedAlgorithms', - 'supportedApplicationContext': DEF+'supportedApplicationContext', - 'surname': DEF+'surname', - 'telephoneNumber': DEF+'telephoneNumber', - 'teletexTerminalIdentifier': DEF+'teletexTerminalIdentifier', - 'telexNumber': DEF+'telexNumber', - 'textEncodedORAddress': DEF+'textEncodedORAddress', - 'title': DEF+'title', - 'uid': DEF+'uid', - 'uniqueIdentifier': DEF+'uniqueIdentifier', - 'uniqueMember': DEF+'uniqueMember', - 'userCertificate': DEF+'userCertificate', - 'userClass': DEF+'userClass', - 'userPKCS12': DEF+'userPKCS12', - 'userPassword': DEF+'userPassword', - 'userSMIMECertificate': DEF+'userSMIMECertificate', - 'userid': DEF+'userid', - 'x121Address': DEF+'x121Address', - 'x500UniqueIdentifier': DEF+'x500UniqueIdentifier', - } -} diff --git a/docker/attributemaps/saml_uri.py b/docker/attributemaps/saml_uri.py deleted file mode 100644 index ca6dfd840..000000000 --- a/docker/attributemaps/saml_uri.py +++ /dev/null @@ -1,307 +0,0 @@ -EDUCOURSE_OID = 'urn:oid:1.3.6.1.4.1.5923.1.6.1.' -EDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.5923.1.1.1.' -EDUMEMBER1_OID = 'urn:oid:1.3.6.1.4.1.5923.1.5.1.' -LDAPGVAT_OID = 'urn:oid:1.2.40.0.10.2.1.1.' # ldap.gv.at definitions as specified in http://www.ref.gv.at/AG-IZ-PVP2-Version-2-1-0-2.2754.0.html -UCL_DIR_PILOT = 'urn:oid:0.9.2342.19200300.100.1.' -X500ATTR_OID = 'urn:oid:2.5.4.' 
-LDAPGVAT_UCL_DIR_PILOT = UCL_DIR_PILOT -LDAPGVAT_X500ATTR_OID = X500ATTR_OID -NETSCAPE_LDAP = 'urn:oid:2.16.840.1.113730.3.1.' -NOREDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.2428.90.1.' -PKCS_9 = 'urn:oid:1.2.840.113549.1.9.1.' -SCHAC = 'urn:oid:1.3.6.1.4.1.25178.1.2.' -SIS = 'urn:oid:1.2.752.194.10.2.' -UMICH = 'urn:oid:1.3.6.1.4.1.250.1.57.' -OPENOSI_OID = 'urn:oid:1.3.6.1.4.1.27630.2.1.1.' #openosi-0.82.schema http://www.openosi.org/osi/display/ldap/Home - -MAP = { - 'identifier': 'urn:oasis:names:tc:SAML:2.0:attrname-format:uri', - 'fro': { - EDUCOURSE_OID+'1': 'eduCourseOffering', - EDUCOURSE_OID+'2': 'eduCourseMember', - EDUMEMBER1_OID+'1': 'isMemberOf', - EDUPERSON_OID+'1': 'eduPersonAffiliation', - EDUPERSON_OID+'2': 'eduPersonNickname', - EDUPERSON_OID+'3': 'eduPersonOrgDN', - EDUPERSON_OID+'4': 'eduPersonOrgUnitDN', - EDUPERSON_OID+'5': 'eduPersonPrimaryAffiliation', - EDUPERSON_OID+'6': 'eduPersonPrincipalName', - EDUPERSON_OID+'7': 'eduPersonEntitlement', - EDUPERSON_OID+'8': 'eduPersonPrimaryOrgUnitDN', - EDUPERSON_OID+'9': 'eduPersonScopedAffiliation', - EDUPERSON_OID+'10': 'eduPersonTargetedID', - EDUPERSON_OID+'11': 'eduPersonAssurance', - EDUPERSON_OID+'12': 'eduPersonPrincipalNamePrior', - EDUPERSON_OID+'13': 'eduPersonUniqueId', - EDUPERSON_OID+'16': 'eduPersonOrcid', - LDAPGVAT_OID+'1': 'PVP-GID', - LDAPGVAT_OID+'149': 'PVP-BPK', - LDAPGVAT_OID+'153': 'PVP-OU-OKZ', - LDAPGVAT_OID+'261.10': 'PVP-VERSION', - LDAPGVAT_OID+'261.20': 'PVP-PRINCIPAL-NAME', - LDAPGVAT_OID+'261.24': 'PVP-PARTICIPANT-OKZ', - LDAPGVAT_OID+'261.30': 'PVP-ROLES', - LDAPGVAT_OID+'261.40': 'PVP-INVOICE-RECPT-ID', - LDAPGVAT_OID+'261.50': 'PVP-COST-CENTER-ID', - LDAPGVAT_OID+'261.60': 'PVP-CHARGE-CODE', - LDAPGVAT_OID+'3': 'PVP-OU-GV-OU-ID', - LDAPGVAT_OID+'33': 'PVP-FUNCTION', - LDAPGVAT_OID+'55': 'PVP-BIRTHDATE', - LDAPGVAT_OID+'71': 'PVP-PARTICIPANT-ID', - LDAPGVAT_UCL_DIR_PILOT+'1': 'PVP-USERID', - LDAPGVAT_UCL_DIR_PILOT+'3': 'PVP-MAIL', - LDAPGVAT_X500ATTR_OID+'11': 'PVP-OU', 
- LDAPGVAT_X500ATTR_OID+'20': 'PVP-TEL', - LDAPGVAT_X500ATTR_OID+'42': 'PVP-GIVENNAME', - NETSCAPE_LDAP+'1': 'carLicense', - NETSCAPE_LDAP+'2': 'departmentNumber', - NETSCAPE_LDAP+'3': 'employeeNumber', - NETSCAPE_LDAP+'4': 'employeeType', - NETSCAPE_LDAP+'39': 'preferredLanguage', - NETSCAPE_LDAP+'40': 'userSMIMECertificate', - NETSCAPE_LDAP+'216': 'userPKCS12', - NETSCAPE_LDAP+'241': 'displayName', - NOREDUPERSON_OID+'1': 'norEduOrgUniqueNumber', - NOREDUPERSON_OID+'2': 'norEduOrgUnitUniqueNumber', - NOREDUPERSON_OID+'3': 'norEduPersonBirthDate', - NOREDUPERSON_OID+'4': 'norEduPersonLIN', - NOREDUPERSON_OID+'5': 'norEduPersonNIN', - NOREDUPERSON_OID+'6': 'norEduOrgAcronym', - NOREDUPERSON_OID+'7': 'norEduOrgUniqueIdentifier', - NOREDUPERSON_OID+'8': 'norEduOrgUnitUniqueIdentifier', - NOREDUPERSON_OID+'9': 'federationFeideSchemaVersion', - NOREDUPERSON_OID+'10': 'norEduPersonLegalName', - NOREDUPERSON_OID+'11': 'norEduOrgSchemaVersion', - NOREDUPERSON_OID+'12': 'norEduOrgNIN', - OPENOSI_OID+'17': 'osiHomeUrl', - OPENOSI_OID+'19': 'osiPreferredTZ', - OPENOSI_OID+'72': 'osiICardTimeLastUpdated', - OPENOSI_OID+'104': 'osiMiddleName', - OPENOSI_OID+'107': 'osiOtherEmail', - OPENOSI_OID+'109': 'osiOtherHomePhone', - OPENOSI_OID+'120': 'osiWorkURL', - PKCS_9+'1': 'email', - SCHAC+'1': 'schacMotherTongue', - SCHAC+'2': 'schacGender', - SCHAC+'3': 'schacDateOfBirth', - SCHAC+'4': 'schacPlaceOfBirth', - SCHAC+'5': 'schacCountryOfCitizenship', - SCHAC+'6': 'schacSn1', - SCHAC+'7': 'schacSn2', - SCHAC+'8': 'schacPersonalTitle', - SCHAC+'9': 'schacHomeOrganization', - SCHAC+'10': 'schacHomeOrganizationType', - SCHAC+'11': 'schacCountryOfResidence', - SCHAC+'12': 'schacUserPresenceID', - SCHAC+'13': 'schacPersonalPosition', - SCHAC+'14': 'schacPersonalUniqueCode', - SCHAC+'15': 'schacPersonalUniqueID', - SCHAC+'17': 'schacExpiryDate', - SCHAC+'18': 'schacUserPrivateAttribute', - SCHAC+'19': 'schacUserStatus', - SCHAC+'20': 'schacProjectMembership', - SCHAC+'21': 
'schacProjectSpecificRole', - SIS+'1': 'sisLegalGuardianFor', - SIS+'2': 'sisSchoolGrade', - UCL_DIR_PILOT+'1': 'uid', - UCL_DIR_PILOT+'3': 'mail', - UCL_DIR_PILOT+'25': 'dc', - UCL_DIR_PILOT+'37': 'associatedDomain', - UCL_DIR_PILOT+'43': 'co', - UCL_DIR_PILOT+'60': 'jpegPhoto', - UMICH+'57': 'labeledURI', - X500ATTR_OID+'2': 'knowledgeInformation', - X500ATTR_OID+'3': 'cn', - X500ATTR_OID+'4': 'sn', - X500ATTR_OID+'5': 'serialNumber', - X500ATTR_OID+'6': 'c', - X500ATTR_OID+'7': 'l', - X500ATTR_OID+'8': 'st', - X500ATTR_OID+'9': 'street', - X500ATTR_OID+'10': 'o', - X500ATTR_OID+'11': 'ou', - X500ATTR_OID+'12': 'title', - X500ATTR_OID+'14': 'searchGuide', - X500ATTR_OID+'15': 'businessCategory', - X500ATTR_OID+'16': 'postalAddress', - X500ATTR_OID+'17': 'postalCode', - X500ATTR_OID+'18': 'postOfficeBox', - X500ATTR_OID+'19': 'physicalDeliveryOfficeName', - X500ATTR_OID+'20': 'telephoneNumber', - X500ATTR_OID+'21': 'telexNumber', - X500ATTR_OID+'22': 'teletexTerminalIdentifier', - X500ATTR_OID+'23': 'facsimileTelephoneNumber', - X500ATTR_OID+'24': 'x121Address', - X500ATTR_OID+'25': 'internationaliSDNNumber', - X500ATTR_OID+'26': 'registeredAddress', - X500ATTR_OID+'27': 'destinationIndicator', - X500ATTR_OID+'28': 'preferredDeliveryMethod', - X500ATTR_OID+'29': 'presentationAddress', - X500ATTR_OID+'30': 'supportedApplicationContext', - X500ATTR_OID+'31': 'member', - X500ATTR_OID+'32': 'owner', - X500ATTR_OID+'33': 'roleOccupant', - X500ATTR_OID+'36': 'userCertificate', - X500ATTR_OID+'37': 'cACertificate', - X500ATTR_OID+'38': 'authorityRevocationList', - X500ATTR_OID+'39': 'certificateRevocationList', - X500ATTR_OID+'40': 'crossCertificatePair', - X500ATTR_OID+'42': 'givenName', - X500ATTR_OID+'43': 'initials', - X500ATTR_OID+'44': 'generationQualifier', - X500ATTR_OID+'45': 'x500UniqueIdentifier', - X500ATTR_OID+'46': 'dnQualifier', - X500ATTR_OID+'47': 'enhancedSearchGuide', - X500ATTR_OID+'48': 'protocolInformation', - X500ATTR_OID+'50': 'uniqueMember', - 
X500ATTR_OID+'51': 'houseIdentifier', - X500ATTR_OID+'52': 'supportedAlgorithms', - X500ATTR_OID+'53': 'deltaRevocationList', - X500ATTR_OID+'54': 'dmdName', - X500ATTR_OID+'65': 'pseudonym', - }, - 'to': { - 'associatedDomain': UCL_DIR_PILOT+'37', - 'authorityRevocationList': X500ATTR_OID+'38', - 'businessCategory': X500ATTR_OID+'15', - 'c': X500ATTR_OID+'6', - 'cACertificate': X500ATTR_OID+'37', - 'carLicense': NETSCAPE_LDAP+'1', - 'certificateRevocationList': X500ATTR_OID+'39', - 'cn': X500ATTR_OID+'3', - 'co': UCL_DIR_PILOT+'43', - 'crossCertificatePair': X500ATTR_OID+'40', - 'dc': UCL_DIR_PILOT+'25', - 'deltaRevocationList': X500ATTR_OID+'53', - 'departmentNumber': NETSCAPE_LDAP+'2', - 'destinationIndicator': X500ATTR_OID+'27', - 'displayName': NETSCAPE_LDAP+'241', - 'dmdName': X500ATTR_OID+'54', - 'dnQualifier': X500ATTR_OID+'46', - 'eduCourseMember': EDUCOURSE_OID+'2', - 'eduCourseOffering': EDUCOURSE_OID+'1', - 'eduPersonAffiliation': EDUPERSON_OID+'1', - 'eduPersonEntitlement': EDUPERSON_OID+'7', - 'eduPersonNickname': EDUPERSON_OID+'2', - 'eduPersonOrgDN': EDUPERSON_OID+'3', - 'eduPersonOrgUnitDN': EDUPERSON_OID+'4', - 'eduPersonPrimaryAffiliation': EDUPERSON_OID+'5', - 'eduPersonPrimaryOrgUnitDN': EDUPERSON_OID+'8', - 'eduPersonPrincipalName': EDUPERSON_OID+'6', - 'eduPersonPrincipalNamePrior': EDUPERSON_OID+'12', - 'eduPersonScopedAffiliation': EDUPERSON_OID+'9', - 'eduPersonTargetedID': EDUPERSON_OID+'10', - 'eduPersonAssurance': EDUPERSON_OID+'11', - 'eduPersonUniqueId': EDUPERSON_OID+'13', - 'eduPersonOrcid': EDUPERSON_OID+'16', - 'email': PKCS_9+'1', - 'employeeNumber': NETSCAPE_LDAP+'3', - 'employeeType': NETSCAPE_LDAP+'4', - 'enhancedSearchGuide': X500ATTR_OID+'47', - 'facsimileTelephoneNumber': X500ATTR_OID+'23', - 'federationFeideSchemaVersion': NOREDUPERSON_OID+'9', - 'generationQualifier': X500ATTR_OID+'44', - 'givenName': X500ATTR_OID+'42', - 'houseIdentifier': X500ATTR_OID+'51', - 'initials': X500ATTR_OID+'43', - 'internationaliSDNNumber': 
X500ATTR_OID+'25', - 'isMemberOf': EDUMEMBER1_OID+'1', - 'jpegPhoto': UCL_DIR_PILOT+'60', - 'knowledgeInformation': X500ATTR_OID+'2', - 'l': X500ATTR_OID+'7', - 'labeledURI': UMICH+'57', - 'mail': UCL_DIR_PILOT+'3', - 'member': X500ATTR_OID+'31', - 'norEduOrgAcronym': NOREDUPERSON_OID+'6', - 'norEduOrgNIN': NOREDUPERSON_OID+'12', - 'norEduOrgSchemaVersion': NOREDUPERSON_OID+'11', - 'norEduOrgUniqueIdentifier': NOREDUPERSON_OID+'7', - 'norEduOrgUniqueNumber': NOREDUPERSON_OID+'1', - 'norEduOrgUnitUniqueIdentifier': NOREDUPERSON_OID+'8', - 'norEduOrgUnitUniqueNumber': NOREDUPERSON_OID+'2', - 'norEduPersonBirthDate': NOREDUPERSON_OID+'3', - 'norEduPersonLIN': NOREDUPERSON_OID+'4', - 'norEduPersonLegalName': NOREDUPERSON_OID+'10', - 'norEduPersonNIN': NOREDUPERSON_OID+'5', - 'o': X500ATTR_OID+'10', - 'osiHomeUrl': OPENOSI_OID+'17', - 'osiPreferredTZ': OPENOSI_OID+'19', - 'osiICardTimeLastUpdated': OPENOSI_OID+'72', - 'osiMiddleName': OPENOSI_OID+'104', - 'osiOtherEmail': OPENOSI_OID+'107', - 'osiOtherHomePhone': OPENOSI_OID+'109', - 'osiWorkURL': OPENOSI_OID+'120', - 'ou': X500ATTR_OID+'11', - 'owner': X500ATTR_OID+'32', - 'physicalDeliveryOfficeName': X500ATTR_OID+'19', - 'postOfficeBox': X500ATTR_OID+'18', - 'postalAddress': X500ATTR_OID+'16', - 'postalCode': X500ATTR_OID+'17', - 'preferredDeliveryMethod': X500ATTR_OID+'28', - 'preferredLanguage': NETSCAPE_LDAP+'39', - 'presentationAddress': X500ATTR_OID+'29', - 'protocolInformation': X500ATTR_OID+'48', - 'pseudonym': X500ATTR_OID+'65', - 'PVP-USERID': LDAPGVAT_UCL_DIR_PILOT+'1', - 'PVP-MAIL': LDAPGVAT_UCL_DIR_PILOT+'3', - 'PVP-GID': LDAPGVAT_OID+'1', - 'PVP-BPK': LDAPGVAT_OID+'149', - 'PVP-OU-OKZ': LDAPGVAT_OID+'153', - 'PVP-VERSION': LDAPGVAT_OID+'261.10', - 'PVP-PRINCIPAL-NAME': LDAPGVAT_OID+'261.20', - 'PVP-PARTICIPANT-OKZ': LDAPGVAT_OID+'261.24', - 'PVP-ROLES': LDAPGVAT_OID+'261.30', - 'PVP-INVOICE-RECPT-ID': LDAPGVAT_OID+'261.40', - 'PVP-COST-CENTER-ID': LDAPGVAT_OID+'261.50', - 'PVP-CHARGE-CODE': 
LDAPGVAT_OID+'261.60', - 'PVP-OU-GV-OU-ID': LDAPGVAT_OID+'3', - 'PVP-FUNCTION': LDAPGVAT_OID+'33', - 'PVP-BIRTHDATE': LDAPGVAT_OID+'55', - 'PVP-PARTICIPANT-ID': LDAPGVAT_OID+'71', - 'PVP-OU': LDAPGVAT_X500ATTR_OID+'11', - 'PVP-TEL': LDAPGVAT_X500ATTR_OID+'20', - 'PVP-GIVENNAME': LDAPGVAT_X500ATTR_OID+'42', - 'registeredAddress': X500ATTR_OID+'26', - 'roleOccupant': X500ATTR_OID+'33', - 'schacCountryOfCitizenship': SCHAC+'5', - 'schacCountryOfResidence': SCHAC+'11', - 'schacDateOfBirth': SCHAC+'3', - 'schacExpiryDate': SCHAC+'17', - 'schacGender': SCHAC+'2', - 'schacHomeOrganization': SCHAC+'9', - 'schacHomeOrganizationType': SCHAC+'10', - 'schacMotherTongue': SCHAC+'1', - 'schacPersonalPosition': SCHAC+'13', - 'schacPersonalTitle': SCHAC+'8', - 'schacPersonalUniqueCode': SCHAC+'14', - 'schacPersonalUniqueID': SCHAC+'15', - 'schacPlaceOfBirth': SCHAC+'4', - 'schacProjectMembership': SCHAC+'20', - 'schacProjectSpecificRole': SCHAC+'21', - 'schacSn1': SCHAC+'6', - 'schacSn2': SCHAC+'7', - 'schacUserPresenceID': SCHAC+'12', - 'schacUserPrivateAttribute': SCHAC+'18', - 'schacUserStatus': SCHAC+'19', - 'searchGuide': X500ATTR_OID+'14', - 'serialNumber': X500ATTR_OID+'5', - 'sisLegalGuardianFor': SIS+'1', - 'sisSchoolGrade': SIS+'2', - 'sn': X500ATTR_OID+'4', - 'st': X500ATTR_OID+'8', - 'street': X500ATTR_OID+'9', - 'supportedAlgorithms': X500ATTR_OID+'52', - 'supportedApplicationContext': X500ATTR_OID+'30', - 'telephoneNumber': X500ATTR_OID+'20', - 'teletexTerminalIdentifier': X500ATTR_OID+'22', - 'telexNumber': X500ATTR_OID+'21', - 'title': X500ATTR_OID+'12', - 'uid': UCL_DIR_PILOT+'1', - 'uniqueMember': X500ATTR_OID+'50', - 'userCertificate': X500ATTR_OID+'36', - 'userPKCS12': NETSCAPE_LDAP+'216', - 'userSMIMECertificate': NETSCAPE_LDAP+'40', - 'x121Address': X500ATTR_OID+'24', - 'x500UniqueIdentifier': X500ATTR_OID+'45', - } -} diff --git a/docker/attributemaps/shibboleth_uri.py b/docker/attributemaps/shibboleth_uri.py deleted file mode 100644 index 
54de47353..000000000 --- a/docker/attributemaps/shibboleth_uri.py +++ /dev/null @@ -1,197 +0,0 @@ -EDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.5923.1.1.1.' -NETSCAPE_LDAP = 'urn:oid:2.16.840.1.113730.3.1.' -NOREDUPERSON_OID = 'urn:oid:1.3.6.1.4.1.2428.90.1.' -PKCS_9 = 'urn:oid:1.2.840.113549.1.9.' -UCL_DIR_PILOT = 'urn:oid:0.9.2342.19200300.100.1.' -UMICH = 'urn:oid:1.3.6.1.4.1.250.1.57.' -X500ATTR = 'urn:oid:2.5.4.' - - -MAP = { - "identifier": "urn:mace:shibboleth:1.0:attributeNamespace:uri", - 'fro': { - EDUPERSON_OID+'1': 'eduPersonAffiliation', - EDUPERSON_OID+'2': 'eduPersonNickname', - EDUPERSON_OID+'3': 'eduPersonOrgDN', - EDUPERSON_OID+'4': 'eduPersonOrgUnitDN', - EDUPERSON_OID+'5': 'eduPersonPrimaryAffiliation', - EDUPERSON_OID+'6': 'eduPersonPrincipalName', - EDUPERSON_OID+'7': 'eduPersonEntitlement', - EDUPERSON_OID+'8': 'eduPersonPrimaryOrgUnitDN', - EDUPERSON_OID+'9': 'eduPersonScopedAffiliation', - EDUPERSON_OID+'10': 'eduPersonTargetedID', - EDUPERSON_OID+'11': 'eduPersonAssurance', - EDUPERSON_OID+'12': 'eduPersonPrincipalNamePrior', - EDUPERSON_OID+'13': 'eduPersonUniqueId', - EDUPERSON_OID+'16': 'eduPersonOrcid', - NETSCAPE_LDAP+'1': 'carLicense', - NETSCAPE_LDAP+'2': 'departmentNumber', - NETSCAPE_LDAP+'3': 'employeeNumber', - NETSCAPE_LDAP+'4': 'employeeType', - NETSCAPE_LDAP+'39': 'preferredLanguage', - NETSCAPE_LDAP+'40': 'userSMIMECertificate', - NETSCAPE_LDAP+'216': 'userPKCS12', - NETSCAPE_LDAP+'241': 'displayName', - NOREDUPERSON_OID+'1': 'norEduOrgUniqueNumber', - NOREDUPERSON_OID+'2': 'norEduOrgUnitUniqueNumber', - NOREDUPERSON_OID+'3': 'norEduPersonBirthDate', - NOREDUPERSON_OID+'4': 'norEduPersonLIN', - NOREDUPERSON_OID+'5': 'norEduPersonNIN', - NOREDUPERSON_OID+'6': 'norEduOrgAcronym', - NOREDUPERSON_OID+'7': 'norEduOrgUniqueIdentifier', - NOREDUPERSON_OID+'8': 'norEduOrgUnitUniqueIdentifier', - NOREDUPERSON_OID+'9': 'federationFeideSchemaVersion', - PKCS_9+'1': 'email', - UCL_DIR_PILOT+'3': 'mail', - UCL_DIR_PILOT+'25': 'dc', - 
UCL_DIR_PILOT+'37': 'associatedDomain', - UCL_DIR_PILOT+'60': 'jpegPhoto', - X500ATTR+'2': 'knowledgeInformation', - X500ATTR+'4': 'sn', - X500ATTR+'5': 'serialNumber', - X500ATTR+'6': 'c', - X500ATTR+'7': 'l', - X500ATTR+'8': 'st', - X500ATTR+'9': 'street', - X500ATTR+'10': 'o', - X500ATTR+'11': 'ou', - X500ATTR+'12': 'title', - X500ATTR+'14': 'searchGuide', - X500ATTR+'15': 'businessCategory', - X500ATTR+'16': 'postalAddress', - X500ATTR+'17': 'postalCode', - X500ATTR+'18': 'postOfficeBox', - X500ATTR+'19': 'physicalDeliveryOfficeName', - X500ATTR+'20': 'telephoneNumber', - X500ATTR+'21': 'telexNumber', - X500ATTR+'22': 'teletexTerminalIdentifier', - X500ATTR+'23': 'facsimileTelephoneNumber', - X500ATTR+'24': 'x121Address', - X500ATTR+'25': 'internationaliSDNNumber', - X500ATTR+'26': 'registeredAddress', - X500ATTR+'27': 'destinationIndicator', - X500ATTR+'28': 'preferredDeliveryMethod', - X500ATTR+'29': 'presentationAddress', - X500ATTR+'30': 'supportedApplicationContext', - X500ATTR+'31': 'member', - X500ATTR+'32': 'owner', - X500ATTR+'33': 'roleOccupant', - X500ATTR+'36': 'userCertificate', - X500ATTR+'37': 'cACertificate', - X500ATTR+'38': 'authorityRevocationList', - X500ATTR+'39': 'certificateRevocationList', - X500ATTR+'40': 'crossCertificatePair', - X500ATTR+'42': 'givenName', - X500ATTR+'43': 'initials', - X500ATTR+'44': 'generationQualifier', - X500ATTR+'45': 'x500UniqueIdentifier', - X500ATTR+'46': 'dnQualifier', - X500ATTR+'47': 'enhancedSearchGuide', - X500ATTR+'48': 'protocolInformation', - X500ATTR+'50': 'uniqueMember', - X500ATTR+'51': 'houseIdentifier', - X500ATTR+'52': 'supportedAlgorithms', - X500ATTR+'53': 'deltaRevocationList', - X500ATTR+'54': 'dmdName', - X500ATTR+'65': 'pseudonym', - }, - 'to': { - 'associatedDomain': UCL_DIR_PILOT+'37', - 'authorityRevocationList': X500ATTR+'38', - 'businessCategory': X500ATTR+'15', - 'c': X500ATTR+'6', - 'cACertificate': X500ATTR+'37', - 'carLicense': NETSCAPE_LDAP+'1', - 'certificateRevocationList': 
X500ATTR+'39', - 'countryName': X500ATTR+'6', - 'crossCertificatePair': X500ATTR+'40', - 'dc': UCL_DIR_PILOT+'25', - 'deltaRevocationList': X500ATTR+'53', - 'departmentNumber': NETSCAPE_LDAP+'2', - 'destinationIndicator': X500ATTR+'27', - 'displayName': NETSCAPE_LDAP+'241', - 'dmdName': X500ATTR+'54', - 'dnQualifier': X500ATTR+'46', - 'domainComponent': UCL_DIR_PILOT+'25', - 'eduPersonAffiliation': EDUPERSON_OID+'1', - 'eduPersonEntitlement': EDUPERSON_OID+'7', - 'eduPersonNickname': EDUPERSON_OID+'2', - 'eduPersonOrgDN': EDUPERSON_OID+'3', - 'eduPersonOrgUnitDN': EDUPERSON_OID+'4', - 'eduPersonPrimaryAffiliation': EDUPERSON_OID+'5', - 'eduPersonPrimaryOrgUnitDN': EDUPERSON_OID+'8', - 'eduPersonPrincipalName': EDUPERSON_OID+'6', - 'eduPersonPrincipalNamePrior': EDUPERSON_OID+'12', - 'eduPersonScopedAffiliation': EDUPERSON_OID+'9', - 'eduPersonTargetedID': EDUPERSON_OID+'10', - 'eduPersonAssurance': EDUPERSON_OID+'11', - 'eduPersonUniqueId': EDUPERSON_OID+'13', - 'eduPersonOrcid': EDUPERSON_OID+'16', - 'email': PKCS_9+'1', - 'emailAddress': PKCS_9+'1', - 'employeeNumber': NETSCAPE_LDAP+'3', - 'employeeType': NETSCAPE_LDAP+'4', - 'enhancedSearchGuide': X500ATTR+'47', - 'facsimileTelephoneNumber': X500ATTR+'23', - 'fax': X500ATTR+'23', - 'federationFeideSchemaVersion': NOREDUPERSON_OID+'9', - 'generationQualifier': X500ATTR+'44', - 'givenName': X500ATTR+'42', - 'gn': X500ATTR+'42', - 'houseIdentifier': X500ATTR+'51', - 'initials': X500ATTR+'43', - 'internationaliSDNNumber': X500ATTR+'25', - 'jpegPhoto': UCL_DIR_PILOT+'60', - 'knowledgeInformation': X500ATTR+'2', - 'l': X500ATTR+'7', - 'localityName': X500ATTR+'7', - 'mail': UCL_DIR_PILOT+'3', - 'member': X500ATTR+'31', - 'norEduOrgAcronym': NOREDUPERSON_OID+'6', - 'norEduOrgUniqueIdentifier': NOREDUPERSON_OID+'7', - 'norEduOrgUniqueNumber': NOREDUPERSON_OID+'1', - 'norEduOrgUnitUniqueIdentifier': NOREDUPERSON_OID+'8', - 'norEduOrgUnitUniqueNumber': NOREDUPERSON_OID+'2', - 'norEduPersonBirthDate': NOREDUPERSON_OID+'3', 
- 'norEduPersonLIN': NOREDUPERSON_OID+'4', - 'norEduPersonNIN': NOREDUPERSON_OID+'5', - 'o': X500ATTR+'10', - 'organizationName': X500ATTR+'10', - 'organizationalUnitName': X500ATTR+'11', - 'ou': X500ATTR+'11', - 'owner': X500ATTR+'32', - 'physicalDeliveryOfficeName': X500ATTR+'19', - 'pkcs9email': PKCS_9+'1', - 'postOfficeBox': X500ATTR+'18', - 'postalAddress': X500ATTR+'16', - 'postalCode': X500ATTR+'17', - 'preferredDeliveryMethod': X500ATTR+'28', - 'preferredLanguage': NETSCAPE_LDAP+'39', - 'presentationAddress': X500ATTR+'29', - 'protocolInformation': X500ATTR+'48', - 'pseudonym': X500ATTR+'65', - 'registeredAddress': X500ATTR+'26', - 'rfc822Mailbox': UCL_DIR_PILOT+'3', - 'roleOccupant': X500ATTR+'33', - 'searchGuide': X500ATTR+'14', - 'serialNumber': X500ATTR+'5', - 'sn': X500ATTR+'4', - 'st': X500ATTR+'8', - 'stateOrProvinceName': X500ATTR+'8', - 'street': X500ATTR+'9', - 'streetAddress': X500ATTR+'9', - 'supportedAlgorithms': X500ATTR+'52', - 'supportedApplicationContext': X500ATTR+'30', - 'surname': X500ATTR+'4', - 'telephoneNumber': X500ATTR+'20', - 'teletexTerminalIdentifier': X500ATTR+'22', - 'telexNumber': X500ATTR+'21', - 'title': X500ATTR+'12', - 'uniqueMember': X500ATTR+'50', - 'userCertificate': X500ATTR+'36', - 'userPKCS12': NETSCAPE_LDAP+'216', - 'userSMIMECertificate': NETSCAPE_LDAP+'40', - 'x121Address': X500ATTR+'24', - 'x500UniqueIdentifier': X500ATTR+'45', - } -} diff --git a/docker/setup.sh b/docker/setup.sh deleted file mode 100755 index 3545c5156..000000000 --- a/docker/setup.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e - -VENV_DIR=/opt/satosa - -python3 -m venv "$VENV_DIR" - -"${VENV_DIR}/bin/pip" install --upgrade pip -"${VENV_DIR}/bin/pip" install -e /src/satosa/ diff --git a/docker/start.sh b/docker/start.sh deleted file mode 100755 index dd57b2ee2..000000000 --- a/docker/start.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env sh - -set -e - -# for Click library to work in satosa-saml-metadata -export LC_ALL="C.UTF-8" 
-export LANG="C.UTF-8" - -if [ -z "${DATA_DIR}" ] -then DATA_DIR=/opt/satosa/etc -fi - -if [ ! -d "${DATA_DIR}" ] -then mkdir -p "${DATA_DIR}" -fi - -if [ -z "${PROXY_PORT}" ] -then PROXY_PORT="8000" -fi - -if [ -z "${METADATA_DIR}" ] -then METADATA_DIR="${DATA_DIR}" -fi - -if [ ! -d "${DATA_DIR}/attributemaps" ] -then cp -pr /opt/satosa/attributemaps "${DATA_DIR}/attributemaps" -fi - -# activate virtualenv -. /opt/satosa/bin/activate - -# generate metadata for frontend(IdP interface) and backend(SP interface) -# write the result to mounted volume -mkdir -p "${METADATA_DIR}" -satosa-saml-metadata \ - "${DATA_DIR}/proxy_conf.yaml" \ - "${DATA_DIR}/metadata.key" \ - "${DATA_DIR}/metadata.crt" \ - --dir "${METADATA_DIR}" - -# if the user provided a gunicorn configuration, use it -if [ -f "$GUNICORN_CONF" ] -then conf_opt="--config ${GUNICORN_CONF}" -else conf_opt="--chdir ${DATA_DIR}" -fi - -# if HTTPS cert is available, use it -https_key="${DATA_DIR}/https.key" -https_crt="${DATA_DIR}/https.crt" -if [ -f "$https_key" -a -f "$https_crt" ] -then https_opts="--keyfile ${https_key} --certfile ${https_crt}" -fi - -# if a chain is available, use it -chain_pem="${DATA_DIR}/chain.pem" -if [ -f "$chain_pem" ] -then chain_opts="--ca-certs chain.pem" -fi - -# start the proxy -exec gunicorn $conf_opt \ - -b 0.0.0.0:"${PROXY_PORT}" \ - satosa.wsgi:app \ - $https_opts \ - $chain_opts \ - ; diff --git a/example/cdb.json.example b/example/cdb.json.example new file mode 100644 index 000000000..611574b5d --- /dev/null +++ b/example/cdb.json.example @@ -0,0 +1,10 @@ +{ + "the_client_id": { + "response_types": ["code", "and", "other", "types"], + "client_id": "the_client_id", + "client_secret": "the_client_secret", + "redirect_uris": [ + "http://example.org/rp/the_redirect_uri" + ] + } +} diff --git a/example/internal_attributes.yaml.example b/example/internal_attributes.yaml.example index dc8b5fe1f..a1c5dff9a 100644 --- a/example/internal_attributes.yaml.example +++ 
b/example/internal_attributes.yaml.example @@ -1,43 +1,43 @@ attributes: address: openid: [address.street_address] - orcid: [addresses.str] + orcid: [address] saml: [postaladdress] displayname: openid: [nickname] - orcid: [name.credit-name] + orcid: [displayname] github: [login] saml: [displayName] edupersontargetedid: facebook: [id] linkedin: [id] - orcid: [orcid] + orcid: [edupersontargetedid] github: [id] openid: [sub] saml: [eduPersonTargetedID] givenname: facebook: [first_name] linkedin: [email-address] - orcid: [name.given-names.value] + orcid: [givenname] openid: [given_name] saml: [givenName] mail: facebook: [email] linkedin: [email-address] - orcid: [emails.str] + orcid: [mail] github: [email] openid: [email] - saml: [email, emailAdress, mail] + saml: [email, emailAddress, mail] name: facebook: [name] - orcid: [name.credit-name] + orcid: [name] github: [name] openid: [name] saml: [cn] surname: facebook: [last_name] linkedin: [lastName] - orcid: [name.family-name.value] + orcid: [surname] openid: [family_name] saml: [sn, surname] user_id_from_attrs: [edupersontargetedid] diff --git a/example/plugins/backends/apple_backend.yaml.example b/example/plugins/backends/apple_backend.yaml.example new file mode 100644 index 000000000..bae8e5673 --- /dev/null +++ b/example/plugins/backends/apple_backend.yaml.example @@ -0,0 +1,28 @@ +module: satosa.backends.apple.AppleBackend +name: apple +config: + provider_metadata: + issuer: https://appleid.apple.com + client: + verify_ssl: yes + auth_req_params: + response_type: code + scope: [openid, email, name] + response_mode: form_post + token_endpoint_auth_method: client_secret_post + client_metadata: + application_name: Sign in with Apple + application_type: web + client_id: 'CLIENT_ID_HERE' + client_secret: 'CLIENT_SECRET_HERE' + redirect_uris: [/] + subject_type: pairwise + entity_info: + organization: + display_name: + - ['Apple', 'en'] + name: + - ['Apple Inc.', 'en'] + ui_info: + display_name: + - ['Sign in with 
Apple', 'en'] diff --git a/example/plugins/backends/idpyoidc_backend.yaml.example b/example/plugins/backends/idpyoidc_backend.yaml.example new file mode 100644 index 000000000..45d011b21 --- /dev/null +++ b/example/plugins/backends/idpyoidc_backend.yaml.example @@ -0,0 +1,12 @@ +module: satosa.backends.idpy_oidc.IdpyOIDCBackend +name: oidc +config: + client_type: oidc + redirect_uris: [/] + client_id: !ENV SATOSA_OIDC_BACKEND_CLIENTID + client_secret: !ENV SATOSA_OIDC_BACKEND_CLIENTSECRET + response_types_supported: ["code"] + scopes_supported: ["openid", "profile", "email"] + subject_type_supported: ["public"] + provider_info: + issuer: !ENV SATOSA_OIDC_BACKEND_ISSUER \ No newline at end of file diff --git a/example/plugins/backends/reflector_backend.yaml.example b/example/plugins/backends/reflector_backend.yaml.example new file mode 100644 index 000000000..185a08035 --- /dev/null +++ b/example/plugins/backends/reflector_backend.yaml.example @@ -0,0 +1,3 @@ +module: satosa.backends.reflector.ReflectorBackend +name: Reflector +config: diff --git a/example/plugins/backends/saml2_backend.yaml.example b/example/plugins/backends/saml2_backend.yaml.example index a71dfd0d4..76f9406ee 100644 --- a/example/plugins/backends/saml2_backend.yaml.example +++ b/example/plugins/backends/saml2_backend.yaml.example @@ -3,17 +3,31 @@ name: Saml2 config: idp_blacklist_file: /path/to/blacklist.json + acr_mapping: + "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" + + # disco_srv must be defined if there is more than one IdP in the metadata specified above + disco_srv: http://disco.example.com + + entityid_endpoint: true mirror_force_authn: no memorize_idp: no use_memorized_idp_when_force_authn: no + send_requester_id: no + enable_metadata_reload: no + acs_selection_strategy: prefer_matching_host sp_config: + name: "SP Name" + description: "SP Description" key_file: backend.key cert_file: backend.crt 
organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: - - {contact_type: technical, email_address: technical@example.com, given_name: Technical} - - {contact_type: support, email_address: support@example.com, given_name: Support} + - {contact_type: technical, email_address: 'mailto:technical@example.com', given_name: Technical} + - {contact_type: support, email_address: 'mailto:support@example.com', given_name: Support} + - {contact_type: other, email_address: 'mailto:security@example.com', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} metadata: local: [idp.xml] @@ -52,10 +66,10 @@ config: - [//acs/post, 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST'] discovery_response: - [//disco, 'urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol'] - name_id_format: 'urn:oasis:names:tc:SAML:2.0:nameid-format:transient' - # A name_id_format of 'None' will cause the authentication request to not - # include a Format attribute in the NameIDPolicy. 
- # name_id_format: 'None' + + # name_id_format: a list of strings to set the <NameIDFormat> element in SP metadata + # name_id_policy_format: a string to set the Format attribute in the NameIDPolicy element + # of the authentication request + # name_id_format_allow_create: sets the AllowCreate attribute in the NameIDPolicy element + # of the authentication request name_id_format_allow_create: true - # disco_srv must be defined if there is more than one IdP in the metadata specified above - disco_srv: http://disco.example.com diff --git a/example/plugins/frontends/openid_connect_frontend.yaml.example b/example/plugins/frontends/openid_connect_frontend.yaml.example index 6c94ea758..d7a5584d8 100644 --- a/example/plugins/frontends/openid_connect_frontend.yaml.example +++ b/example/plugins/frontends/openid_connect_frontend.yaml.example @@ -2,8 +2,36 @@ module: satosa.frontends.openid_connect.OpenIDConnectFrontend name: OIDC config: signing_key_path: frontend.key - db_uri: mongodb://db.example.com # optional: only support MongoDB, will default to in-memory storage if not specified + signing_key_id: frontend.key1 + + # Defines the database connection URI for the databases: + # - authz_code_db + # - access_token_db + # - refresh_token_db + # - sub_db + # - user_db + # + # supported storage backends: + # - In-memory dictionary + # - MongoDB (e.g. mongodb://db.example.com) + # - Redis (e.g. redis://example/0) + # - Stateless (e.g. stateless://user:encryptionkey?alg=aes256) + # + # This configuration is optional. + # By default, the in-memory storage is used. + db_uri: mongodb://db.example.com + + # Where to store clients. + # + # If client_db_uri is set, the database connection is used. + # Otherwise, if client_db_path is set, the JSON file is used. + # By default, an in-memory dictionary is used. 
+ client_db_uri: mongodb://db.example.com client_db_path: /path/to/your/cdb.json + + # if not specified, it is randomly generated on every startup + sub_hash_salt: randomSALTvalue + provider: client_registration_supported: Yes response_types_supported: ["code", "id_token token"] @@ -11,5 +39,10 @@ config: scopes_supported: ["openid", "email"] extra_scopes: foo_scope: - - bar_claim - - baz_claim + - bar_claim + - baz_claim + id_token_lifetime: 3600 + extra_id_token_claims: + foo_client: + - bar_claim + - baz_claim diff --git a/example/plugins/frontends/saml2_frontend.yaml.example b/example/plugins/frontends/saml2_frontend.yaml.example index 87bc4203f..342ae03f5 100644 --- a/example/plugins/frontends/saml2_frontend.yaml.example +++ b/example/plugins/frontends/saml2_frontend.yaml.example @@ -1,11 +1,32 @@ module: satosa.frontends.saml2.SAMLFrontend name: Saml2IDP config: + #acr_mapping: + # "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + # "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" + + endpoints: + single_sign_on_service: + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post + 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + + # If configured and not false or empty the common domain cookie _saml_idp will be set + # with or have appended the IdP used for authentication. The default is not to set the + # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie + # will be set to the value for the 'domain' key. If no 'domain' is set then the domain + # from the BASE defined for the proxy will be used. 
+ #common_domain_cookie: + # domain: .example.com + + entityid_endpoint: true + enable_metadata_reload: no + idp_config: organization: {display_name: Example Identities, name: Example Identities Org., url: 'http://www.example.com'} contact_person: - - {contact_type: technical, email_address: technical@example.com, given_name: Technical} - - {contact_type: support, email_address: support@example.com, given_name: Support} + - {contact_type: technical, email_address: 'mailto:technical@example.com', given_name: Technical} + - {contact_type: support, email_address: 'mailto:support@example.com', given_name: Support} + - {contact_type: other, email_address: 'mailto:security@example.com', given_name: Security, extension_attributes: {'xmlns:remd': 'http://refeds.org/metadata', 'remd:contactType': 'http://refeds.org/metadata/contactType/security'}} key_file: frontend.key cert_file: frontend.crt metadata: @@ -49,19 +70,3 @@ config: name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:uri encrypt_assertion: false encrypted_advice_attributes: false - acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 - - endpoints: - single_sign_on_service: - 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post - 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect - - # If configured and not false or empty the common domain cookie _saml_idp will be set - # with or have appended the IdP used for authentication. The default is not to set the - # cookie. If the value is a dictionary with key 'domain' then the domain for the cookie - # will be set to the value for the 'domain' key. If no 'domain' is set then the domain - # from the BASE defined for the proxy will be used. 
- #common_domain_cookie: - # domain: .example.com diff --git a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example index 5ab44fee0..f5a87e9f2 100644 --- a/example/plugins/frontends/saml2_virtualcofrontend.yaml.example +++ b/example/plugins/frontends/saml2_virtualcofrontend.yaml.example @@ -2,7 +2,7 @@ module: satosa.frontends.saml2.SAMLVirtualCoFrontend name: Saml2IDP config: collaborative_organizations: - # The encodeable name for the CO will be URL encoded and used + # The encodeable name for the CO will be URL encoded and used # both for the entityID and the SSO endpoints of the virtual IdP. # The entityID has the form # @@ -12,7 +12,7 @@ config: # # {base}/{backend}/{co_name}/{path} # - - encodedable_name: MESS + - encodeable_name: MESS # If organization and contact_person details appear they # will override the same from the base configuration in # the generated metadata for the CO IdP. @@ -22,8 +22,8 @@ config: url: https://messproject.org contact_person: - contact_type: technical - email_address: help@messproject.org - given_name MESS Technical Support + email_address: 'mailto:help@messproject.org' + given_name: MESS Technical Support # SAML attributes and static values about the CO to be asserted for each user. # The key is the SATOSA internal attribute name. 
co_static_saml_attributes: @@ -49,7 +49,12 @@ config: metadata: local: [sp.xml] - entityid: //proxy.xml + # Available placeholders to use while constructing entityid, + # : Backend name + # : collaborative_organizations encodeable_name + # : Base url of installation + # : Name of this virtual co-frontend + entityid: //idp/ accepted_time_diff: 60 service: idp: @@ -86,14 +91,16 @@ config: lifetime: {minutes: 15} name_form: urn:oasis:names:tc:SAML:2.0:attrname-format:uri acr_mapping: - "": default-LoA - "https://accounts.google.com": LoA1 + "": "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified" + "https://accounts.google.com": "http://eidas.europa.eu/LoA/low" endpoints: single_sign_on_service: 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST': sso/post 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect': sso/redirect + enable_metadata_reload: no + # If configured and not false or empty the common domain cookie _saml_idp will be set # with or have appended the IdP used for authentication. The default is not to set the # cookie. 
If the value is a dictionary with key 'domain' then the domain for the cookie diff --git a/example/plugins/microservices/attribute_generation.yaml.example b/example/plugins/microservices/attribute_generation.yaml.example index a1c65c91b..45f5b269f 100644 --- a/example/plugins/microservices/attribute_generation.yaml.example +++ b/example/plugins/microservices/attribute_generation.yaml.example @@ -7,5 +7,5 @@ config: eduPersonAffiliation: member;employee default: default: - schacHomeOrganization: {{eduPersonPrincipalName.scope}} + schacHomeOrganization: "{{eduPersonPrincipalName.scope}}" schacHomeOrganizationType: tomfoolery provider diff --git a/example/plugins/microservices/attribute_policy.yaml.example b/example/plugins/microservices/attribute_policy.yaml.example new file mode 100644 index 000000000..3a32c78df --- /dev/null +++ b/example/plugins/microservices/attribute_policy.yaml.example @@ -0,0 +1,12 @@ +module: satosa.micro_services.attribute_policy.AttributePolicy +name: AttributePolicy +config: + attribute_policy: + : + allowed: + - mail + - name + - givenname + - surname + + diff --git a/example/plugins/microservices/attribute_processor.yaml.example b/example/plugins/microservices/attribute_processor.yaml.example index 8d946f684..a20bb2faa 100644 --- a/example/plugins/microservices/attribute_processor.yaml.example +++ b/example/plugins/microservices/attribute_processor.yaml.example @@ -15,3 +15,10 @@ config: - name: ScopeProcessor module: satosa.micro_services.processors.scope_processor scope: example.com + - attribute: role + processors: + - name: RegexSubProcessor + module: satosa.micro_services.processors.regex_sub_processor + regex_sub_match_pattern: !ENV REGEX_MATCH_PATTERN + regex_sub_replace_pattern: !ENV REGEX_REPLACE_PATTERN + diff --git a/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example new file mode 100644 index 000000000..90aed60eb --- 
/dev/null +++ b/example/plugins/microservices/custom_routing_decide_by_requester.yaml.example @@ -0,0 +1,8 @@ +module: satosa.micro_services.custom_routing.DecideBackendByRequester +name: DecideBackendByRequester +config: + default_backend: Saml2 + requester_mapping: + 'requestor-id': 'backend_custom' + + diff --git a/example/plugins/microservices/disco_to_target_issuer.yaml.example b/example/plugins/microservices/disco_to_target_issuer.yaml.example new file mode 100644 index 000000000..5d5d0100c --- /dev/null +++ b/example/plugins/microservices/disco_to_target_issuer.yaml.example @@ -0,0 +1,6 @@ +module: satosa.micro_services.disco.DiscoToTargetIssuer +name: DiscoToTargetIssuer +config: + # the regex that will intercept http requests to be handled with this microservice + disco_endpoints: + - ".*/disco" diff --git a/example/plugins/microservices/filter_attributes.yaml.example b/example/plugins/microservices/filter_attributes.yaml.example index f368493b5..185f2dec0 100644 --- a/example/plugins/microservices/filter_attributes.yaml.example +++ b/example/plugins/microservices/filter_attributes.yaml.example @@ -2,6 +2,35 @@ module: satosa.micro_services.attribute_modifications.FilterAttributeValues name: AttributeFilter config: attribute_filters: + # default rules for any IdentityProvider + "": + # default rules for any requester + "": + # enforce controlled vocabulary (via simple notation) + eduPersonAffiliation: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)$" + eduPersonPrimaryAffiliation: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)$" + eduPersonScopedAffiliation: + # enforce controlled vocabulary (via extended notation) + regexp: "^(faculty|student|staff|alum|member|affiliate|employee|library-walk-in)@" + # enforce correct scope + shibmdscope_match_scope: + eduPersonPrincipalName: + # enforce correct scope + shibmdscope_match_scope: + subject-id: + # enforce attribute syntax + regexp: 
"^[0-9A-Za-z][-=0-9A-Za-z]{0,126}@[0-9A-Za-z][-.0-9A-Za-z]{0,126}\\Z" + # enforce correct scope + shibmdscope_match_scope: + pairwise-id: + # enforce attribute syntax + regexp: "^[0-9A-Za-z][-=0-9A-Za-z]{0,126}@[0-9A-Za-z][-.0-9A-Za-z]{0,126}\\Z" + # enforce correct scope + shibmdscope_match_scope: + schacHomeOrganization: + # enforce scoping rule on attribute value + shibmdscope_match_value: + target_provider1: requester1: attr1: "^foo:bar$" diff --git a/example/plugins/microservices/idp_hinting.yaml.example b/example/plugins/microservices/idp_hinting.yaml.example new file mode 100644 index 000000000..8dbc26932 --- /dev/null +++ b/example/plugins/microservices/idp_hinting.yaml.example @@ -0,0 +1,7 @@ +module: satosa.micro_services.idp_hinting.IdpHinting +name: IdpHinting +config: + allowed_params: + - idp_hinting + - idp_hint + - idphint diff --git a/example/plugins/microservices/ldap_attribute_store.yaml.example b/example/plugins/microservices/ldap_attribute_store.yaml.example index 43dd20e1f..afcc7237c 100644 --- a/example/plugins/microservices/ldap_attribute_store.yaml.example +++ b/example/plugins/microservices/ldap_attribute_store.yaml.example @@ -1,14 +1,19 @@ -module: LdapAttributeStore +module: satosa.micro_services.ldap_attribute_store.LdapAttributeStore name: LdapAttributeStore config: - # The microservice may be configured per SP. - # The configuration key is the entityID of the SP. - # The empty key ("") specifies the default configuration - "": - ldap_url: ldaps://ldap.example.org + # The microservice may be configured per entityID. + # The configuration key is the entityID of the requesting SP, + # the authenticating IdP, or the entityID of the CO virtual IdP. + # The key "default" specifies the default configuration + default: + ldap_url: "ldaps://ldap.example.org" bind_dn: cn=admin,dc=example,dc=org - bind_password: xxxxxxxx + # Obtain bind password from environment variable LDAP_BIND_PASSWORD. 
+      bind_password: !ENV LDAP_BIND_PASSWORD
+      # Obtain bind password from file pointed to by
+      # environment variable LDAP_BIND_PASSWORD_FILE.
+      # bind_password: !ENVFILE LDAP_BIND_PASSWORD_FILE
       search_base: ou=People,dc=example,dc=org
       read_only: true
       auto_bind: true
@@ -22,6 +27,9 @@ config:
       # pool_keepalive: seconds to wait between calls to server to keep the
       # connection alive; default: 10
       pool_keepalive: 10
+      # pool_lifetime: number of seconds before recreating a new connection
+      # in a pooled connection strategy.
+      pool_lifetime: None
 
       # Attributes to return from LDAP query.
       query_return_attributes:
@@ -79,6 +87,13 @@ config:
 
       ldap_identifier_attribute: uid
 
+      # Override the constructed search_filter with ldap_identifier_attribute
+      # with an own filter. This allows more complex queries.
+      # {0} will be injected with the ordered_identifier_candidates.
+      # For example:
+      # search_filter: "(&(uid={0})(isMemberOf=authorized))"
+      search_filter: None
+
       # Whether to clear values for attributes incoming
       # to this microservice. Default is no or false.
       clear_input_attributes: no
@@ -88,13 +103,31 @@ config:
       user_id_from_attrs:
         - employeeNumber
 
+      # If true, do not only process the first ldap result, but iterate over
+      # the result and process all of them.
+      use_all_results: false
+
       # Where to redirect the browser if no record is returned
       # from LDAP. The default is not to redirect.
       on_ldap_search_result_empty: https://my.vo.org/please/go/enroll
 
-    # The microservice may be configured per SP.
-    # The configuration key is the entityID of the SP.
-    # Αny missing parameters are looked up from the default configuration.
+    # The microservice may be configured per entityID or per extracted attribute.
+    # The configuration key is the entityID of the requesting SP,
+    # the authenticating IdP, the entityID of the CO virtual IdP, or the
+    # extracted attribute defined by `global.provider_attribute`.
+    # When more than one configured key matches during a flow
+    # the priority ordering is provider attribute, requesting SP, then authenticating IdP, then
+    # CO virtual IdP. Any missing parameters are taken from the
+    # default configuration.
+    global:
+        provider_attribute: domain
+
+    # domain attribute is extracted in a previous microservice and used as a key
+    # here.
+    company.com:
+        ldap_url: ldaps://ldap.company.com
+        search_base: ou=group,dc=identity,dc=company,dc=com
+
     https://sp.myserver.edu/shibboleth-sp:
         search_base: ou=People,o=MyVO,dc=example,dc=org
         search_return_attributes:
@@ -105,6 +138,10 @@ config:
         user_id_from_attrs:
           - uid
 
-    # The microservice may be configured to ignore a particular SP.
+    https://federation-proxy.my.edu/satosa/idp/proxy/some_co:
+        search_base: ou=People,o=some_co,dc=example,dc=org
+
+    # The microservice may be configured to ignore a particular entityID.
     https://another.sp.myserver.edu:
         ignore: true
+
diff --git a/example/plugins/microservices/primary_identifier.yaml.example b/example/plugins/microservices/primary_identifier.yaml.example
new file mode 100644
index 000000000..0b14d7127
--- /dev/null
+++ b/example/plugins/microservices/primary_identifier.yaml.example
@@ -0,0 +1,57 @@
+module: satosa.micro_services.primary_identifier.PrimaryIdentifier
+name: PrimaryIdentifier
+config:
+    # The ordered identifier candidates are searched in order
+    # to find a candidate primary identifier. The search ends
+    # when the first candidate is found. The identifier or attribute
+    # names are the internal SATOSA names for the attributes as
+    # defined in internal_attributes.yaml. The configuration below
+    # would search in order for eduPersonUniqueID, eduPersonPrincipalName
+    # combined with a SAML2 Persistent NameID, eduPersonPrincipalName
+    # combined with eduPersonTargetedId, eduPersonPrincipalName,
+    # SAML 2 Persistent NameID, and finally eduPersonTargetedId.
+ ordered_identifier_candidates: + - attribute_names: [epuid] + # The line below combines, if found, eduPersonPrincipalName and SAML 2 + # persistent NameID to create a primary identifier. + - attribute_names: [eppn, name_id] + name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:persistent + # The line below combines, if found, eduPersonPrincipalName and + # eduPersonTargetedId to create a primary identifier. + - attribute_names: [eppn, edupersontargetedid] + - attribute_names: [eppn] + - attribute_names: [name_id] + name_id_format: urn:oasis:names:tc:SAML:2.0:nameid-format:persistent + # The line below adds the IdP entityID to the value for the SAML2 + # Persistent NameID to ensure the value is fully scoped. + add_scope: issuer_entityid + - attribute_names: [edupersontargetedid] + add_scope: issuer_entityid + + # The internal SATOSA attribute into which to place the primary + # identifier value once found from the above configured ordered + # candidates. + primary_identifier: uid + + # Whether or not to clear the input attributes after setting the + # primary identifier value. + clear_input_attributes: no + + # Whether to replace subject_id with the constructed primary identifier + replace_subject_id: no + + # If defined redirect to this page if no primary identifier can + # be found. + on_error: https://my.org/errors/no_primary_identifier + + # The microservice may be configured per entityID. + # The configuration key is the entityID of the requesting SP, + # or the authenticating IdP. An SP configuration overrides an IdP + # configuration when there is a conflict. 
+ "https://my.org/idp/shibboleth": + ordered_identifier_candidates: + - attribute_names: [eppn] + + "https://service.my.org/sp/shibboleth": + ordered_identifier_candidates: + - attribute_names: [mail] diff --git a/example/plugins/microservices/target_based_routing.yaml.example b/example/plugins/microservices/target_based_routing.yaml.example new file mode 100644 index 000000000..55e699c53 --- /dev/null +++ b/example/plugins/microservices/target_based_routing.yaml.example @@ -0,0 +1,8 @@ +module: satosa.micro_services.custom_routing.DecideBackendByTargetIssuer +name: TargetRouter +config: + default_backend: Saml2 + + target_mapping: + "http://idpspid.testunical.it:8088": "spidSaml2" # map SAML entity with entity id 'target_id' to backend name + "http://eidas.testunical.it:8081/saml2/metadata": "eidasSaml2" diff --git a/example/proxy_conf.yaml.example b/example/proxy_conf.yaml.example index 0289a60b5..d6937f594 100644 --- a/example/proxy_conf.yaml.example +++ b/example/proxy_conf.yaml.example @@ -1,48 +1,71 @@ -#--- SATOSA Config ---# BASE: https://example.com -INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + COOKIE_STATE_NAME: "SATOSA_STATE" CONTEXT_STATE_DELETE: yes STATE_ENCRYPTION_KEY: "asdASD123" -CUSTOM_PLUGIN_MODULE_PATHS: - - "plugins/backends" - - "plugins/frontends" - - "plugins/micro_services" + +cookies_samesite_compat: + - ["SATOSA_STATE", "SATOSA_STATE_LEGACY"] + +INTERNAL_ATTRIBUTES: "internal_attributes.yaml" + BACKEND_MODULES: - "plugins/backends/saml2_backend.yaml" + FRONTEND_MODULES: - "plugins/frontends/saml2_frontend.yaml" + MICRO_SERVICES: - "plugins/microservices/static_attributes.yaml" -cookies_samesite_compat: [ - ("SATOSA_STATE", "SATOSA_STATE_LEGACY"), -] - LOGGING: version: 1 formatters: simple: - format: "[%(asctime)-19.19s] [%(levelname)-5.5s]: %(message)s" + format: "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" handlers: - console: + stdout: class: logging.StreamHandler + stream: "ext://sys.stdout" level: DEBUG 
formatter: simple - stream: ext://sys.stdout - info_file_handler: - class: logging.handlers.RotatingFileHandler - level: INFO + syslog: + class: logging.handlers.SysLogHandler + address: "/dev/log" + level: DEBUG + formatter: simple + debug_file: + class: logging.FileHandler + filename: satosa-debug.log + encoding: utf8 + level: DEBUG + formatter: simple + error_file: + class: logging.FileHandler + filename: satosa-error.log + encoding: utf8 + level: ERROR formatter: simple - filename: info.log + info_file: + class: logging.handlers.RotatingFileHandler + filename: satosa-info.log + encoding: utf8 maxBytes: 10485760 # 10MB backupCount: 20 - encoding: utf8 + level: INFO + formatter: simple loggers: satosa: level: DEBUG - handlers: [console] - propagate: no + saml2: + level: DEBUG + oidcendpoint: + level: DEBUG + pyop: + level: DEBUG + oic: + level: DEBUG root: - level: INFO - handlers: [info_file_handler] + level: DEBUG + handlers: + - stdout diff --git a/scripts/start_proxy.py b/scripts/start_proxy.py deleted file mode 100644 index 1d9af1162..000000000 --- a/scripts/start_proxy.py +++ /dev/null @@ -1,12 +0,0 @@ -import re -import sys - -from gunicorn.app.wsgiapp import run - -print('\n'.join(sys.path)) -# use this entrypoint to start the proxy from the IDE - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(run()) - diff --git a/scripts/travis_create_docker_image_branch.sh b/scripts/travis_create_docker_image_branch.sh deleted file mode 100755 index 13da052b4..000000000 --- a/scripts/travis_create_docker_image_branch.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -e - -docker login -u $DOCKER_USER -p $DOCKER_PASS -export REPO=satosa/satosa -export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi` -docker build -f Dockerfile -t $REPO:$TAG . 
-docker push $REPO diff --git a/scripts/travis_create_docker_image_tag.sh b/scripts/travis_create_docker_image_tag.sh deleted file mode 100755 index 504d2d41f..000000000 --- a/scripts/travis_create_docker_image_tag.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -e - -# Travis does not know which branch the repo is on when building a tag -# Make sure to only call this script when building tags - -docker login -u $DOCKER_USER -p $DOCKER_PASS -export REPO=satosa/satosa -export TAG=latest -docker build -f Dockerfile -t $REPO:$TAG . -if [ -n "$TRAVIS_TAG" ]; then - docker tag $REPO:$TAG $REPO:$TRAVIS_TAG -fi -docker push $REPO diff --git a/setup.cfg b/setup.cfg index 224a77957..88673863b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,25 @@ [metadata] -description-file = README.md \ No newline at end of file +description-file = README.md + + +[flake8] +max-line-length = 88 +author-attribute = forbidden +no-accept-encodings = True +assertive-snakecase = True +# assertive-test-pattern = +inline-quotes = " +multiline-quotes = """ +docstring-quotes = """ +application-import-names = satosa + +hang_closing = false +doctests = false +max-complexity = 10 +exclude = + .git + __pycache__ + doc/source/conf.py + docs/source/conf.py + build + dist diff --git a/setup.py b/setup.py index 0f9fb46eb..70d1e51ab 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name='SATOSA', - version='6.1.0', + version='8.5.1', description='Protocol proxy (SAML/OIDC).', author='DIRG', author_email='satosa-dev@lists.sunet.se', @@ -15,25 +15,33 @@ packages=find_packages('src/'), package_dir={'': 'src'}, install_requires=[ - "pyop >= 3.0.1", - "pysaml2 >= 5.0.0", + "pyop >= v3.4.0", + "pysaml2 >= 6.5.1", "pycryptodomex", "requests", "PyYAML", "gunicorn", "Werkzeug", "click", - "pystache", + "chevron", "cookies-samesite-compat", + "importlib-metadata >= 1.7.0; python_version <= '3.8'", ], extras_require={ - "ldap": ["ldap3"] + "ldap": ["ldap3"], + "pyop_mongo": ["pyop[mongo]"], + 
"pyop_redis": ["pyop[redis]"], + "idpy_oidc_backend": ["idpyoidc >= 2.1.0"], }, zip_safe=False, classifiers=[ "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], entry_points={ "console_scripts": ["satosa-saml-metadata=satosa.scripts.satosa_saml_metadata:construct_saml_metadata"] diff --git a/src/satosa/__init__.py b/src/satosa/__init__.py index 52adfa0d1..eeadbe8f8 100644 --- a/src/satosa/__init__.py +++ b/src/satosa/__init__.py @@ -1,11 +1,4 @@ # -*- coding: utf-8 -*- -""" - satosa - ~~~~~~~~~~~~~~~~ +"""SATOSA: An any to any Single Sign On (SSO) proxy.""" - An any to any Single Sign On (SSO) proxy. - Has support for SAML2, OpenID Connect and some OAUth2 variants. - - :copyright: (c) 2016 by Umeå University. - :license: APACHE 2.0, see LICENSE for more details. 
-""" +from .version import version as __version__ # noqa: F401 diff --git a/src/satosa/attribute_mapping.py b/src/satosa/attribute_mapping.py index ebb008bc0..d5745864c 100644 --- a/src/satosa/attribute_mapping.py +++ b/src/satosa/attribute_mapping.py @@ -1,6 +1,7 @@ import logging from collections import defaultdict from itertools import chain +from typing import Mapping from mako.template import Template @@ -97,11 +98,12 @@ def to_internal(self, attribute_profile, external_dict): continue external_attribute_name = mapping[attribute_profile] - attribute_values = self._collate_attribute_values_by_priority_order(external_attribute_name, - external_dict) + attribute_values = self._collate_attribute_values_by_priority_order( + external_attribute_name, external_dict + ) if attribute_values: # Only insert key if it has some values - logline = "backend attribute {external} mapped to {internal}".format( - external=external_attribute_name, internal=internal_attribute_name + logline = "backend attribute {external} mapped to {internal} ({value})".format( + external=external_attribute_name, internal=internal_attribute_name, value=attribute_values ) logger.debug(logline) internal_dict[internal_attribute_name] = attribute_values @@ -157,6 +159,8 @@ def _get_nested_attribute_value(self, nested_key, data): d = data for key in keys: + if not isinstance(d, Mapping): + return None d = d.get(key) if d is None: return None @@ -205,8 +209,8 @@ def from_internal(self, attribute_profile, internal_dict): external_attribute_names = self.from_internal_attributes[internal_attribute_name][attribute_profile] # select the first attribute name external_attribute_name = external_attribute_names[0] - logline = "frontend attribute {external} mapped from {internal}".format( - external=external_attribute_name, internal=internal_attribute_name + logline = "frontend attribute {external} mapped from {internal} ({value})".format( + external=external_attribute_name, internal=internal_attribute_name, 
value=internal_dict[internal_attribute_name] ) logger.debug(logline) diff --git a/src/satosa/backends/apple.py b/src/satosa/backends/apple.py new file mode 100644 index 000000000..f7c1189ea --- /dev/null +++ b/src/satosa/backends/apple.py @@ -0,0 +1,127 @@ +""" +Apple backend module. +""" +import logging +from .openid_connect import OpenIDConnectBackend, STATE_KEY +from oic.oauth2.message import Message +from oic.oic.message import AuthorizationResponse +import satosa.logging_util as lu +from ..exception import SATOSAAuthenticationError +import json +import requests + + +logger = logging.getLogger(__name__) + + +# https://developer.okta.com/blog/2019/06/04/what-the-heck-is-sign-in-with-apple +class AppleBackend(OpenIDConnectBackend): + """Sign in with Apple backend""" + + def _get_tokens(self, authn_response, context): + """ + :param authn_response: authentication response from OP + :type authn_response: oic.oic.message.AuthorizationResponse + :return: access token and ID Token claims + :rtype: Tuple[Optional[str], Optional[Mapping[str, str]]] + """ + if "code" in authn_response: + # make token request + # https://developer.apple.com/documentation/sign_in_with_apple/generate_and_validate_tokens + args = { + "client_id": self.client.client_id, + "client_secret": self.client.client_secret, + "code": authn_response["code"], + "grant_type": "authorization_code", + "redirect_uri": self.client.registration_response["redirect_uris"][0], + } + + token_resp = requests.post( + "https://appleid.apple.com/auth/token", + data=args, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ).json() + + logger.debug("apple response received") + logger.debug(token_resp) + + self._check_error_response(token_resp, context) + + keyjar = self.client.keyjar + id_token_claims = dict( + Message().from_jwt(token_resp["id_token"], keyjar=keyjar) + ) + + return token_resp["access_token"], id_token_claims + + return authn_response.get("access_token"), authn_response.get("id_token") 
+ + def response_endpoint(self, context, *args): + """ + Handles the authentication response from the OP. + :type context: satosa.context.Context + :type args: Any + :rtype: satosa.response.Response + + :param context: SATOSA context + :param args: None + :return: + """ + backend_state = context.state[self.name] + + # Apple has no userinfo endpoint + # but may send some user information via POST in the first request. + # + # References: + # - https://developer.apple.com/documentation/sign_in_with_apple/sign_in_with_apple_rest_api/authenticating_users_with_sign_in_with_apple + # - https://developer.apple.com/documentation/sign_in_with_apple/namei + try: + userdata = context.request.get("user", "{}") + userinfo = json.loads(userdata) + except json.JSONDecodeError: + userinfo = {} + + authn_resp = self.client.parse_response( + AuthorizationResponse, info=context.request, sformat="dict" + ) + if backend_state[STATE_KEY] != authn_resp["state"]: + msg = "Missing or invalid state in authn response for state: {}".format( + backend_state + ) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) + logger.debug(logline) + raise SATOSAAuthenticationError( + context.state, "Missing or invalid state in authn response" + ) + + self._check_error_response(authn_resp, context) + access_token, id_token_claims = self._get_tokens(authn_resp, context) + if not id_token_claims: + id_token_claims = {} + + if not id_token_claims and not userinfo: + msg = "No id_token or userinfo, nothing to do.." 
+ logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) + logger.error(logline) + raise SATOSAAuthenticationError(context.state, "No user info available.") + + all_user_claims = dict(list(userinfo.items()) + list(id_token_claims.items())) + + # convert "string or Boolean" claims to actual booleans + for bool_claim_name in ["email_verified", "is_private_email"]: + if type(all_user_claims.get(bool_claim_name)) == str: + all_user_claims[bool_claim_name] = ( + True if all_user_claims[bool_claim_name] == "true" else False + ) + + msg = "UserInfo: {}".format(all_user_claims) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + internal_resp = self._translate_response( + all_user_claims, self.client.authorization_endpoint + ) + return self.auth_callback_func(context, internal_resp) diff --git a/src/satosa/backends/github.py b/src/satosa/backends/github.py index 1da9dadbe..70944e371 100644 --- a/src/satosa/backends/github.py +++ b/src/satosa/backends/github.py @@ -99,7 +99,6 @@ def _authn_response(self, context): internal_response.attributes = self.converter.to_internal( self.external_type, user_info) internal_response.subject_id = str(user_info[self.user_id_attr]) - del context.state[self.name] return self.auth_callback_func(context, internal_response) def user_information(self, access_token): @@ -108,4 +107,4 @@ def user_information(self, access_token): r = requests.get(url, headers=headers) ret = r.json() ret['id'] = str(ret['id']) - return r.json() + return ret diff --git a/src/satosa/backends/idpy_oidc.py b/src/satosa/backends/idpy_oidc.py new file mode 100644 index 000000000..f3ea43f61 --- /dev/null +++ b/src/satosa/backends/idpy_oidc.py @@ -0,0 +1,156 @@ +""" +OIDC/OAuth2 backend module. 
+""" +import datetime +import logging +from urllib.parse import urlparse + +from idpyoidc.client.oauth2.stand_alone_client import StandAloneClient +from idpyoidc.server.user_authn.authn_context import UNSPECIFIED + +from satosa.backends.base import BackendModule +from satosa.internal import AuthenticationInformation +from satosa.internal import InternalData +import satosa.logging_util as lu +from ..exception import SATOSAAuthenticationError +from ..exception import SATOSAError +from ..response import Redirect + + +UTC = datetime.timezone.utc +logger = logging.getLogger(__name__) + + +class IdpyOIDCBackend(BackendModule): + """ + Backend module for OIDC and OAuth 2.0, can be directly used. + """ + + def __init__(self, auth_callback_func, internal_attributes, config, base_url, name): + """ + OIDC backend module. + :param auth_callback_func: Callback should be called by the module after the authorization + in the backend is done. + :param internal_attributes: Mapping dictionary between SATOSA internal attribute names and + the names returned by underlying IdP's/OP's as well as what attributes the calling SP's and + RP's expects namevice. + :param config: Configuration parameters for the module. 
+ :param base_url: base url of the service + :param name: name of the plugin + + :type auth_callback_func: + (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response + :type internal_attributes: dict[string, dict[str, str | list[str]]] + :type config: dict[str, dict[str, str] | list[str]] + :type base_url: str + :type name: str + """ + super().__init__(auth_callback_func, internal_attributes, base_url, name) + # self.auth_callback_func = auth_callback_func + # self.config = config + self.client = StandAloneClient(config=config["client"], client_type="oidc") + self.client.do_provider_info() + self.client.do_client_registration() + + _redirect_uris = self.client.context.claims.get_usage('redirect_uris') + if not _redirect_uris: + raise SATOSAError("Missing path in redirect uri") + self.redirect_path = urlparse(_redirect_uris[0]).path + + def start_auth(self, context, internal_request): + """ + See super class method satosa.backends.base#start_auth + + :type context: satosa.context.Context + :type internal_request: satosa.internal.InternalData + :rtype satosa.response.Redirect + """ + login_url = self.client.init_authorization() + return Redirect(login_url) + + def register_endpoints(self): + """ + Creates a list of all the endpoints this backend module needs to listen to. In this case + it's the authentication response from the underlying OP that is redirected from the OP to + the proxy. + :rtype: Sequence[(str, Callable[[satosa.context.Context], satosa.response.Response]] + :return: A list that can be used to map the request to SATOSA to this endpoint. + """ + url_map = [] + url_map.append((f"^{self.redirect_path.lstrip('/')}$", self.response_endpoint)) + return url_map + + def response_endpoint(self, context, *args): + """ + Handles the authentication response from the OP. 
+        :type context: satosa.context.Context
+        :type args: Any
+        :rtype: satosa.response.Response
+
+        :param context: SATOSA context
+        :param args: None
+        :return:
+        """
+
+        _info = self.client.finalize(context.request)
+        self._check_error_response(_info, context)
+        userinfo = _info.get('userinfo')
+        id_token = _info.get('id_token')
+
+        if not id_token and not userinfo:
+            msg = "No id_token or userinfo, nothing to do.."
+            logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
+            logger.error(logline)
+            raise SATOSAAuthenticationError(context.state, "No user info available.")
+
+        all_user_claims = dict(list((userinfo or {}).items()) + list((id_token or {}).items()))
+        msg = "UserInfo: {}".format(all_user_claims)
+        logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
+        logger.debug(logline)
+        internal_resp = self._translate_response(all_user_claims, _info["issuer"])
+        return self.auth_callback_func(context, internal_resp)
+
+    def _translate_response(self, response, issuer):
+        """
+        Translates oidc response to SATOSA internal response.
+        :type response: dict[str, str]
+        :type issuer: str
+        :type subject_type: str
+        :rtype: InternalData
+
+        :param response: Dictionary with attribute name as key.
+        :param issuer: The oidc op that gave the response.
+        :param subject_type: public or pairwise according to oidc standard.
+        :return: A SATOSA internal response.
+ """ + timestamp_epoch = ( + response.get("auth_time") + or response.get("iat") + or int(datetime.datetime.now(UTC).timestamp()) + ) + timestamp_dt = datetime.datetime.fromtimestamp(timestamp_epoch, UTC) + timestamp_iso = timestamp_dt.isoformat().replace("+00:00", "Z") + auth_class_ref = response.get("acr") or response.get("amr") or UNSPECIFIED + auth_info = AuthenticationInformation(auth_class_ref, timestamp_iso, issuer) + + internal_resp = InternalData(auth_info=auth_info) + internal_resp.attributes = self.converter.to_internal("openid", response) + internal_resp.subject_id = response["sub"] + return internal_resp + + def _check_error_response(self, response, context): + """ + Check if the response is an error response. + :param response: the response from finalize() + :type response: oic.oic.message + :raise SATOSAAuthenticationError: if the response is an OAuth error response + """ + if "error" in response: + msg = "{name} error: {error} {description}".format( + name=type(response).__name__, + error=response["error"], + description=response.get("error_description", ""), + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + raise SATOSAAuthenticationError(context.state, "Access denied") diff --git a/src/satosa/backends/linkedin.py b/src/satosa/backends/linkedin.py index 06a5cbac8..8d3a85b4c 100644 --- a/src/satosa/backends/linkedin.py +++ b/src/satosa/backends/linkedin.py @@ -110,7 +110,6 @@ def _authn_response(self, context): self.external_type, user_info) internal_response.subject_id = user_info[self.user_id_attr] - del context.state[self.name] return self.auth_callback_func(context, internal_response) def user_information(self, access_token, api): diff --git a/src/satosa/backends/oauth.py b/src/satosa/backends/oauth.py index 2308f1eee..3e2bd041b 100644 --- a/src/satosa/backends/oauth.py +++ b/src/satosa/backends/oauth.py @@ -145,7 +145,6 @@ def _authn_response(self, context): internal_response = 
InternalData(auth_info=self.auth_info(context.request))
         internal_response.attributes = self.converter.to_internal(self.external_type, user_info)
         internal_response.subject_id = user_info[self.user_id_attr]
-        del context.state[self.name]
         return self.auth_callback_func(context, internal_response)

     def auth_info(self, request):
@@ -259,7 +258,7 @@ def user_information(self, access_token):
         try:
             picture_url = data["picture"]["data"]["url"]
             data["picture"] = picture_url
-        except KeyError as e:
+        except KeyError:
             pass
         return data
@@ -319,6 +318,14 @@ def get_metadata_desc_for_oauth_backend(entity_id, config):
                 ui_description.add_display_name(name[0], name[1])
             for logo in ui_info.get("logo", []):
                 ui_description.add_logo(logo["image"], logo["width"], logo["height"], logo["lang"])
+            for keywords in ui_info.get("keywords", []):
+                ui_description.add_keywords(keywords.get("text", []), keywords.get("lang"))
+            for information_url in ui_info.get("information_url", []):
+                ui_description.add_information_url(information_url.get("text"), information_url.get("lang"))
+            for privacy_statement_url in ui_info.get("privacy_statement_url", []):
+                ui_description.add_privacy_statement_url(
+                    privacy_statement_url.get("text"), privacy_statement_url.get("lang")
+                )

         description.ui_info = ui_description
diff --git a/src/satosa/backends/openid_connect.py b/src/satosa/backends/openid_connect.py
index 87772f565..58d47af9b 100644
--- a/src/satosa/backends/openid_connect.py
+++ b/src/satosa/backends/openid_connect.py
@@ -12,13 +12,16 @@
 from oic.oic.message import RegistrationRequest
 from oic.utils.authn.authn_context import UNSPECIFIED
 from oic.utils.authn.client import CLIENT_AUTHN_METHOD
+from oic.utils.settings import PyoidcSettings
 import satosa.logging_util as lu
 from satosa.internal import AuthenticationInformation
 from satosa.internal import InternalData
 from .base import BackendModule
 from .oauth import get_metadata_desc_for_oauth_backend
-from ..exception import SATOSAAuthenticationError, SATOSAError
+from ..exception import SATOSAAuthenticationError
+from ..exception import SATOSAError
+from ..exception import SATOSAMissingStateError
 from ..response import Redirect
@@ -55,11 +58,26 @@ def __init__(self, auth_callback_func, internal_attributes, config, base_url, na
         super().__init__(auth_callback_func, internal_attributes, base_url, name)
         self.auth_callback_func = auth_callback_func
         self.config = config
-        self.client = _create_client(
-            config["provider_metadata"],
-            config["client"]["client_metadata"],
-            config["client"].get("verify_ssl", True),
-        )
+        cfg_verify_ssl = config["client"].get("verify_ssl", True)
+        oidc_settings = PyoidcSettings(verify_ssl=cfg_verify_ssl)
+
+        try:
+            self.client = _create_client(
+                provider_metadata=config["provider_metadata"],
+                client_metadata=config["client"]["client_metadata"],
+                settings=oidc_settings,
+            )
+        except Exception as exc:
+            msg = {
+                "message": "Failed to initialize client",
+                "error": str(exc),
+                "client_metadata": self.config['client']['client_metadata'],
+                "provider_metadata": self.config['provider_metadata'],
+            }
+            logline = "{}".format(msg)
+            logger.error(logline)
+            raise SATOSAError(msg) from exc
+
         if "scope" not in config["client"]["auth_req_params"]:
             config["auth_req_params"]["scope"] = "openid"
         if "response_type" not in config["client"]["auth_req_params"]:
@@ -182,6 +200,22 @@ def response_endpoint(self, context, *args):
         :param args: None
         :return:
         """
+
+        if self.name not in context.state:
+            """
+            If we end up here, it means that the user returns to the proxy
+            without the SATOSA session cookie.
This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + error = "Received AuthN response without a SATOSA session cookie" + raise SATOSAMissingStateError(error) + backend_state = context.state[self.name] authn_resp = self.client.parse_response(AuthorizationResponse, info=context.request, sformat="dict") if backend_state[STATE_KEY] != authn_resp["state"]: @@ -212,7 +246,6 @@ def response_endpoint(self, context, *args): msg = "UserInfo: {}".format(all_user_claims) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - del context.state[self.name] internal_resp = self._translate_response(all_user_claims, self.client.authorization_endpoint) return self.auth_callback_func(context, internal_resp) @@ -243,7 +276,7 @@ def get_metadata_desc(self): return get_metadata_desc_for_oauth_backend(self.config["provider_metadata"]["issuer"], self.config) -def _create_client(provider_metadata, client_metadata, verify_ssl=True): +def _create_client(provider_metadata, client_metadata, settings=None): """ Create a pyoidc client instance. 
:param provider_metadata: provider configuration information @@ -254,7 +287,7 @@ def _create_client(provider_metadata, client_metadata, verify_ssl=True): :rtype: oic.oic.Client """ client = oic.Client( - client_authn_method=CLIENT_AUTHN_METHOD, verify_ssl=verify_ssl + client_authn_method=CLIENT_AUTHN_METHOD, settings=settings ) # Provider configuration information diff --git a/src/satosa/backends/orcid.py b/src/satosa/backends/orcid.py index aaa18b7e5..649e72451 100644 --- a/src/satosa/backends/orcid.py +++ b/src/satosa/backends/orcid.py @@ -73,16 +73,15 @@ def _authn_response(self, context): request_args=rargs, state=aresp['state']) user_info = self.user_information( - atresp['access_token'], atresp['orcid'], atresp['name']) + atresp['access_token'], atresp['orcid'], atresp.get('name')) internal_response = InternalData( auth_info=self.auth_info(context.request)) internal_response.attributes = self.converter.to_internal( self.external_type, user_info) internal_response.subject_id = user_info[self.user_id_attr] - del context.state[self.name] return self.auth_callback_func(context, internal_response) - def user_information(self, access_token, orcid, name): + def user_information(self, access_token, orcid, name=None): base_url = self.config['server_info']['user_info'] url = urljoin(base_url, '{}/person'.format(orcid)) headers = { @@ -92,13 +91,15 @@ def user_information(self, access_token, orcid, name): r = requests.get(url, headers=headers) r = r.json() emails, addresses = r['emails']['email'], r['addresses']['address'] + rname = r.get('name') or {} ret = dict( address=', '.join([e['country']['value'] for e in addresses]), displayname=name, edupersontargetedid=orcid, orcid=orcid, mail=' '.join([e['email'] for e in emails]), name=name, - givenname=r['name']['given-names']['value'], - surname=r['name']['family-name']['value'], + givenname=(rname.get('given-names') or {}).get('value'), + surname=(rname.get('family-name') or {}).get('value'), ) + return ret diff --git 
a/src/satosa/backends/reflector.py b/src/satosa/backends/reflector.py new file mode 100644 index 000000000..6a9055485 --- /dev/null +++ b/src/satosa/backends/reflector.py @@ -0,0 +1,85 @@ +""" +A reflector backend module for the satosa proxy +""" +import base64 +from datetime import datetime + +from satosa.internal import AuthenticationInformation +from satosa.internal import InternalData +from satosa.metadata_creation.description import MetadataDescription +from satosa.backends.base import BackendModule + + +class ReflectorBackend(BackendModule): + """ + A reflector backend module + """ + + ENTITY_ID = ORG_NAME = AUTH_CLASS_REF = SUBJECT_ID = "reflector" + + def __init__(self, outgoing, internal_attributes, config, base_url, name): + """ + :type outgoing: + (satosa.context.Context, satosa.internal.InternalData) -> satosa.response.Response + :type internal_attributes: dict[str, dict[str, list[str] | str]] + :type config: dict[str, Any] + :type base_url: str + :type name: str + + :param outgoing: Callback should be called by the module after + the authorization in the backend is done. 
+ :param internal_attributes: Internal attribute map + :param config: The module config + :param base_url: base url of the service + :param name: name of the plugin + """ + super().__init__(outgoing, internal_attributes, base_url, name) + + def start_auth(self, context, internal_req): + """ + See super class method satosa.backends.base.BackendModule#start_auth + + :type context: satosa.context.Context + :type internal_req: satosa.internal.InternalData + :rtype: satosa.response.Response + """ + + timestamp = datetime.utcnow().timestamp() + auth_info = AuthenticationInformation( + auth_class_ref=ReflectorBackend.AUTH_CLASS_REF, + timestamp=timestamp, + issuer=ReflectorBackend.ENTITY_ID, + ) + + internal_resp = InternalData( + auth_info=auth_info, + attributes={}, + subject_type=None, + subject_id=ReflectorBackend.SUBJECT_ID, + ) + + return self.auth_callback_func(context, internal_resp) + + def register_endpoints(self): + """ + See super class method satosa.backends.base.BackendModule#register_endpoints + :rtype list[(str, ((satosa.context.Context, Any) -> Any, Any))] + """ + url_map = [] + return url_map + + def get_metadata_desc(self): + """ + See super class satosa.backends.backend_base.BackendModule#get_metadata_desc + :rtype: satosa.metadata_creation.description.MetadataDescription + """ + entity_descriptions = [] + description = MetadataDescription( + base64.urlsafe_b64encode(ReflectorBackend.ENTITY_ID.encode("utf-8")).decode( + "utf-8" + ) + ) + description.organization = ReflectorBackend.ORG_NAME + + entity_descriptions.append(description) + return entity_descriptions diff --git a/src/satosa/backends/saml2.py b/src/satosa/backends/saml2.py index 024e948d8..8be4572d4 100644 --- a/src/satosa/backends/saml2.py +++ b/src/satosa/backends/saml2.py @@ -10,27 +10,31 @@ from urllib.parse import urlparse from saml2 import BINDING_HTTP_REDIRECT -from saml2.client_base import Base +from saml2.client import Saml2Client from saml2.config import SPConfig from 
saml2.extension.mdui import NAMESPACE as UI_NAMESPACE from saml2.metadata import create_metadata_string from saml2.authn_context import requested_authn_context +from saml2.samlp import RequesterID +from saml2.samlp import Scoping import satosa.logging_util as lu import satosa.util as util from satosa.base import SAMLBaseModule from satosa.base import SAMLEIDASBaseModule +from satosa.base import STATE_KEY as STATE_KEY_BASE from satosa.context import Context from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.exception import SATOSAAuthenticationError +from satosa.exception import SATOSAMissingStateError +from satosa.exception import SATOSAAuthenticationFlowError from satosa.response import SeeOther, Response from satosa.saml_util import make_saml_response from satosa.metadata_creation.description import ( MetadataDescription, OrganizationDesc, ContactPersonDesc, UIInfoDesc ) from satosa.backends.base import BackendModule -from satosa.deprecated import SAMLInternalResponse logger = logging.getLogger(__name__) @@ -80,7 +84,9 @@ class SAMLBackend(BackendModule, SAMLBaseModule): KEY_SAML_DISCOVERY_SERVICE_URL = 'saml_discovery_service_url' KEY_SAML_DISCOVERY_SERVICE_POLICY = 'saml_discovery_service_policy' KEY_SP_CONFIG = 'sp_config' + KEY_SEND_REQUESTER_ID = 'send_requester_id' KEY_MIRROR_FORCE_AUTHN = 'mirror_force_authn' + KEY_IS_PASSIVE = 'is_passive' KEY_MEMORIZE_IDP = 'memorize_idp' KEY_USE_MEMORIZED_IDP_WHEN_FORCE_AUTHN = 'use_memorized_idp_when_force_authn' @@ -105,29 +111,38 @@ def __init__(self, outgoing, internal_attributes, config, base_url, name): super().__init__(outgoing, internal_attributes, base_url, name) self.config = self.init_config(config) - sp_config = SPConfig().load(copy.deepcopy( - config[SAMLBackend.KEY_SP_CONFIG]), False - ) - self.sp = Base(sp_config) - self.discosrv = config.get(SAMLBackend.KEY_DISCO_SRV) self.encryption_keys = [] self.outstanding_queries = {} self.idp_blacklist_file = 
config.get('idp_blacklist_file', None) - sp_keypairs = sp_config.getattr('encryption_keypairs', '') - sp_key_file = sp_config.getattr('key_file', '') - if sp_keypairs: - key_file_paths = [pair['key_file'] for pair in sp_keypairs] - elif sp_key_file: - key_file_paths = [sp_key_file] - else: - key_file_paths = [] + sp_config = SPConfig().load(copy.deepcopy(config[SAMLBackend.KEY_SP_CONFIG])) + + # if encryption_keypairs is defined, use those keys for decryption + # else, if key_file and cert_file are defined, use them for decryption + # otherwise, do not use any decryption key. + # ensure the choice is reflected back in the configuration. + sp_conf_encryption_keypairs = sp_config.getattr('encryption_keypairs', '') + sp_conf_key_file = sp_config.getattr('key_file', '') + sp_conf_cert_file = sp_config.getattr('cert_file', '') + sp_keypairs = ( + sp_conf_encryption_keypairs + if sp_conf_encryption_keypairs + else [{'key_file': sp_conf_key_file, 'cert_file': sp_conf_cert_file}] + if sp_conf_key_file and sp_conf_cert_file + else [] + ) + sp_config.setattr('', 'encryption_keypairs', sp_keypairs) + # load the encryption keys + key_file_paths = [pair['key_file'] for pair in sp_keypairs] for p in key_file_paths: with open(p) as key_file: self.encryption_keys.append(key_file.read()) + # finally, initialize the client object + self.sp = Saml2Client(sp_config) + def get_idp_entity_id(self, context): """ :type context: satosa.context.Context @@ -211,13 +226,21 @@ def disco_query(self, context): loc = self.sp.create_discovery_service_request( disco_url, self.sp.config.entityid, **args ) - return SeeOther(loc) - def construct_requested_authn_context(self, entity_id): - if not self.acr_mapping: - return None + msg = { + "message": "Sending user to the discovery service", + "disco_url": loc + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + + return SeeOther(loc) - acr_entry = util.get_dict_defaults(self.acr_mapping, 
entity_id) + def construct_requested_authn_context(self, entity_id, *, target_accr=None): + acr_entry = ( + target_accr + or util.get_dict_defaults(self.acr_mapping or {}, entity_id) + ) if not acr_entry: return None @@ -229,7 +252,9 @@ def construct_requested_authn_context(self, entity_id): authn_context = requested_authn_context( acr_entry['class_ref'], comparison=acr_entry.get( - 'comparison', self.VALUE_ACR_COMPARISON_DEFAULT)) + 'comparison', self.VALUE_ACR_COMPARISON_DEFAULT + ) + ) return authn_context @@ -253,53 +278,113 @@ def authn_request(self, context, entity_id): with open(self.idp_blacklist_file) as blacklist_file: blacklist_array = json.load(blacklist_file)['blacklist'] if entity_id in blacklist_array: - msg = "IdP with EntityID {} is blacklisted".format(entity_id) + msg = { + "message": "AuthnRequest Failed", + "error": f"Selected IdP with EntityID {entity_id} is blacklisted for this backend", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline, exc_info=False) - raise SATOSAAuthenticationError(context.state, "Selected IdP is blacklisted for this backend") + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) kwargs = {} - authn_context = self.construct_requested_authn_context(entity_id) + target_accr = context.state.get(Context.KEY_TARGET_AUTHN_CONTEXT_CLASS_REF) + authn_context = self.construct_requested_authn_context(entity_id, target_accr=target_accr) if authn_context: kwargs["requested_authn_context"] = authn_context if self.config.get(SAMLBackend.KEY_MIRROR_FORCE_AUTHN): kwargs["force_authn"] = get_force_authn( context, self.config, self.sp.config ) + if self.config.get(SAMLBackend.KEY_SEND_REQUESTER_ID): + requester = context.state.state_dict[STATE_KEY_BASE]['requester'] + kwargs["scoping"] = Scoping(requester_id=[RequesterID(text=requester)]) + if self.config.get(SAMLBackend.KEY_IS_PASSIVE): + kwargs["is_passive"] = "true" try: - binding, destination = 
self.sp.pick_binding( - "single_sign_on_service", None, "idpsso", entity_id=entity_id - ) - msg = "binding: {}, destination: {}".format(binding, destination) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - - acs_endp, response_binding = self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][0] - req_id, req = self.sp.create_authn_request( - destination, binding=response_binding, **kwargs - ) + acs_endp, response_binding = self._get_acs(context) relay_state = util.rndstr() - ht_args = self.sp.apply_binding(binding, "%s" % req, destination, relay_state=relay_state) - msg = "ht_args: {}".format(ht_args) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - except Exception as exc: - msg = "Failed to construct the AuthnRequest for state" + req_id, binding, http_info = self.sp.prepare_for_negotiated_authenticate( + entityid=entity_id, + assertion_consumer_service_url=acs_endp, + response_binding=response_binding, + relay_state=relay_state, + **kwargs, + ) + except Exception as e: + msg = { + "message": "AuthnRequest Failed", + "error": f"Failed to construct the AuthnRequest for state: {e}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline, exc_info=True) - raise SATOSAAuthenticationError(context.state, "Failed to construct the AuthnRequest") from exc + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: if req_id in self.outstanding_queries: - msg = "Request with duplicate id {}".format(req_id) + msg = { + "message": "AuthnRequest Failed", + "error": f"Request with duplicate id {req_id}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + logger.info(logline) raise SATOSAAuthenticationError(context.state, msg) - 
self.outstanding_queries[req_id] = req + self.outstanding_queries[req_id] = req_id context.state[self.name] = {"relay_state": relay_state} - return make_saml_response(binding, ht_args) + return make_saml_response(binding, http_info) + + def _get_acs(self, context): + """ + Select the AssertionConsumerServiceURL and binding. + + :param context: The current context + :type context: satosa.context.Context + :return: Selected ACS URL and binding + :rtype: tuple(str, str) + """ + acs_strategy = self.config.get("acs_selection_strategy", "use_first_acs") + if acs_strategy == "use_first_acs": + acs_strategy_fn = self._use_first_acs + elif acs_strategy == "prefer_matching_host": + acs_strategy_fn = self._prefer_matching_host + else: + msg = "Invalid value for '{}' ({}). Using the first ACS instead".format( + "acs_selection_strategy", acs_strategy + ) + logger.error(msg) + acs_strategy_fn = self._use_first_acs + return acs_strategy_fn(context) + + def _use_first_acs(self, context): + return self.sp.config.getattr("endpoints", "sp")["assertion_consumer_service"][ + 0 + ] + + def _prefer_matching_host(self, context): + acs_config = self.sp.config.getattr("endpoints", "sp")[ + "assertion_consumer_service" + ] + try: + hostname = context.http_headers["HTTP_HOST"] + for acs, binding in acs_config: + parsed_acs = urlparse(acs) + if hostname == parsed_acs.netloc: + msg = "Selected ACS '{}' based on the request".format(acs) + logline = lu.LOG_FMT.format( + id=lu.get_session_id(context.state), message=msg + ) + logger.debug(logline) + return acs, binding + except (TypeError, KeyError): + pass + + msg = "Can't find an ACS URL to this hostname ({}), selecting the first one".format( + context.http_headers.get("HTTP_HOST", "") if context.http_headers else "" + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + return self._use_first_acs(context) def authn_response(self, context, binding): """ @@ -312,43 +397,77 @@ def 
authn_response(self, context, binding): :param binding: The saml binding type :return: response """ - if not context.request["SAMLResponse"]: - msg = "Missing Response for state" + + if self.name not in context.state: + """ + If we end up here, it means that the user returns to the proxy + without the SATOSA session cookie. This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + msg = { + "message": "Authentication failed", + "error": "Received AuthN response without a SATOSA session cookie", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "Missing Response") + logger.info(logline) + raise SATOSAMissingStateError(msg) + + samlresponse = context.request.get("SAMLResponse") + if not samlresponse: + msg = { + "message": "Authentication failed", + "error": "SAML Response not found in context.request", + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) try: authn_response = self.sp.parse_authn_request_response( - context.request["SAMLResponse"], - binding, outstanding=self.outstanding_queries) - except Exception as err: - msg = "Failed to parse authn request for state" + samlresponse, binding, outstanding=self.outstanding_queries + ) + except Exception as e: + msg = { + "message": "Authentication failed", + "error": f"Failed to parse Authn response: {e}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline, exc_info=True) - raise SATOSAAuthenticationError(context.state, "Failed to 
parse authn request") from err + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) from e if self.sp.config.getattr('allow_unsolicited', 'sp') is False: req_id = authn_response.in_response_to if req_id not in self.outstanding_queries: - msg = "No request with id: {}".format(req_id), + msg = { + "message": "Authentication failed", + "error": f"No corresponding request with id: {req_id}", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + logger.info(logline) raise SATOSAAuthenticationError(context.state, msg) del self.outstanding_queries[req_id] # check if the relay_state matches the cookie state - if context.state[self.name]["relay_state"] != context.request["RelayState"]: - msg = "State did not match relay state for state" + if context.state[self.name].get("relay_state") != context.request["RelayState"]: + msg = { + "message": "Authentication failed", + "error": "Response state query param did not match relay state for request", + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) - raise SATOSAAuthenticationError(context.state, "State did not match relay state") + logger.info(logline) + raise SATOSAAuthenticationError(context.state, msg) - context.decorate(Context.KEY_BACKEND_METADATA_STORE, self.sp.metadata) + context.decorate(Context.KEY_METADATA_STORE, self.sp.metadata) if self.config.get(SAMLBackend.KEY_MEMORIZE_IDP): issuer = authn_response.response.issuer.text.strip() context.state[Context.KEY_MEMORIZED_IDP] = issuer - context.state.pop(self.name, None) context.state.pop(Context.KEY_FORCE_AUTHN, None) return self.auth_callback_func(context, self._translate_response(authn_response, context.state)) @@ -365,13 +484,18 @@ def disco_response(self, context): info = context.request state = context.state - try: - entity_id = info["entityID"] - except KeyError as err: - msg = "No IDP chosen for state" - logline = 
lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg)
-            logger.debug(logline, exc_info=True)
-            raise SATOSAAuthenticationError(state, "No IDP chosen") from err
+        if 'SATOSA_BASE' not in state:
+            raise SATOSAAuthenticationFlowError("Discovery response without AuthN request")
+
+        entity_id = info.get("entityID")
+        msg = {
+            "message": "Received response from the discovery service",
+            "entity_id": entity_id,
+        }
+        logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg)
+        logger.info(logline)
+        if not entity_id:
+            raise SATOSAAuthenticationError(state, msg)

         return self.authn_request(context, entity_id)
@@ -388,15 +512,22 @@ def _translate_response(self, response, state):
         # The response may have been encrypted by the IdP so if we have an
         # encryption key, try it.
         if self.encryption_keys:
-            response.parse_assertion(self.encryption_keys)
+            response.parse_assertion(keys=self.encryption_keys)

-        authn_info = response.authn_info()[0]
-        auth_class_ref = authn_info[0]
-        timestamp = response.assertion.authn_statement[0].authn_instant
         issuer = response.response.issuer.text
-
+        authn_context_ref, authenticating_authorities, authn_instant = next(
+            iter(response.authn_info()), [None, None, None]
+        )
+        authenticating_authority = (
+            authenticating_authorities[-1]
+            if authenticating_authorities
+            else None
+        )
         auth_info = AuthenticationInformation(
-            auth_class_ref, timestamp, issuer,
+            auth_class_ref=authn_context_ref,
+            timestamp=authn_instant,
+            authority=authenticating_authority,
+            issuer=issuer,
         )

         # The SAML response may not include a NameID.
@@ -415,11 +546,20 @@ def _translate_response(self, response, state): subject_id=name_id, ) - msg = "backend received attributes:\n{}".format( - json.dumps(response.ava, indent=4) - ) + msg = "backend received attributes: {}".format(response.ava) logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg) logger.debug(logline) + + msg = { + "message": "Attributes received by the backend", + "issuer": issuer, + "attributes": " ".join(list(response.ava.keys())) + } + if name_id_format: + msg['name_id'] = name_id_format + logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg) + logger.info(logline) + return internal_resp def _metadata_endpoint(self, context): @@ -431,12 +571,13 @@ def _metadata_endpoint(self, context): :param context: The current context :return: response with metadata """ - msg = "Sending metadata response" + msg = "Sending metadata response for entityId = {}".format(self.sp.config.entityid) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - metadata_string = create_metadata_string(None, self.sp.config, 4, None, None, None, None, - None).decode("utf-8") + metadata_string = create_metadata_string( + configfile=None, config=self.sp.config, valid=4 + ).decode("utf-8") return Response(metadata_string, content="text/xml") def register_endpoints(self): @@ -471,12 +612,29 @@ def register_endpoints(self): ("^%s$" % parsed_endp.path[1:], self.disco_response)) if self.expose_entityid_endpoint(): + logger.debug("Exposing backend entity endpoint = {}".format(self.sp.config.entityid)) parsed_entity_id = urlparse(self.sp.config.entityid) url_map.append(("^{0}".format(parsed_entity_id.path[1:]), self._metadata_endpoint)) + if self.enable_metadata_reload(): + url_map.append( + ("^%s/%s$" % (self.name, "reload-metadata"), self._reload_metadata)) + return url_map + def _reload_metadata(self, context): + """ + Reload SAML metadata + """ + logger.debug("Reloading metadata") + res = 
self.sp.reload_metadata( + copy.deepcopy(self.config[SAMLBackend.KEY_SP_CONFIG]['metadata']) + ) + message = "Metadata reload %s" % ("OK" if res else "failed") + status = "200 OK" if res else "500 FAILED" + return Response(message=message, status=status) + def get_metadata_desc(self): """ See super class satosa.backends.backend_base.BackendModule#get_metadata_desc @@ -532,6 +690,14 @@ def get_metadata_desc(self): ui_info_desc.add_display_name(name["text"], name["lang"]) for logo in ui_info.get("logo", []): ui_info_desc.add_logo(logo["text"], logo["width"], logo["height"], logo.get("lang")) + for keywords in ui_info.get("keywords", []): + ui_info_desc.add_keywords(keywords.get("text", []), keywords.get("lang")) + for information_url in ui_info.get("information_url", []): + ui_info_desc.add_information_url(information_url.get("text"), information_url.get("lang")) + for privacy_statement_url in ui_info.get("privacy_statement_url", []): + ui_info_desc.add_privacy_statement_url( + privacy_statement_url.get("text"), privacy_statement_url.get("lang") + ) description.ui_info = ui_info_desc entity_descriptions.append(description) diff --git a/src/satosa/base.py b/src/satosa/base.py index ae041ab0e..40af19979 100644 --- a/src/satosa/base.py +++ b/src/satosa/base.py @@ -4,27 +4,34 @@ import json import logging import uuid -import warnings as _warnings from saml2.s_utils import UnknownSystemEntity from satosa import util -from satosa.micro_services import consent - +from satosa.response import BadRequest +from satosa.response import NotFound +from satosa.response import Redirect from .context import Context -from .exception import SATOSAConfigurationError -from .exception import SATOSAError, SATOSAAuthenticationError, SATOSAUnknownError -from .micro_services.account_linking import AccountLinking -from .micro_services.consent import Consent -from .plugin_loader import load_backends, load_frontends -from .plugin_loader import load_request_microservices, 
load_response_microservices -from .routing import ModuleRouter, SATOSANoBoundEndpointError -from .state import cookie_to_state, SATOSAStateError, State, state_to_cookie - -from satosa.deprecated import hash_attributes +from .exception import SATOSAAuthenticationError +from .exception import SATOSAAuthenticationFlowError +from .exception import SATOSABadRequestError +from .exception import SATOSAError +from .exception import SATOSAMissingStateError +from .exception import SATOSANoBoundEndpointError +from .exception import SATOSAUnknownError +from .exception import SATOSAStateError +from .plugin_loader import load_backends +from .plugin_loader import load_frontends +from .plugin_loader import load_request_microservices +from .plugin_loader import load_response_microservices +from .routing import ModuleRouter +from .state import State +from .state import cookie_to_state +from .state import state_to_cookie import satosa.logging_util as lu + logger = logging.getLogger(__name__) STATE_KEY = "SATOSA_BASE" @@ -45,22 +52,6 @@ def __init__(self, config): """ self.config = config - for option in ["USER_ID_HASH_SALT"]: - if option in self.config: - msg = ( - "'{opt}' configuration option is deprecated." - " Use the hasher microservice instead." - ).format(opt=option) - _warnings.warn(msg, DeprecationWarning) - - for option in ["hash"]: - if option in self.config["INTERNAL_ATTRIBUTES"]: - msg = ( - "'{opt}' configuration option is deprecated." - " Use the hasher microservice instead." 
- ).format(opt=option) - _warnings.warn(msg, DeprecationWarning) - logger.info("Loading backend modules...") backends = load_backends(self.config, self._auth_resp_callback_func, self.config["INTERNAL_ATTRIBUTES"]) @@ -84,7 +75,6 @@ def __init__(self, config): self.config["MICRO_SERVICES"], self.config["INTERNAL_ATTRIBUTES"], self.config["BASE"])) - self._verify_response_micro_services(self.response_micro_services) self._link_micro_services(self.response_micro_services, self._auth_resp_finish) self.module_router = ModuleRouter(frontends, backends, @@ -99,17 +89,6 @@ def _link_micro_services(self, micro_services, finisher): micro_services[-1].next = finisher - def _verify_response_micro_services(self, response_micro_services): - account_linking_index = next((i for i in range(len(response_micro_services)) - if isinstance(response_micro_services[i], AccountLinking)), -1) - if account_linking_index > 0: - raise SATOSAConfigurationError("Account linking must be configured first in the list of micro services") - - consent_index = next((i for i in range(len(response_micro_services)) - if isinstance(response_micro_services[i], Consent)), -1) - if consent_index != -1 and consent_index < len(response_micro_services) - 1: - raise SATOSAConfigurationError("Consent must be configured last in the list of micro services") - def _auth_req_callback_func(self, context, internal_request): """ This function is called by a frontend module when an authorization request has been @@ -126,16 +105,7 @@ def _auth_req_callback_func(self, context, internal_request): """ state = context.state state[STATE_KEY] = {"requester": internal_request.requester} - # TODO consent module should manage any state it needs by itself - try: - state_dict = context.state[consent.STATE_KEY] - except KeyError: - state_dict = context.state[consent.STATE_KEY] = {} - finally: - state_dict.update({ - "filter": internal_request.attributes or [], - "requester_name": internal_request.requester_name, - }) + msg = 
"Requesting provider: {}".format(internal_request.requester) logline = lu.LOG_FMT.format(id=lu.get_session_id(state), message=msg) logger.info(logline) @@ -155,12 +125,6 @@ def _auth_resp_finish(self, context, internal_response): if user_id_to_attr: internal_response.attributes[user_id_to_attr] = [internal_response.subject_id] - hash_attributes( - self.config["INTERNAL_ATTRIBUTES"].get("hash", []), - internal_response.attributes, - self.config.get("USER_ID_HASH_SALT", ""), - ) - # remove all session state unless CONTEXT_STATE_DELETE is False context.state.delete = self.config.get("CONTEXT_STATE_DELETE", True) context.request = None @@ -234,7 +198,7 @@ def _run_bound_endpoint(self, context, spec): err_id=error.error_id, state=state ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, error.state, exc_info=True) + logger.error(logline, exc_info=True) return self._handle_satosa_authentication_error(error) def _load_state(self, context): @@ -250,13 +214,13 @@ def _load_state(self, context): self.config["COOKIE_STATE_NAME"], self.config["STATE_ENCRYPTION_KEY"], ) - except SATOSAStateError as e: + except SATOSAStateError: state = State() finally: context.state = state - msg = "Loaded state {state} from cookie {cookie}".format(state=state, cookie=context.cookie) + msg = f"Loaded state {state} from cookie {context.cookie}" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.info(logline) + logger.debug(logline) def _save_state(self, resp, context): """ @@ -269,8 +233,23 @@ def _save_state(self, resp, context): :param context: Session context """ - cookie = state_to_cookie(context.state, self.config["COOKIE_STATE_NAME"], "/", - self.config["STATE_ENCRYPTION_KEY"]) + cookie_name = self.config["COOKIE_STATE_NAME"] + cookie = state_to_cookie( + context.state, + name=cookie_name, + path="/", + encryption_key=self.config["STATE_ENCRYPTION_KEY"], + secure=self.config.get("COOKIE_SECURE"), + 
httponly=self.config.get("COOKIE_HTTPONLY"), + samesite=self.config.get("COOKIE_SAMESITE"), + max_age=self.config.get("COOKIE_MAX_AGE"), + ) + resp.headers = [ + (name, value) + for (name, value) in resp.headers + if name != "Set-Cookie" + or not value.startswith(f"{cookie_name}=") + ] resp.headers.append(tuple(cookie.output().split(": ", 1))) def run(self, context): @@ -288,28 +267,110 @@ def run(self, context): spec = self.module_router.endpoint_routing(context) resp = self._run_bound_endpoint(context, spec) self._save_state(resp, context) - except SATOSANoBoundEndpointError: + except SATOSABadRequestError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Bad Request", + "error": str(e), + "error_id": error_id, + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) + return BadRequest(e.error) + except SATOSAMissingStateError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Missing SATOSA State", + "error": str(e), + "error_id": error_id, + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) raise - except SATOSAError: - msg = "Uncaught SATOSA error" + except SATOSAAuthenticationFlowError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "SATOSA Authentication Flow Error", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=True) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + 
return Redirect(generic_error_url) raise - except UnknownSystemEntity as err: - msg = "configuration error: unknown system entity " + str(err) + except SATOSANoBoundEndpointError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "URL-path is not bound to any endpoint function", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=False) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) + return NotFound("The Service or Identity Provider you requested could not be found.") + except SATOSAError as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Uncaught SATOSA error", + "error": str(e), + "error_id": error_id, + } + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) raise - except Exception as err: - msg = "Uncaught exception" + except UnknownSystemEntity as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Configuration error: unknown system entity", + "error": str(e), + "error_id": error_id, + } logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline, exc_info=True) - raise SATOSAUnknownError("Unknown error") from err - return resp + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + redirect_url = f"{generic_error_url}?errorid={error_id}" + return Redirect(generic_error_url) + raise + except Exception as e: + error_id = uuid.uuid4().urn + msg = { + "message": "Uncaught exception", + "error": str(e), + "error_id": error_id, + } + logline = 
lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + generic_error_url = self.config.get("ERROR_URL") + if generic_error_url: + return Redirect(generic_error_url) + raise SATOSAUnknownError("Unknown error") from e + else: + return resp class SAMLBaseModule(object): KEY_ENTITYID_ENDPOINT = 'entityid_endpoint' + KEY_ENABLE_METADATA_RELOAD = 'enable_metadata_reload' KEY_ATTRIBUTE_PROFILE = 'attribute_profile' KEY_ACR_MAPPING = 'acr_mapping' VALUE_ATTRIBUTE_PROFILE_DEFAULT = 'saml' @@ -325,6 +386,15 @@ def expose_entityid_endpoint(self): value = self.config.get(self.KEY_ENTITYID_ENDPOINT, False) return bool(value) + def enable_metadata_reload(self): + """ + Check whether metadata reload has been enabled in config + + return: bool + """ + value = self.config.get(self.KEY_ENABLE_METADATA_RELOAD, False) + return bool(value) + class SAMLEIDASBaseModule(SAMLBaseModule): VALUE_ATTRIBUTE_PROFILE_DEFAULT = 'eidas' diff --git a/src/satosa/context.py b/src/satosa/context.py index 2413624d2..2cd8243ac 100644 --- a/src/satosa/context.py +++ b/src/satosa/context.py @@ -1,36 +1,44 @@ -from satosa.exception import SATOSAError +from warnings import warn as _warn - -class SATOSABadContextError(SATOSAError): - """ - Raise this exception if validating the Context and failing. 
- """ - pass +from satosa.exception import SATOSABadContextError class Context(object): """ Holds methods for sharing proxy data through the current request """ - KEY_BACKEND_METADATA_STORE = 'metadata_store' + KEY_METADATA_STORE = 'metadata_store' KEY_TARGET_ENTITYID = 'target_entity_id' KEY_FORCE_AUTHN = 'force_authn' KEY_MEMORIZED_IDP = 'memorized_idp' + KEY_REQUESTER_METADATA = 'requester_metadata' + KEY_AUTHN_CONTEXT_CLASS_REF = 'authn_context_class_ref' + KEY_TARGET_AUTHN_CONTEXT_CLASS_REF = 'target_authn_context_class_ref' def __init__(self): self._path = None self.request = None + self.request_uri = None + self.request_method = None + self.qs_params = None + self.server = None + self.http_headers = None + self.cookie = None + self.request_authorization = None self.target_backend = None self.target_frontend = None self.target_micro_service = None # This dict is a data carrier between frontend and backend modules. self.internal_data = {} - self.cookie = None self.state = None - def __repr__(self): - from pprint import pformat - return pformat(vars(self)) + @property + def KEY_BACKEND_METADATA_STORE(self): + msg = "'{old_key}' is deprecated; use '{new_key}' instead.".format( + old_key="KEY_BACKEND_METADATA_STORE", new_key="KEY_METADATA_STORE" + ) + _warn(msg, DeprecationWarning) + return Context.KEY_METADATA_STORE @property def path(self): diff --git a/src/satosa/deprecated.py b/src/satosa/deprecated.py deleted file mode 100644 index 2ab16c6ed..000000000 --- a/src/satosa/deprecated.py +++ /dev/null @@ -1,272 +0,0 @@ -import datetime -import warnings as _warnings -from enum import Enum - -from saml2.saml import NAMEID_FORMAT_TRANSIENT -from saml2.saml import NAMEID_FORMAT_PERSISTENT -from saml2.saml import NAMEID_FORMAT_EMAILADDRESS -from saml2.saml import NAMEID_FORMAT_UNSPECIFIED - -from satosa.internal import AuthenticationInformation as _AuthenticationInformation -from satosa.internal import InternalData as _InternalData -from satosa import util - - -class 
InternalRequest(_InternalData): - def __init__(self, user_id_hash_type, requester, requester_name=None): - msg = ( - "InternalRequest is deprecated." - " Use satosa.internal.InternalData class instead." - ) - _warnings.warn(msg, DeprecationWarning) - super().__init__( - user_id_hash_type=user_id_hash_type, - requester=requester, - requester_name=requester_name, - ) - - @classmethod - def from_dict(cls, data): - instance = cls( - user_id_hash_type=data.get("hash_type"), - requester=data.get("requester"), - requester_name=data.get("requester_name"), - ) - return instance - - -class InternalResponse(_InternalData): - def __init__(self, auth_info=None): - msg = ( - "InternalResponse is deprecated." - " Use satosa.internal.InternalData class instead." - ) - _warnings.warn(msg, DeprecationWarning) - auth_info = auth_info or _AuthenticationInformation() - super().__init__(auth_info=auth_info) - - @classmethod - def from_dict(cls, data): - """ - :type data: dict[str, dict[str, str] | str] - :rtype: satosa.internal_data.InternalResponse - :param data: A dict representation of an InternalResponse object - :return: An InternalResponse object - """ - auth_info = _AuthenticationInformation.from_dict(data.get("auth_info")) - instance = cls(auth_info=auth_info) - instance.user_id_hash_type = data.get("hash_type") - instance.attributes = data.get("attributes", {}) - instance.user_id = data.get("user_id") - instance.requester = data.get("requester") - return instance - - -class SAMLInternalResponse(InternalResponse): - """ - Like the parent InternalResponse, holds internal representation of - service related data, but includes additional details relevant to - SAML interoperability. - - :type name_id: instance of saml2.saml.NameID from pysaml2 - """ - - def __init__(self, auth_info=None): - msg = ( - "SAMLInternalResponse is deprecated." - " Use satosa.internal.InternalData class instead." 
- ) - _warnings.warn(msg, DeprecationWarning) - super().__init__(auth_info=auth_info) - - -class UserIdHashType(Enum): - """ - All different user id hash types - """ - - transient = 1 - persistent = 2 - pairwise = 3 - public = 4 - emailaddress = 5 - unspecified = 6 - - def __getattr__(self, name): - if name != "_value_": - msg = "UserIdHashType is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - return self.__getattribute__(name) - - @classmethod - def from_string(cls, str): - msg = "UserIdHashType is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - try: - return getattr(cls, str) - except AttributeError: - raise ValueError("Unknown hash type '{}'".format(str)) - - -class UserIdHasher(object): - """ - Class for creating different user id types - """ - - STATE_KEY = "IDHASHER" - - @staticmethod - def save_state(internal_request, state): - """ - Saves all necessary information needed by the UserIdHasher - - :type internal_request: satosa.internal_data.InternalRequest - - :param internal_request: The request - :param state: The current state - """ - state_data = {"hash_type": internal_request.user_id_hash_type} - state[UserIdHasher.STATE_KEY] = state_data - - @staticmethod - def hash_data(salt, value): - """ - Hashes a value together with a salt. - :type salt: str - :type value: str - :param salt: hash salt - :param value: value to hash together with the salt - :return: hash value (SHA512) - """ - msg = "UserIdHasher is deprecated; use satosa.util.hash_data instead." 
- _warnings.warn(msg, DeprecationWarning) - return util.hash_data(salt, value) - - @staticmethod - def hash_type(state): - state_data = state[UserIdHasher.STATE_KEY] - hash_type = state_data["hash_type"] - return hash_type - - @staticmethod - def hash_id(salt, user_id, requester, state): - """ - Sets a user id to the internal_response, - in the format specified by the internal response - - :type salt: str - :type user_id: str - :type requester: str - :type state: satosa.state.State - :rtype: str - - :param salt: A salt string for the ID hashing - :param user_id: the user id - :param user_id_hash_type: Hashing type - :param state: The current state - :return: the internal_response containing the hashed user ID - """ - hash_type_to_format = { - NAMEID_FORMAT_TRANSIENT: "{id}{req}{time}", - NAMEID_FORMAT_PERSISTENT: "{id}{req}", - "pairwise": "{id}{req}", - "public": "{id}", - NAMEID_FORMAT_EMAILADDRESS: "{id}", - NAMEID_FORMAT_UNSPECIFIED: "{id}", - } - - format_args = { - "id": user_id, - "req": requester, - "time": datetime.datetime.utcnow().timestamp(), - } - - hash_type = UserIdHasher.hash_type(state) - try: - fmt = hash_type_to_format[hash_type] - except KeyError as e: - raise ValueError("Unknown hash type: {}".format(hash_type)) from e - else: - user_id = fmt.format(**format_args) - - hasher = ( - (lambda salt, value: value) - if hash_type - in [NAMEID_FORMAT_EMAILADDRESS, NAMEID_FORMAT_UNSPECIFIED] - else util.hash_data - ) - return hasher(salt, user_id) - - -def saml_name_id_format_to_hash_type(name_format): - """ - Translate pySAML2 name format to satosa format - - :type name_format: str - :rtype: satosa.internal_data.UserIdHashType - :param name_format: SAML2 name format - :return: satosa format - """ - msg = "saml_name_id_format_to_hash_type is deprecated and will be removed." 
- _warnings.warn(msg, DeprecationWarning) - - name_id_format_to_hash_type = { - NAMEID_FORMAT_TRANSIENT: UserIdHashType.transient, - NAMEID_FORMAT_PERSISTENT: UserIdHashType.persistent, - NAMEID_FORMAT_EMAILADDRESS: UserIdHashType.emailaddress, - NAMEID_FORMAT_UNSPECIFIED: UserIdHashType.unspecified, - } - - return name_id_format_to_hash_type.get( - name_format, UserIdHashType.transient - ) - - -def hash_type_to_saml_name_id_format(hash_type): - """ - Translate satosa format to pySAML2 name format - - :type hash_type: satosa.internal_data.UserIdHashType - :rtype: str - :param hash_type: satosa format - :return: pySAML2 name format - """ - msg = "hash_type_to_saml_name_id_format is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - - hash_type_to_name_id_format = { - UserIdHashType.transient: NAMEID_FORMAT_TRANSIENT, - UserIdHashType.persistent: NAMEID_FORMAT_PERSISTENT, - UserIdHashType.emailaddress: NAMEID_FORMAT_EMAILADDRESS, - UserIdHashType.unspecified: NAMEID_FORMAT_UNSPECIFIED, - } - - return hash_type_to_name_id_format.get(hash_type, NAMEID_FORMAT_PERSISTENT) - - -def oidc_subject_type_to_hash_type(subject_type): - msg = "oidc_subject_type_to_hash_type is deprecated and will be removed." - _warnings.warn(msg, DeprecationWarning) - - if subject_type == "public": - return UserIdHashType.public - - return UserIdHashType.pairwise - - -def hash_attributes(hash_attributes, internal_attributes, salt): - msg = ( - "'USER_ID_HASH_SALT' configuration option is deprecated." - " 'hash' configuration option is deprecated." - " Use the hasher microservice instead." 
- ) - _warnings.warn(msg, DeprecationWarning) - - # Hash all attributes specified in INTERNAL_ATTRIBUTES["hash"] - for attribute in hash_attributes: - # hash all attribute values individually - if attribute in internal_attributes: - hashed_values = [ - util.hash_data(salt, v) for v in internal_attributes[attribute] - ] - internal_attributes[attribute] = hashed_values diff --git a/src/satosa/exception.py b/src/satosa/exception.py index 02f3c0554..770d26283 100644 --- a/src/satosa/exception.py +++ b/src/satosa/exception.py @@ -67,3 +67,56 @@ def message(self): :return: Exception message """ return self._message.format(error_id=self.error_id) + + +class SATOSABasicError(SATOSAError): + """ + eduTEAMS error + """ + def __init__(self, error): + self.error = error + + +class SATOSAMissingStateError(SATOSABasicError): + """ + SATOSA Missing State error. + + This exception should be raised when SATOSA receives a request as part of + an authentication flow and while the session state cookie is expected for + that step, it is not included in the request + """ + pass + + +class SATOSAAuthenticationFlowError(SATOSABasicError): + """ + SATOSA Flow error. + + This exception should be raised when SATOSA receives a request that cannot + be serviced because previous steps in the authentication flow for that session + cannot be found + """ + pass + + +class SATOSABadRequestError(SATOSABasicError): + """ + SATOSA Bad Request error. + + This exception should be raised when we want to return an HTTP 400 Bad Request + """ + pass + + +class SATOSABadContextError(SATOSAError): + """ + Raise this exception if validating the Context and failing. 
+ """ + pass + + +class SATOSANoBoundEndpointError(SATOSAError): + """ + Raised when a given url path is not bound to any endpoint function + """ + pass diff --git a/src/satosa/frontends/openid_connect.py b/src/satosa/frontends/openid_connect.py index e93cf4998..88041b373 100644 --- a/src/satosa/frontends/openid_connect.py +++ b/src/satosa/frontends/openid_connect.py @@ -1,22 +1,34 @@ """ A OpenID Connect frontend module for the satosa proxy """ + import json import logging from collections import defaultdict from urllib.parse import urlencode, urlparse from jwkest.jwk import rsa_load, RSAKey + from oic.oic import scope2claims -from oic.oic.message import (AuthorizationRequest, AuthorizationErrorResponse, TokenErrorResponse, - UserInfoErrorResponse) -from oic.oic.provider import RegistrationEndpoint, AuthorizationEndpoint, TokenEndpoint, UserinfoEndpoint +from oic.oic.message import AuthorizationRequest +from oic.oic.message import AuthorizationErrorResponse +from oic.oic.message import TokenErrorResponse +from oic.oic.message import UserInfoErrorResponse +from oic.oic.provider import RegistrationEndpoint +from oic.oic.provider import AuthorizationEndpoint +from oic.oic.provider import TokenEndpoint +from oic.oic.provider import UserinfoEndpoint + from pyop.access_token import AccessToken from pyop.authz_state import AuthorizationState -from pyop.exceptions import (InvalidAuthenticationRequest, InvalidClientRegistrationRequest, - InvalidClientAuthentication, OAuthError, BearerTokenError, InvalidAccessToken) +from pyop.exceptions import InvalidAuthenticationRequest +from pyop.exceptions import InvalidClientRegistrationRequest +from pyop.exceptions import InvalidClientAuthentication +from pyop.exceptions import OAuthError +from pyop.exceptions import BearerTokenError +from pyop.exceptions import InvalidAccessToken from pyop.provider import Provider -from pyop.storage import MongoWrapper +from pyop.storage import StorageBase from pyop.subject_identifier import 
HashBasedSubjectIdentifierFactory from pyop.userinfo import Userinfo from pyop.util import should_fragment_encode @@ -29,96 +41,82 @@ import satosa.logging_util as lu from satosa.internal import InternalData -from satosa.deprecated import oidc_subject_type_to_hash_type logger = logging.getLogger(__name__) +class MirrorPublicSubjectIdentifierFactory(HashBasedSubjectIdentifierFactory): + def create_public_identifier(self, user_id): + return user_id + + class OpenIDConnectFrontend(FrontendModule): """ A OpenID Connect frontend module """ def __init__(self, auth_req_callback_func, internal_attributes, conf, base_url, name): - self._validate_config(conf) + _validate_config(conf) super().__init__(auth_req_callback_func, internal_attributes, base_url, name) self.config = conf - self.signing_key = RSAKey(key=rsa_load(conf["signing_key_path"]), use="sig", alg="RS256") - - def _create_provider(self, endpoint_baseurl): - response_types_supported = self.config["provider"].get("response_types_supported", ["id_token"]) - subject_types_supported = self.config["provider"].get("subject_types_supported", ["pairwise"]) - scopes_supported = self.config["provider"].get("scopes_supported", ["openid"]) - extra_scopes = self.config["provider"].get("extra_scopes") - capabilities = { - "issuer": self.base_url, - "authorization_endpoint": "{}/{}".format(endpoint_baseurl, AuthorizationEndpoint.url), - "jwks_uri": "{}/jwks".format(endpoint_baseurl), - "response_types_supported": response_types_supported, - "id_token_signing_alg_values_supported": [self.signing_key.alg], - "response_modes_supported": ["fragment", "query"], - "subject_types_supported": subject_types_supported, - "claim_types_supported": ["normal"], - "claims_parameter_supported": True, - "claims_supported": [attribute_map["openid"][0] - for attribute_map in self.internal_attributes["attributes"].values() - if "openid" in attribute_map], - "request_parameter_supported": False, - "request_uri_parameter_supported": False, - 
"scopes_supported": scopes_supported - } - - if 'code' in response_types_supported: - capabilities["token_endpoint"] = "{}/{}".format(endpoint_baseurl, TokenEndpoint.url) - - if self.config["provider"].get("client_registration_supported", False): - capabilities["registration_endpoint"] = "{}/{}".format(endpoint_baseurl, RegistrationEndpoint.url) - - authz_state = self._init_authorization_state() + provider_config = self.config["provider"] + provider_config["issuer"] = base_url + + self.signing_key = RSAKey( + key=rsa_load(self.config["signing_key_path"]), + use="sig", + alg="RS256", + kid=self.config.get("signing_key_id", ""), + ) + db_uri = self.config.get("db_uri") + self.stateless = db_uri and StorageBase.type(db_uri) == "stateless" + self.user_db = ( + StorageBase.from_uri(db_uri, db_name="satosa", collection="authz_codes") + if db_uri and not self.stateless + else {} + ) + + sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) + mirror_public = self.config.get("sub_mirror_public", False) + authz_state = _init_authorization_state( + provider_config, db_uri, sub_hash_salt, mirror_public + ) + + client_db_uri = self.config.get("client_db_uri") cdb_file = self.config.get("client_db_path") - if db_uri: - cdb = MongoWrapper(db_uri, "satosa", "clients") + if client_db_uri: + cdb = StorageBase.from_uri( + client_db_uri, db_name="satosa", collection="clients", ttl=None + ) elif cdb_file: with open(cdb_file) as f: cdb = json.loads(f.read()) else: cdb = {} - self.user_db = MongoWrapper(db_uri, "satosa", "authz_codes") if db_uri else {} - self.provider = Provider( + + self.endpoint_baseurl = "{}/{}".format(self.base_url, self.name) + self.provider = _create_provider( + provider_config, + self.endpoint_baseurl, + self.internal_attributes, self.signing_key, - capabilities, authz_state, + self.user_db, cdb, - Userinfo(self.user_db), - extra_scopes=extra_scopes, ) - def _init_authorization_state(self): - sub_hash_salt = self.config.get("sub_hash_salt", rndstr(16)) - 
db_uri = self.config.get("db_uri") - if db_uri: - authz_code_db = MongoWrapper(db_uri, "satosa", "authz_codes") - access_token_db = MongoWrapper(db_uri, "satosa", "access_tokens") - refresh_token_db = MongoWrapper(db_uri, "satosa", "refresh_tokens") - sub_db = MongoWrapper(db_uri, "satosa", "subject_identifiers") - else: - authz_code_db = None - access_token_db = None - refresh_token_db = None - sub_db = None - - token_lifetimes = {k: self.config["provider"][k] for k in ["authorization_code_lifetime", - "access_token_lifetime", - "refresh_token_lifetime", - "refresh_token_threshold"] - if k in self.config["provider"]} - return AuthorizationState(HashBasedSubjectIdentifierFactory(sub_hash_salt), authz_code_db, access_token_db, - refresh_token_db, sub_db, **token_lifetimes) - - def handle_authn_response(self, context, internal_resp, extra_id_token_claims=None): + def _get_extra_id_token_claims(self, user_id, client_id): + if "extra_id_token_claims" in self.config["provider"]: + config = self.config["provider"]["extra_id_token_claims"].get(client_id, []) + if type(config) is list and len(config) > 0: + requested_claims = {k: None for k in config} + return self.provider.userinfo.get_claims_for(user_id, requested_claims) + return {} + + def handle_authn_response(self, context, internal_resp): """ See super class method satosa.frontends.base.FrontendModule#handle_authn_response :type context: satosa.context.Context @@ -129,13 +127,21 @@ def handle_authn_response(self, context, internal_resp, extra_id_token_claims=No auth_req = self._get_authn_request_from_state(context.state) claims = self.converter.from_internal("openid", internal_resp.attributes) - self.user_db[internal_resp.subject_id] = dict(combine_claim_values(claims.items())) + # Filter unset claims + claims = {k: v for k, v in claims.items() if v} + self.user_db[internal_resp.subject_id] = dict( + combine_claim_values(claims.items()) + ) auth_resp = self.provider.authorize( auth_req, internal_resp.subject_id, - 
extra_id_token_claims=extra_id_token_claims, + extra_id_token_claims=lambda user_id, client_id: + self._get_extra_id_token_claims(user_id, client_id), ) + if self.stateless: + del self.user_db[internal_resp.subject_id] + del context.state[self.name] http_response = auth_resp.request(auth_req["redirect_uri"], should_fragment_encode(auth_req)) return SeeOther(http_response) @@ -183,9 +189,6 @@ def register_endpoints(self, backend_names): else: backend_name = backend_names[0] - endpoint_baseurl = "{}/{}".format(self.base_url, self.name) - self._create_provider(endpoint_baseurl) - provider_config = ("^.well-known/openid-configuration$", self.provider_config) jwks_uri = ("^{}/jwks$".format(self.name), self.jwks) @@ -196,38 +199,36 @@ def register_endpoints(self, backend_names): auth_path = urlparse(auth_endpoint).path.lstrip("/") else: auth_path = "{}/{}".format(self.name, AuthorizationEndpoint.url) + authentication = ("^{}$".format(auth_path), self.handle_authn_request) url_map = [provider_config, jwks_uri, authentication] if any("code" in v for v in self.provider.configuration_information["response_types_supported"]): - self.provider.configuration_information["token_endpoint"] = "{}/{}".format(endpoint_baseurl, - TokenEndpoint.url) - token_endpoint = ("^{}/{}".format(self.name, TokenEndpoint.url), self.token_endpoint) + self.provider.configuration_information["token_endpoint"] = "{}/{}".format( + self.endpoint_baseurl, TokenEndpoint.url + ) + token_endpoint = ( + "^{}/{}".format(self.name, TokenEndpoint.url), self.token_endpoint + ) url_map.append(token_endpoint) - self.provider.configuration_information["userinfo_endpoint"] = "{}/{}".format(endpoint_baseurl, - UserinfoEndpoint.url) - userinfo_endpoint = ("^{}/{}".format(self.name, UserinfoEndpoint.url), self.userinfo_endpoint) + self.provider.configuration_information["userinfo_endpoint"] = ( + "{}/{}".format(self.endpoint_baseurl, UserinfoEndpoint.url) + ) + userinfo_endpoint = ( + "^{}/{}".format(self.name, 
UserinfoEndpoint.url), self.userinfo_endpoint + ) url_map.append(userinfo_endpoint) + if "registration_endpoint" in self.provider.configuration_information: - client_registration = ("^{}/{}".format(self.name, RegistrationEndpoint.url), self.client_registration) + client_registration = ( + "^{}/{}".format(self.name, RegistrationEndpoint.url), + self.client_registration, + ) url_map.append(client_registration) return url_map - def _validate_config(self, config): - """ - Validates that all necessary config parameters are specified. - :type config: dict[str, dict[str, Any] | str] - :param config: the module config - """ - if config is None: - raise ValueError("OIDCFrontend conf can't be 'None'.") - - for k in {"signing_key_path", "provider"}: - if k not in config: - raise ValueError("Missing configuration parameter '{}' for OpenID Connect frontend.".format(k)) - def _get_authn_request_from_state(self, state): """ Extract the clietns request stoed in the SATOSA state. @@ -360,7 +361,10 @@ def token_endpoint(self, context): """ headers = {"Authorization": context.request_authorization} try: - response = self.provider.handle_token_request(urlencode(context.request), headers) + response = self.provider.handle_token_request( + urlencode(context.request), + headers, + lambda user_id, client_id: self._get_extra_id_token_claims(user_id, client_id)) return Response(response.to_json(), content="application/json") except InvalidClientAuthentication as e: logline = "invalid client authentication at token endpoint" @@ -391,6 +395,136 @@ def userinfo_endpoint(self, context): return response +def _validate_config(config): + """ + Validates that all necessary config parameters are specified. 
+ :type config: dict[str, dict[str, Any] | str] + :param config: the module config + """ + if config is None: + raise ValueError("OIDCFrontend configuration can't be 'None'.") + + for k in {"signing_key_path", "provider"}: + if k not in config: + raise ValueError("Missing configuration parameter '{}' for OpenID Connect frontend.".format(k)) + + if "signing_key_id" in config and type(config["signing_key_id"]) is not str: + raise ValueError( + "The configuration parameter 'signing_key_id' is not defined as a string for OpenID Connect frontend.") + + +def _create_provider( + provider_config, + endpoint_baseurl, + internal_attributes, + signing_key, + authz_state, + user_db, + cdb, +): + response_types_supported = provider_config.get("response_types_supported", ["id_token"]) + subject_types_supported = provider_config.get("subject_types_supported", ["pairwise"]) + scopes_supported = provider_config.get("scopes_supported", ["openid"]) + extra_scopes = provider_config.get("extra_scopes") + capabilities = { + "issuer": provider_config["issuer"], + "authorization_endpoint": "{}/{}".format(endpoint_baseurl, AuthorizationEndpoint.url), + "jwks_uri": "{}/jwks".format(endpoint_baseurl), + "response_types_supported": response_types_supported, + "id_token_signing_alg_values_supported": [signing_key.alg], + "response_modes_supported": ["fragment", "query"], + "subject_types_supported": subject_types_supported, + "claim_types_supported": ["normal"], + "claims_parameter_supported": True, + "claims_supported": [ + attribute_map["openid"][0] + for attribute_map in internal_attributes["attributes"].values() + if "openid" in attribute_map + ], + "request_parameter_supported": False, + "request_uri_parameter_supported": False, + "scopes_supported": scopes_supported + } + + if 'code' in response_types_supported: + capabilities["token_endpoint"] = "{}/{}".format( + endpoint_baseurl, TokenEndpoint.url + ) + + if provider_config.get("client_registration_supported", False): + 
capabilities["registration_endpoint"] = "{}/{}".format( + endpoint_baseurl, RegistrationEndpoint.url + ) + + provider = Provider( + signing_key, + capabilities, + authz_state, + cdb, + Userinfo(user_db), + extra_scopes=extra_scopes, + id_token_lifetime=provider_config.get("id_token_lifetime", 3600), + ) + return provider + + +def _init_authorization_state( + provider_config, db_uri, sub_hash_salt, mirror_public=False +): + if db_uri: + authz_code_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="authz_codes", + ttl=provider_config.get("authorization_code_lifetime", 600), + ) + access_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="access_tokens", + ttl=provider_config.get("access_token_lifetime", 3600), + ) + refresh_token_db = StorageBase.from_uri( + db_uri, + db_name="satosa", + collection="refresh_tokens", + ttl=provider_config.get("refresh_token_lifetime", None), + ) + sub_db = StorageBase.from_uri( + db_uri, db_name="satosa", collection="subject_identifiers", ttl=None + ) + else: + authz_code_db = None + access_token_db = None + refresh_token_db = None + sub_db = None + + token_lifetimes = { + k: provider_config[k] + for k in [ + "authorization_code_lifetime", + "access_token_lifetime", + "refresh_token_lifetime", + "refresh_token_threshold", + ] + if k in provider_config + } + + subject_id_factory = ( + MirrorPublicSubjectIdentifierFactory(sub_hash_salt) + if mirror_public + else HashBasedSubjectIdentifierFactory(sub_hash_salt) + ) + return AuthorizationState( + subject_id_factory, + authz_code_db, + access_token_db, + refresh_token_db, + sub_db, + **token_lifetimes, + ) + + def combine_return_input(values): return values diff --git a/src/satosa/frontends/ping.py b/src/satosa/frontends/ping.py index 8eda3948c..27fec279c 100644 --- a/src/satosa/frontends/ping.py +++ b/src/satosa/frontends/ping.py @@ -1,15 +1,14 @@ import logging import satosa.logging_util as lu -import satosa.micro_services.base -from 
satosa.logging_util import satosa_logging +from satosa.frontends.base import FrontendModule from satosa.response import Response logger = logging.getLogger(__name__) -class PingFrontend(satosa.frontends.base.FrontendModule): +class PingFrontend(FrontendModule): """ SATOSA frontend that responds to a query with a simple 200 OK, intended to be used as a simple heartbeat monitor. @@ -20,12 +19,12 @@ def __init__(self, auth_req_callback_func, internal_attributes, config, base_url self.config = config - def handle_authn_response(self, context, internal_resp, extra_id_token_claims=None): + def handle_authn_response(self, context, internal_resp): """ See super class method satosa.frontends.base.FrontendModule#handle_authn_response :type context: satosa.context.Context :type internal_response: satosa.internal.InternalData - :rtype oic.utils.http_util.Response + :rtype: satosa.response.Response """ raise NotImplementedError() @@ -33,7 +32,7 @@ def handle_backend_error(self, exception): """ See super class satosa.frontends.base.FrontendModule :type exception: satosa.exception.SATOSAError - :rtype: oic.utils.http_util.Response + :rtype: satosa.response.Response """ raise NotImplementedError() @@ -50,6 +49,8 @@ def register_endpoints(self, backend_names): def ping_endpoint(self, context): """ + :type context: satosa.context.Context + :rtype: satosa.response.Response """ msg = "Ping returning 200 OK" logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) diff --git a/src/satosa/frontends/saml2.py b/src/satosa/frontends/saml2.py index 6ce12a476..22f43376d 100644 --- a/src/satosa/frontends/saml2.py +++ b/src/satosa/frontends/saml2.py @@ -34,12 +34,12 @@ from ..response import ServiceError from ..saml_util import make_saml_response from satosa.exception import SATOSAError +from satosa.exception import SATOSABadRequestError +from satosa.exception import SATOSAMissingStateError import satosa.util as util import satosa.logging_util as lu from satosa.internal 
import InternalData -from satosa.deprecated import saml_name_id_format_to_hash_type -from satosa.deprecated import hash_type_to_saml_name_id_format logger = logging.getLogger(__name__) @@ -115,12 +115,18 @@ def register_endpoints(self, backend_names): :type backend_names: list[str] :rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))] """ + url_map = [] + + if self.enable_metadata_reload(): + url_map.append( + ("^%s/%s$" % (self.name, "reload-metadata"), self._reload_metadata)) + self.idp_config = self._build_idp_config_endpoints( self.config[self.KEY_IDP_CONFIG], backend_names) # Create the idp - idp_config = IdPConfig().load(copy.deepcopy(self.idp_config), metadata_construction=False) + idp_config = IdPConfig().load(copy.deepcopy(self.idp_config)) self.idp = Server(config=idp_config) - return self._register_endpoints(backend_names) + return self._register_endpoints(backend_names) + url_map def _create_state_data(self, context, resp_args, relay_state): """ @@ -148,7 +154,23 @@ def load_state(self, state): :param state: The current state :return: The dictionary given by the save_state function """ - state_data = state[self.name] + try: + state_data = state[self.name] + except KeyError: + """ + If we end up here, it means that the user returns to the proxy + without the SATOSA session cookie. 
This can happen at least in the + following cases: + - the user deleted the cookie from the browser + - the browser of the user blocked the cookie + - the user has completed an authentication flow, the cookie has + been removed by SATOSA and then the user used the back button + of their browser and resend the authentication response, but + without the SATOSA session cookie + """ + error = "Received AuthN response without a SATOSA session cookie" + raise SATOSAMissingStateError(error) + if isinstance(state_data["resp_args"]["name_id_policy"], str): state_data["resp_args"]["name_id_policy"] = name_id_policy_from_string( state_data["resp_args"]["name_id_policy"]) @@ -169,10 +191,8 @@ def _validate_config(self, config): raise ValueError("No configuration given") for key in required_keys: - try: - _val = config[key] - except KeyError as e: - raise ValueError("Missing configuration key: %s" % key) from e + if key not in config: + raise ValueError("Missing configuration key: %s" % key) def _handle_authn_request(self, context, binding_in, idp): """ @@ -188,7 +208,16 @@ def _handle_authn_request(self, context, binding_in, idp): :param idp: The saml frontend idp server :return: response """ - req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in) + + try: + req_info = idp.parse_authn_request(context.request["SAMLRequest"], binding_in) + except KeyError: + """ + HTTP clients that call the SSO endpoint without sending SAML AuthN + request will receive a "400 Bad Request" response + """ + raise SATOSABadRequestError("HTTP request does not include a SAML AuthN request") + authn_req = req_info.message msg = "{}".format(authn_req) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) @@ -206,7 +235,7 @@ def _handle_authn_request(self, context, binding_in, idp): return ServiceError("Incorrect request from requester: %s" % e) requester = resp_args["sp_entity_id"] - context.state[self.name] = self._create_state_data(context, 
idp.response_args(authn_req), + context.state[self.name] = self._create_state_data(context, resp_args, context.request.get("RelayState")) subject = authn_req.subject @@ -249,6 +278,12 @@ def _handle_authn_request(self, context, binding_in, idp): idp, idp_policy, requester, context.state ) + authn_context_class_ref_nodes = getattr( + authn_req.requested_authn_context, 'authn_context_class_ref', [] + ) + authn_context = [ref.text for ref in authn_context_class_ref_nodes] + context.decorate(Context.KEY_AUTHN_CONTEXT_CLASS_REF, authn_context) + context.decorate(Context.KEY_METADATA_STORE, self.idp.metadata) return self.auth_req_callback_func(context, internal_req) def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state): @@ -274,7 +309,7 @@ def _get_approved_attributes(self, idp, idp_policy, sp_entity_id, state): for aconv in attrconvs: if aconv.name_format == name_format: all_attributes = {v: None for v in aconv._fro.values()} - attribute_filter = list(idp_policy.restrict(all_attributes, sp_entity_id, idp.metadata).keys()) + attribute_filter = list(idp_policy.restrict(all_attributes, sp_entity_id).keys()) break attribute_filter = self.converter.to_internal_filter(self.attribute_profile, attribute_filter) msg = "Filter: {}".format(attribute_filter) @@ -286,9 +321,14 @@ def _filter_attributes(self, idp, internal_response, context,): idp_policy = idp.config.getattr("policy", "idp") attributes = {} if idp_policy: - approved_attributes = self._get_approved_attributes(idp, idp_policy, internal_response.requester, - context.state) - attributes = {k: v for k, v in internal_response.attributes.items() if k in approved_attributes} + approved_attributes = self._get_approved_attributes( + idp, idp_policy, internal_response.requester, context.state + ) + attributes = { + k: v + for k, v in internal_response.attributes.items() + if k in approved_attributes + } return attributes @@ -353,56 +393,73 @@ def _handle_authn_response(self, context, internal_response, idp): 
logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - policies = self.idp_config.get( - 'service', {}).get('idp', {}).get('policy', {}) + idp_conf = self.idp_config.get('service', {}).get('idp', {}) + policies = idp_conf.get('policy', {}) sp_policy = policies.get('default', {}) sp_policy.update(policies.get(sp_entity_id, {})) sign_assertion = sp_policy.get('sign_assertion', False) sign_response = sp_policy.get('sign_response', True) - sign_alg = sp_policy.get('sign_alg', 'SIG_RSA_SHA256') - digest_alg = sp_policy.get('digest_alg', 'DIGEST_SHA256') encrypt_assertion = sp_policy.get('encrypt_assertion', False) encrypted_advice_attributes = sp_policy.get('encrypted_advice_attributes', False) + signing_algorithm = idp_conf.get('signing_algorithm') + digest_algorithm = idp_conf.get('digest_algorithm') + sign_alg_attr = sp_policy.get('sign_alg', 'SIG_RSA_SHA256') + digest_alg_attr = sp_policy.get('digest_alg', 'DIGEST_SHA256') + # Construct arguments for method create_authn_response # on IdP Server instance args = { - 'identity' : ava, - 'name_id' : name_id, - 'authn' : auth_info, - 'sign_response' : sign_response, + # Add the SP details + **resp_args, + # AuthnResponse data + 'identity': ava, + 'name_id': name_id, + 'authn': auth_info, + 'sign_response': sign_response, 'sign_assertion': sign_assertion, 'encrypt_assertion': encrypt_assertion, - 'encrypted_advice_attributes': encrypted_advice_attributes + 'encrypted_advice_attributes': encrypted_advice_attributes, } - # Add the SP details - args.update(**resp_args) + args['sign_alg'] = signing_algorithm + if not args['sign_alg']: + try: + args['sign_alg'] = getattr(xmldsig, sign_alg_attr) + except AttributeError as e: + msg = "Unsupported sign algorithm {}".format(sign_alg_attr) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise Exception(msg) from e + + msg = "signing with algorithm {}".format(args['sign_alg']) + 
logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) - try: - args['sign_alg'] = getattr(xmldsig, sign_alg) - except AttributeError as e: - msg = "Unsupported sign algorithm {}".format(sign_alg) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) - raise Exception(msg) from e - else: - msg = "signing with algorithm {}".format(args['sign_alg']) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + args['digest_alg'] = digest_algorithm + if not args['digest_alg']: + try: + args['digest_alg'] = getattr(xmldsig, digest_alg_attr) + except AttributeError as e: + msg = "Unsupported digest algorithm {}".format(digest_alg_attr) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.error(logline) + raise Exception(msg) from e + + msg = "using digest algorithm {}".format(args['digest_alg']) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) - try: - args['digest_alg'] = getattr(xmldsig, digest_alg) - except AttributeError as e: - msg = "Unsupported digest algorithm {}".format(digest_alg) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) - raise Exception(msg) from e - else: - msg = "using digest algorithm {}".format(args['digest_alg']) + if sign_alg_attr or digest_alg_attr: + msg = ( + "sign_alg and digest_alg are deprecated; " + "instead, use signing_algorithm and digest_algorithm " + "under the service/idp configuration path " + "(not under policy/default)." 
+ ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + logger.warning(msg) resp = idp.create_authn_response(**args) http_args = idp.apply_binding( @@ -414,6 +471,21 @@ def _handle_authn_response(self, context, internal_response, idp): self._set_common_domain_cookie(internal_response, http_args, context) del context.state[self.name] + + msg = { + "message": "Sending SAML AuthN Response", + "issuer": internal_response.auth_info.issuer, + "requester": sp_entity_id, + "signed response": sign_response, + "signed assertion": sign_assertion, + "encrypted": encrypt_assertion, + "attributes": " ".join(list(ava.keys())) + } + if nameid_format: + msg['name_id'] = nameid_format + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.info(logline) + return make_saml_response(resp_args["binding"], http_args) def _handle_backend_error(self, exception, idp): @@ -451,13 +523,26 @@ def _metadata_endpoint(self, context): :param context: The current context :return: response with metadata """ - msg = "Sending metadata response" + msg = "Sending metadata response for entityId = {}".format(self.idp.config.entityid) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) - metadata_string = create_metadata_string(None, self.idp.config, 4, None, None, None, None, - None).decode("utf-8") + metadata_string = create_metadata_string( + configfile=None, config=self.idp.config, valid=4 + ).decode("utf-8") return Response(metadata_string, content="text/xml") + def _reload_metadata(self, context): + """ + Reload SAML metadata + """ + logger.debug("Reloading metadata") + res = self.idp.reload_metadata( + copy.deepcopy(self.config[SAMLFrontend.KEY_IDP_CONFIG]['metadata']) + ) + message = "Metadata reload %s" % ("OK" if res else "failed") + status = "200 OK" if res else "500 FAILED" + return Response(message=message, status=status) + def _register_endpoints(self, 
providers): """ Register methods to endpoints @@ -479,6 +564,7 @@ def _register_endpoints(self, providers): functools.partial(self.handle_authn_request, binding_in=binding))) if self.expose_entityid_endpoint(): + logger.debug("Exposing frontend entity endpoint = {}".format(self.idp.config.entityid)) parsed_entity_id = urlparse(self.idp.config.entityid) url_map.append(("^{0}".format(parsed_entity_id.path[1:]), self._metadata_endpoint)) @@ -584,7 +670,7 @@ def _get_sp_display_name(self, idp, entity_id): try: return extensions[0]["display_name"] - except (IndexError, KeyError) as e: + except (IndexError, KeyError): pass return None @@ -633,7 +719,7 @@ def _load_idp_dynamic_endpoints(self, context): """ target_entity_id = context.target_entity_id_from_path() idp_conf_file = self._load_endpoints_to_config(context.target_backend, target_entity_id) - idp_config = IdPConfig().load(idp_conf_file, metadata_construction=False) + idp_config = IdPConfig().load(idp_conf_file) return Server(config=idp_config) def _load_idp_dynamic_entity_id(self, state): @@ -649,7 +735,7 @@ def _load_idp_dynamic_entity_id(self, state): # Change the idp entity id dynamically idp_config_file = copy.deepcopy(self.idp_config) idp_config_file["entityid"] = "{}/{}".format(self.idp_config["entityid"], state[self.name]["target_entity_id"]) - idp_config = IdPConfig().load(idp_config_file, metadata_construction=False) + idp_config = IdPConfig().load(idp_config_file) return Server(config=idp_config) def handle_authn_request(self, context, binding_in): @@ -743,6 +829,10 @@ class SAMLVirtualCoFrontend(SAMLFrontend): KEY_ORGANIZATION = 'organization' KEY_ORGANIZATION_KEYS = ['display_name', 'name', 'url'] + def __init__(self, auth_req_callback_func, internal_attributes, config, base_url, name): + self.has_multiple_backends = False + super().__init__(auth_req_callback_func, internal_attributes, config, base_url, name) + def handle_authn_request(self, context, binding_in): """ See super class @@ -915,7 +1005,7 
@@ def _add_endpoints_to_config(self, config, co_name, backend_name): return config - def _add_entity_id(self, context, config, co_name): + def _add_entity_id(self, config, co_name, backend_name): """ Use the CO name to construct the entity ID for the virtual IdP for the CO and add it to the config. Also add it to the @@ -923,23 +1013,34 @@ def _add_entity_id(self, context, config, co_name): The entity ID has the form - {base_entity_id}/{co_name} + {base_entity_id}/{backend_name}/{co_name} :type context: The current context :type config: satosa.satosa_config.SATOSAConfig :type co_name: str + :type backend_name: str :rtype: satosa.satosa_config.SATOSAConfig :param context: :param config: satosa proxy config :param co_name: CO name + :param backend_name: Backend name :return: config with updated entity ID """ base_entity_id = config['entityid'] - co_entity_id = "{}/{}".format(base_entity_id, quote_plus(co_name)) - config['entityid'] = co_entity_id - context.decorate(self.KEY_CO_ENTITY_ID, co_entity_id) + # If not using template for entityId and does not has multiple backends, then for backward compatibility append co_name at end + if "" not in base_entity_id and not self.has_multiple_backends: + base_entity_id = "{}/{}".format(base_entity_id, "") + + replace = [ + ("", quote_plus(backend_name)), + ("", quote_plus(co_name)) + ] + for _replace in replace: + base_entity_id = base_entity_id.replace(_replace[0], _replace[1]) + + config['entityid'] = base_entity_id return config @@ -992,7 +1093,7 @@ def _co_names_from_config(self): return co_names - def _create_co_virtual_idp(self, context): + def _create_co_virtual_idp(self, context, co_name=None): """ Create a virtual IdP to represent the CO. 
@@ -1002,7 +1103,7 @@ def _create_co_virtual_idp(self, context): :param context: :return: An idp server """ - co_name = self._get_co_name(context) + co_name = co_name or self._get_co_name(context) context.decorate(self.KEY_CO_NAME, co_name) # Verify that we are configured for this CO. If the CO was not @@ -1022,15 +1123,15 @@ def _create_co_virtual_idp(self, context): # and the entityID for the CO virtual IdP. backend_name = context.target_backend idp_config = copy.deepcopy(self.idp_config) - idp_config = self._add_endpoints_to_config(idp_config, - co_name, - backend_name) - idp_config = self._add_entity_id(context, idp_config, co_name) + idp_config = self._add_endpoints_to_config( + idp_config, co_name, backend_name + ) + idp_config = self._add_entity_id(idp_config, co_name, backend_name) + context.decorate(self.KEY_CO_ENTITY_ID, idp_config['entityid']) # Use the overwritten IdP config to generate a pysaml2 config object # and from it a server object. - pysaml2_idp_config = IdPConfig().load(idp_config, - metadata_construction=False) + pysaml2_idp_config = IdPConfig().load(idp_config) server = Server(config=pysaml2_idp_config) @@ -1057,10 +1158,22 @@ def _register_endpoints(self, backend_names): :param backend_names: A list of backend names :return: A list of url and endpoint function pairs """ + + # Throw exception if there is possibility of duplicate entity ids when using co_names with multiple backends + self.has_multiple_backends = len(backend_names) > 1 + co_names = self._co_names_from_config() + all_entity_ids = [] + for backend_name in backend_names: + for co_name in co_names: + all_entity_ids.append(self._add_entity_id(copy.deepcopy(self.idp_config), co_name, backend_name)['entityid']) + + if len(all_entity_ids) != len(set(all_entity_ids)): + raise ValueError("Duplicate entities ids would be created for co-frontends, please make sure to make entity ids unique. " + "You can use and to achieve it. 
See example yaml file.") + # Create a regex pattern that will match any of the CO names. We # escape special characters like '+' and '.' that are valid # characters in an URL encoded string. - co_names = self._co_names_from_config() url_encoded_co_names = [re.escape(quote_plus(name)) for name in co_names] co_name_pattern = "|".join(url_encoded_co_names) @@ -1112,4 +1225,29 @@ def _register_endpoints(self, backend_names): logline = "Adding mapping {}".format(mapping) logger.debug(logline) + if self.expose_entityid_endpoint(): + for backend_name in backend_names: + for co_name in co_names: + idp_config = self._add_entity_id(copy.deepcopy(self.idp_config), co_name, backend_name) + entity_id = idp_config['entityid'] + logger.debug("Exposing frontend entity endpoint = {}".format(entity_id)) + parsed_entity_id = urlparse(entity_id) + metadata_endpoint = "^{0}".format(parsed_entity_id.path[1:]) + the_callable = functools.partial(self._metadata_endpoint, co_name=co_name) + url_to_callable_mappings.append((metadata_endpoint, the_callable)) + return url_to_callable_mappings + + def _metadata_endpoint(self, context, co_name): + """ + Endpoint for retrieving the virtual frontend metadata + :type context: satosa.context.Context + :rtype: satosa.response.Response + + :param context: The current context + :return: response with metadata + """ + # Using the context of the current request and saved state from the + # authentication request dynamically create an IdP instance. 
+ self.idp = self._create_co_virtual_idp(context, co_name=co_name) + return super()._metadata_endpoint(context=context); diff --git a/src/satosa/internal.py b/src/satosa/internal.py index 2302a3da2..a96b19b1f 100644 --- a/src/satosa/internal.py +++ b/src/satosa/internal.py @@ -35,7 +35,7 @@ def __setattr__(self, key, value): def __getattr__(self, key): if key == "data": - return self.data + return super().data try: value = self.__getitem__(key) @@ -85,7 +85,13 @@ class AuthenticationInformation(_Datafy): """ def __init__( - self, auth_class_ref=None, timestamp=None, issuer=None, *args, **kwargs + self, + auth_class_ref=None, + timestamp=None, + issuer=None, + authority=None, + *args, + **kwargs, ): """ Initiate the data carrier @@ -102,6 +108,7 @@ def __init__( self.auth_class_ref = auth_class_ref self.timestamp = timestamp self.issuer = issuer + self.authority = authority class InternalData(_Datafy): @@ -109,13 +116,6 @@ class InternalData(_Datafy): A base class for the data carriers between frontends/backends """ - _DEPRECATED_TO_NEW_MEMBERS = { - "name_id": "subject_id", - "user_id": "subject_id", - "user_id_hash_type": "subject_type", - "approved_attributes": "attributes", - } - def __init__( self, auth_info=None, @@ -124,10 +124,6 @@ def __init__( subject_id=None, subject_type=None, attributes=None, - user_id=None, - user_id_hash_type=None, - name_id=None, - approved_attributes=None, *args, **kwargs, ): @@ -138,10 +134,6 @@ def __init__( :param subject_id: :param subject_type: :param attributes: - :param user_id: - :param user_id_hash_type: - :param name_id: - :param approved_attributes: :type auth_info: AuthenticationInformation :type requester: str @@ -149,10 +141,6 @@ def __init__( :type subject_id: str :type subject_type: str :type attributes: dict - :type user_id: str - :type user_id_hash_type: str - :type name_id: str - :type approved_attributes: dict """ super().__init__(self, *args, **kwargs) self.auth_info = ( @@ -166,26 +154,6 @@ def __init__( if 
requester_name is not None else [{"text": requester, "lang": "en"}] ) - self.subject_id = ( - subject_id - if subject_id is not None - else user_id - if user_id is not None - else name_id - if name_id is not None - else None - ) - self.subject_type = ( - subject_type - if subject_type is not None - else user_id_hash_type - if user_id_hash_type is not None - else None - ) - self.attributes = ( - attributes - if attributes is not None - else approved_attributes - if approved_attributes is not None - else {} - ) + self.subject_id = subject_id + self.subject_type = subject_type + self.attributes = attributes if attributes is not None else {} diff --git a/src/satosa/internal_data.py b/src/satosa/internal_data.py deleted file mode 100644 index 7e3a8e89e..000000000 --- a/src/satosa/internal_data.py +++ /dev/null @@ -1,14 +0,0 @@ -import warnings as _warnings - -from satosa.internal import InternalData -from satosa.internal import AuthenticationInformation -from satosa.deprecated import UserIdHashType -from satosa.deprecated import UserIdHasher -from satosa.deprecated import InternalRequest -from satosa.deprecated import InternalResponse - - -_warnings.warn( - "internal_data is deprecated; use satosa.internal instead.", - DeprecationWarning, -) diff --git a/src/satosa/metadata_creation/description.py b/src/satosa/metadata_creation/description.py index 26abdd555..4aa82fa31 100644 --- a/src/satosa/metadata_creation/description.py +++ b/src/satosa/metadata_creation/description.py @@ -52,6 +52,9 @@ def __init__(self): self._description = [] self._display_name = [] self._logos = [] + self._keywords = [] + self._information_url = [] + self._privacy_statement_url = [] def add_description(self, text, lang): """ @@ -96,6 +99,52 @@ def add_logo(self, text, width, height, lang=None): logo_entry["lang"] = lang self._logos.append(logo_entry) + def add_keywords(self, text, lang): + """ + Binds keywords to the given language + :type text: List + :type lang: str + + :param text: List of 
keywords + :param lang: language + """ + + if text: + self._keywords.append( + { + "text": [_keyword.replace(" ", "+") for _keyword in text], + "lang": lang if lang else "en", + } + ) + + def add_information_url(self, text, lang): + """ + Binds information_url to the given language + :type text: str + :type lang: str + + :param text: Information URL + :param lang: language + """ + + if text: + self._information_url.append({"text": text, "lang": lang if lang else "en"}) + + def add_privacy_statement_url(self, text, lang): + """ + Binds privacy_statement_url to the given language + :type text: str + :type lang: str + + :param text: Privacy statement URL + :param lang: language + """ + + if text: + self._privacy_statement_url.append( + {"text": text, "lang": lang if lang else "en"} + ) + def to_dict(self): """ Returns a dictionary representation of the UIInfoDesc object. @@ -110,6 +159,12 @@ def to_dict(self): ui_info["display_name"] = self._display_name if self._logos: ui_info["logo"] = self._logos + if self._keywords: + ui_info["keywords"] = self._keywords + if self._information_url: + ui_info["information_url"] = self._information_url + if self._privacy_statement_url: + ui_info["privacy_statement_url"] = self._privacy_statement_url return {"service": {"idp": {"ui_info": ui_info}}} if ui_info else {} @@ -227,9 +282,9 @@ def to_dict(self): if self._organization: description.update(self._organization.to_dict()) if self._contact_person: - description['contact_person'] = [] + description["contact_person"] = [] for person in self._contact_person: - description['contact_person'].append(person.to_dict()) + description["contact_person"].append(person.to_dict()) if self._ui_info: description.update(self._ui_info.to_dict()) return description diff --git a/src/satosa/metadata_creation/saml_metadata.py b/src/satosa/metadata_creation/saml_metadata.py index 1a9e1d730..f88bbaaec 100644 --- a/src/satosa/metadata_creation/saml_metadata.py +++ 
b/src/satosa/metadata_creation/saml_metadata.py @@ -17,7 +17,7 @@ def _create_entity_descriptor(entity_config): - cnf = Config().load(copy.deepcopy(entity_config), metadata_construction=True) + cnf = entity_config if isinstance(entity_config, Config) else Config().load(copy.deepcopy(entity_config)) return entity_descriptor(cnf) @@ -28,7 +28,7 @@ def _create_backend_metadata(backend_modules): if isinstance(plugin_module, SAMLBackend): logline = "Generating SAML backend '{}' metadata".format(plugin_module.name) logger.info(logline) - backend_metadata[plugin_module.name] = [_create_entity_descriptor(plugin_module.config["sp_config"])] + backend_metadata[plugin_module.name] = [_create_entity_descriptor(plugin_module.sp.config)] return backend_metadata @@ -80,7 +80,7 @@ def _create_frontend_metadata(frontend_modules, backend_modules): logger.info(logline) idp_config = copy.deepcopy(frontend.config["idp_config"]) idp_config = frontend._add_endpoints_to_config(idp_config, co_name, backend.name) - idp_config = frontend._add_entity_id(idp_config, co_name) + idp_config = frontend._add_entity_id(idp_config, co_name, backend.name) idp_config = frontend._overlay_for_saml_metadata(idp_config, co_name) entity_desc = _create_entity_descriptor(idp_config) frontend_metadata[frontend.name].append(entity_desc) @@ -154,3 +154,18 @@ def create_signed_entity_descriptor(entity_descriptor, security_context, valid_f raise ValueError("Could not construct valid EntityDescriptor tag") return xmldoc + + +def create_entity_descriptor_metadata(entity_descriptor, valid_for=None): + """ + :param entity_descriptor: the entity descriptor to create metadata for + :param valid_for: number of hours the metadata should be valid + :return: the EntityDescriptor metadata + + :type entity_descriptor: saml2.md.EntityDescriptor + :type valid_for: Optional[int] + """ + if valid_for: + entity_descriptor.valid_until = in_a_while(hours=valid_for) + + return str(entity_descriptor) diff --git 
a/src/satosa/micro_services/attribute_authorization.py b/src/satosa/micro_services/attribute_authorization.py index 1bcaf8cda..60f4afe4b 100644 --- a/src/satosa/micro_services/attribute_authorization.py +++ b/src/satosa/micro_services/attribute_authorization.py @@ -5,59 +5,86 @@ from ..util import get_dict_defaults class AttributeAuthorization(ResponseMicroService): - """ -A microservice that performs simple regexp-based authorization based on response -attributes. The configuration assumes a dict with two keys: attributes_allow -and attributes_deny. An examples speaks volumes: + A microservice that performs simple regexp-based authorization based on response + attributes. There are two configuration options to match attribute values in order + to allow or deny authorization. + + The configuration is wrapped in two nested dicts that specialize the options per + requester (SP/RP) and issuer (IdP/OP). + + There are also two options to enforce presence of the attributes that are going to + be checked. -```yaml -config: - attribute_allow: - target_provider1: + Example configuration: + + ```yaml + config: + force_attributes_presence_on_allow: true + attribute_allow: + target_provider1: requester1: - attr1: - - "^foo:bar$" - - "^kaka$" + attr1: + - "^foo:bar$" + - "^kaka$" default: - attr1: - - "plupp@.+$" - "": + attr1: + - "plupp@.+$" + "": "": - attr2: - - "^knytte:.*$" - attribute_deny: - default: - default: - eppn: - - "^[^@]+$" + attr2: + - "^knytte:.*$" -``` + force_attributes_presence_on_deny: false + attribute_deny: + default: + default: + eppn: + - "^[^@]+$" + ``` -The use of "" and 'default' is synonymous. Attribute rules are not overloaded -or inherited. For instance a response from "provider2" would only be allowed -through if the eppn attribute had all values containing an '@' (something -perhaps best implemented via an allow rule in practice). 
Responses from -target_provider1 bound for requester1 would be allowed through only if attr1 -contained foo:bar or kaka. Note that attribute filters (the leaves of the -structure above) are ORed together - i.e any attribute match is sufficient. + The use of "" and "default" is synonymous. Attribute rules are not overloaded + or inherited. For instance a response from "provider2" would only be allowed + through if the eppn attribute had all values containing an '@' (something + perhaps best implemented via an allow rule in practice). Responses from + target_provider1 bound for requester1 would be allowed through only if attr1 + contained foo:bar or kaka. Note that attribute filters (the leaves of the + structure above) are ORed together - i.e any attribute match is sufficient. """ def __init__(self, config, *args, **kwargs): super().__init__(*args, **kwargs) self.attribute_allow = config.get("attribute_allow", {}) self.attribute_deny = config.get("attribute_deny", {}) + self.force_attributes_presence_on_allow = config.get("force_attributes_presence_on_allow", False) + self.force_attributes_presence_on_deny = config.get("force_attributes_presence_on_deny", False) def _check_authz(self, context, attributes, requester, provider): for attribute_name, attribute_filters in get_dict_defaults(self.attribute_allow, requester, provider).items(): - if attribute_name in attributes: - if not any([any(filter(re.compile(af).search, attributes[attribute_name])) for af in attribute_filters]): + attr_values = attributes.get(attribute_name) + if attr_values is not None: + if not any( + [ + any(filter(lambda x: re.search(af, x), attr_values)) + for af in attribute_filters + ] + ): raise SATOSAAuthenticationError(context.state, "Permission denied") + elif self.force_attributes_presence_on_allow: + raise SATOSAAuthenticationError(context.state, "Permission denied") for attribute_name, attribute_filters in get_dict_defaults(self.attribute_deny, requester, provider).items(): - if 
attribute_name in attributes: - if any([any(filter(re.compile(af).search, attributes[attribute_name])) for af in attribute_filters]): + attr_values = attributes.get(attribute_name) + if attr_values is not None: + if any( + [ + any(filter(lambda x: re.search(af, x), attributes[attribute_name])) + for af in attribute_filters + ] + ): raise SATOSAAuthenticationError(context.state, "Permission denied") + elif self.force_attributes_presence_on_deny: + raise SATOSAAuthenticationError(context.state, "Permission denied") def process(self, context, data): self._check_authz(context, data.attributes, data.requester, data.auth_info.issuer) diff --git a/src/satosa/micro_services/attribute_generation.py b/src/satosa/micro_services/attribute_generation.py index 485491554..907a8462d 100644 --- a/src/satosa/micro_services/attribute_generation.py +++ b/src/satosa/micro_services/attribute_generation.py @@ -1,54 +1,55 @@ import re -import pystache +from chevron import render as render_mustache from .base import ResponseMicroService from ..util import get_dict_defaults + class MustachAttrValue(object): def __init__(self, attr_name, values): - self._attr_name = attr_name - self._values = values - if any(['@' in v for v in values]): - local_parts = [] - domain_parts = [] - scopes = dict() - for v in values: - (local_part, sep, domain_part) = v.partition('@') - # probably not needed now... - local_parts.append(local_part) - domain_parts.append(domain_part) - scopes[domain_part] = True - self._scopes = list(scopes.keys()) - else: - self._scopes = None + self._attr_name = attr_name + self._values = values + if any(['@' in v for v in values]): + local_parts = [] + domain_parts = [] + scopes = dict() + for v in values: + (local_part, sep, domain_part) = v.partition('@') + # probably not needed now... 
+ local_parts.append(local_part) + domain_parts.append(domain_part) + scopes[domain_part] = True + self._scopes = list(scopes.keys()) + else: + self._scopes = None def __str__(self): return ";".join(self._values) @property def values(self): - [{self._attr_name: v} for v in self._values] - - @property + return [{self._attr_name: v} for v in self._values] + + @property def value(self): if len(self._values) == 1: - return self._values[0] + return self._values[0] else: - return self._values + return self._values @property def first(self): if len(self._values) > 0: - return self._values[0] + return self._values[0] else: - return "" + return "" @property def scope(self): if self._scopes is not None: - return self._scopes[0] + return self._scopes[0] return "" - + class AddSyntheticAttributes(ResponseMicroService): """ @@ -124,13 +125,25 @@ def __init__(self, config, *args, **kwargs): def _synthesize(self, attributes, requester, provider): syn_attributes = dict() context = dict() - - for attr_name,values in attributes.items(): - context[attr_name] = MustachAttrValue(attr_name, values) + + for attr_name, values in attributes.items(): + context[attr_name] = MustachAttrValue( + attr_name, + values + if values + and isinstance(values, list) + and all(isinstance(value, str) for value in values) + else [], + ) recipes = get_dict_defaults(self.synthetic_attributes, requester, provider) for attr_name, fmt in recipes.items(): - syn_attributes[attr_name] = [v.strip().strip(';') for v in re.split("[;\n]+", pystache.render(fmt, context))] + syn_attributes[attr_name] = [ + value + for token in re.split("[;\n]+", render_mustache(fmt, context)) + for value in [token.strip().strip(';')] + if value + ] return syn_attributes def process(self, context, data): diff --git a/src/satosa/micro_services/attribute_modifications.py b/src/satosa/micro_services/attribute_modifications.py index 67633af27..bb00761b4 100644 --- a/src/satosa/micro_services/attribute_modifications.py +++ 
b/src/satosa/micro_services/attribute_modifications.py @@ -1,7 +1,11 @@ import re +import logging from .base import ResponseMicroService +from ..context import Context +from ..exception import SATOSAError +logger = logging.getLogger(__name__) class AddStaticAttributes(ResponseMicroService): """ @@ -29,28 +33,62 @@ def __init__(self, config, *args, **kwargs): def process(self, context, data): # apply default filters provider_filters = self.attribute_filters.get("", {}) - self._apply_requester_filters(data.attributes, provider_filters, data.requester) + target_provider = data.auth_info.issuer + self._apply_requester_filters(data.attributes, provider_filters, data.requester, context, target_provider) # apply target provider specific filters - target_provider = data.auth_info.issuer provider_filters = self.attribute_filters.get(target_provider, {}) - self._apply_requester_filters(data.attributes, provider_filters, data.requester) + self._apply_requester_filters(data.attributes, provider_filters, data.requester, context, target_provider) return super().process(context, data) - def _apply_requester_filters(self, attributes, provider_filters, requester): + def _apply_requester_filters(self, attributes, provider_filters, requester, context, target_provider): # apply default requester filters default_requester_filters = provider_filters.get("", {}) - self._apply_filter(attributes, default_requester_filters) + self._apply_filters(attributes, default_requester_filters, context, target_provider) # apply requester specific filters requester_filters = provider_filters.get(requester, {}) - self._apply_filter(attributes, requester_filters) - - def _apply_filter(self, attributes, attribute_filters): - for attribute_name, attribute_filter in attribute_filters.items(): - regex = re.compile(attribute_filter) - if attribute_name == "": # default filter for all attributes - for attribute, values in attributes.items(): - attributes[attribute] = list(filter(regex.search, 
attributes[attribute])) - elif attribute_name in attributes: - attributes[attribute_name] = list(filter(regex.search, attributes[attribute_name])) + self._apply_filters(attributes, requester_filters, context, target_provider) + + def _apply_filters(self, attributes, attribute_filters, context, target_provider): + for attribute_name, attribute_filters in attribute_filters.items(): + if type(attribute_filters) == str: + # convert simple notation to filter list + attribute_filters = {'regexp': attribute_filters} + + for filter_type, filter_value in attribute_filters.items(): + + if filter_type == "regexp": + filter_func = re.compile(filter_value).search + elif filter_type == "shibmdscope_match_scope": + mdstore = context.get_decoration(Context.KEY_METADATA_STORE) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) if mdstore else [] + filter_func = lambda v: self._shibmdscope_match_scope(v, md_scopes) + elif filter_type == "shibmdscope_match_value": + mdstore = context.get_decoration(Context.KEY_METADATA_STORE) + md_scopes = list(mdstore.shibmd_scopes(target_provider,"idpsso_descriptor")) if mdstore else [] + filter_func = lambda v: self._shibmdscope_match_value(v, md_scopes) + else: + raise SATOSAError("Unknown filter type") + + if attribute_name == "": # default filter for all attributes + for attribute, values in attributes.items(): + attributes[attribute] = list(filter(filter_func, attributes[attribute])) + elif attribute_name in attributes: + attributes[attribute_name] = list(filter(filter_func, attributes[attribute_name])) + + def _shibmdscope_match_value(self, value, md_scopes): + for md_scope in md_scopes: + if not md_scope['regexp'] and md_scope['text'] == value: + return True + elif md_scope['regexp'] and re.fullmatch(md_scope['text'], value): + return True + return False + + def _shibmdscope_match_scope(self, value, md_scopes): + split_value = value.split('@') + if len(split_value) != 2: + logger.info(f"Discarding invalid scoped 
value {value}") + return False + value_scope = split_value[1] + return self._shibmdscope_match_value(value_scope, md_scopes) diff --git a/src/satosa/micro_services/attribute_policy.py b/src/satosa/micro_services/attribute_policy.py new file mode 100644 index 000000000..81151d0e4 --- /dev/null +++ b/src/satosa/micro_services/attribute_policy.py @@ -0,0 +1,35 @@ +import logging + +import satosa.logging_util as lu + +from .base import ResponseMicroService + +logger = logging.getLogger(__name__) + + +class AttributePolicy(ResponseMicroService): + """ + Module to filter Attributes by a given Policy. + """ + + def __init__(self, config, *args, **kwargs): + super().__init__(*args, **kwargs) + self.attribute_policy = config["attribute_policy"] + + def process(self, context, data): + state = context.state + session_id = lu.get_session_id(state) + + msg = "Incoming data.attributes {}".format(data.attributes) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + + policy = self.attribute_policy.get(data.requester, {}) + if "allowed" in policy: + for key in (data.attributes.keys() - set(policy["allowed"])): + del data.attributes[key] + + msg = "Returning data.attributes {}".format(data.attributes) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + return super().process(context, data) diff --git a/src/satosa/micro_services/attribute_processor.py b/src/satosa/micro_services/attribute_processor.py index 1973402b2..7232e484e 100644 --- a/src/satosa/micro_services/attribute_processor.py +++ b/src/satosa/micro_services/attribute_processor.py @@ -1,5 +1,4 @@ import importlib -import json import logging from satosa.exception import SATOSAError diff --git a/src/satosa/micro_services/consent.py b/src/satosa/micro_services/consent.py index 968b28327..a469e2189 100644 --- a/src/satosa/micro_services/consent.py +++ b/src/satosa/micro_services/consent.py @@ -66,7 +66,7 @@ def _handle_consent_response(self, context): except 
ConnectionError as e: msg = "Consent service is not reachable, no consent given." logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) + logger.error(logline, exc_info=e) # Send an internal_response without any attributes consent_attributes = None @@ -91,7 +91,8 @@ def _approve_new_consent(self, context, internal_response, id_hash): "attr": internal_response.attributes, "id": id_hash, "redirect_endpoint": "%s/consent%s" % (self.base_url, self.endpoint), - "requester_name": context.state[STATE_KEY]["requester_name"] + "requester": internal_response.requester, + "requester_name": internal_response.requester_name, } if self.locked_attr: consent_args["locked_attrs"] = [self.locked_attr] @@ -122,11 +123,12 @@ def process(self, context, internal_response): :param internal_response: the response :return: response """ - consent_state = context.state[STATE_KEY] - - internal_response.attributes = self._filter_attributes(internal_response.attributes, consent_state["filter"]) - id_hash = self._get_consent_id(internal_response.requester, internal_response.subject_id, - internal_response.attributes) + context.state[STATE_KEY] = context.state.get(STATE_KEY, {}) + id_hash = self._get_consent_id( + internal_response.requester, + internal_response.subject_id, + internal_response.attributes, + ) try: # Check if consent is already given @@ -134,7 +136,7 @@ def process(self, context, internal_response): except requests.exceptions.ConnectionError as e: msg = "Consent service is not reachable, no consent is given." 
logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.error(logline) + logger.error(logline, exc_info=e) # Send an internal_response without any attributes internal_response.attributes = {} return self._end_consent(context, internal_response) @@ -225,7 +227,7 @@ def _end_consent(self, context, internal_response): :param internal_response: the response :return: response """ - del context.state[STATE_KEY] + context.state.pop(STATE_KEY, None) return super().process(context, internal_response) def register_endpoints(self): diff --git a/src/satosa/micro_services/custom_logging.py b/src/satosa/micro_services/custom_logging.py index c82d03449..14d435d8f 100644 --- a/src/satosa/micro_services/custom_logging.py +++ b/src/satosa/micro_services/custom_logging.py @@ -39,7 +39,7 @@ def process(self, context, data): try: spEntityID = context.state.state_dict['SATOSA_BASE']['requester'] idpEntityID = data.auth_info.issuer - except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID's for the IdP or SP".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) @@ -71,8 +71,6 @@ def process(self, context, data): logger.error(logline) return super().process(context, data) - record = None - try: msg = "{} Using context {}".format(logprefix, context) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) diff --git a/src/satosa/micro_services/custom_routing.py b/src/satosa/micro_services/custom_routing.py index d903502be..5706ce9aa 100644 --- a/src/satosa/micro_services/custom_routing.py +++ b/src/satosa/micro_services/custom_routing.py @@ -2,6 +2,8 @@ from base64 import urlsafe_b64encode from satosa.context import Context +from satosa.internal import InternalData + from .base import RequestMicroService from ..exception import SATOSAConfigurationError from ..exception import SATOSAError @@ -10,6 +12,52 @@ logger = logging.getLogger(__name__) 
+class CustomRoutingError(SATOSAError): + """SATOSA exception raised by CustomRouting rules""" + pass + + +class DecideBackendByTargetIssuer(RequestMicroService): + """ + Select target backend based on the target issuer. + """ + + def __init__(self, config:dict, *args, **kwargs): + """ + Constructor. + + :param config: microservice configuration loaded from yaml file + :type config: Dict[str, Dict[str, str]] + """ + super().__init__(*args, **kwargs) + + self.target_mapping = config['target_mapping'] + self.default_backend = config['default_backend'] + + def process(self, context:Context, data:InternalData): + """Set context.target_backend based on the target issuer""" + + target_issuer = context.get_decoration(Context.KEY_TARGET_ENTITYID) + if not target_issuer: + logger.info('skipping backend decision because no target_issuer was found') + return super().process(context, data) + + target_backend = ( + self.target_mapping.get(target_issuer) + or self.default_backend + ) + + report = { + 'msg': 'decided target backend by target issuer', + 'target_issuer': target_issuer, + 'target_backend': target_backend, + } + logger.info(report) + + context.target_backend = target_backend + return super().process(context, data) + + class DecideBackendByRequester(RequestMicroService): """ Select which backend should be used based on who the requester is. @@ -19,11 +67,13 @@ def __init__(self, config, *args, **kwargs): """ Constructor. :param config: mapping from requester identifier to - backend module name under the key 'requester_mapping' + backend module name under the key 'requester_mapping'. + May also include default backend under key 'default_backend'. 
:type config: Dict[str, Dict[str, str]] """ super().__init__(*args, **kwargs) self.requester_mapping = config['requester_mapping'] + self.default_backend = config.get('default_backend') def process(self, context, data): """ @@ -31,7 +81,7 @@ def process(self, context, data): :param context: request context :param data: the internal request """ - context.target_backend = self.requester_mapping[data.requester] + context.target_backend = self.requester_mapping.get(data.requester) or self.default_backend return super().process(context, data) diff --git a/src/satosa/micro_services/disco.py b/src/satosa/micro_services/disco.py new file mode 100644 index 000000000..274f18780 --- /dev/null +++ b/src/satosa/micro_services/disco.py @@ -0,0 +1,58 @@ +from satosa.context import Context +from satosa.internal import InternalData + +from .base import RequestMicroService +from ..exception import SATOSAError + + +class DiscoToTargetIssuerError(SATOSAError): + """SATOSA exception raised by the DiscoToTargetIssuer micro-service""" + + +class DiscoToTargetIssuer(RequestMicroService): + def __init__(self, config:dict, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.disco_endpoints = config['disco_endpoints'] + if not isinstance(self.disco_endpoints, list) or not self.disco_endpoints: + raise DiscoToTargetIssuerError('disco_endpoints must be a list of str') + + def process(self, context:Context, data:InternalData): + context.state[self.name] = { + 'target_frontend': context.target_frontend, + 'internal_data': data.to_dict(), + } + return super().process(context, data) + + def register_endpoints(self): + """ + URL mapping of additional endpoints this micro service needs to register for callbacks. 
+ + Example of a mapping from the url path '/callback' to the callback() method of a micro service: + reg_endp = [ + ('^/callback1$', self.callback), + ] + + :rtype List[Tuple[str, Callable[[satosa.context.Context, Any], satosa.response.Response]]] + + :return: A list with functions and args bound to a specific endpoint url, + [(regexp, Callable[[satosa.context.Context], satosa.response.Response]), ...] + """ + + return [ + (path , self._handle_disco_response) + for path in self.disco_endpoints + ] + + def _handle_disco_response(self, context:Context): + target_issuer = context.request.get('entityID') + if not target_issuer: + raise DiscoToTargetIssuerError('no valid entity_id in the disco response') + + target_frontend = context.state.get(self.name, {}).get('target_frontend') + data_serialized = context.state.get(self.name, {}).get('internal_data', {}) + data = InternalData.from_dict(data_serialized) + + context.target_frontend = target_frontend + context.decorate(Context.KEY_TARGET_ENTITYID, target_issuer) + return super().process(context, data) diff --git a/src/satosa/micro_services/idp_hinting.py b/src/satosa/micro_services/idp_hinting.py new file mode 100644 index 000000000..90569d706 --- /dev/null +++ b/src/satosa/micro_services/idp_hinting.py @@ -0,0 +1,60 @@ +import logging + +from .base import RequestMicroService +from ..exception import SATOSAConfigurationError +from ..exception import SATOSAError + + +logger = logging.getLogger(__name__) + + +class IdpHintingError(SATOSAError): + """ + SATOSA exception raised by IdpHinting microservice + """ + pass + + +class IdpHinting(RequestMicroService): + """ + Detect if an idp hinting feature have been requested + """ + + def __init__(self, config, *args, **kwargs): + """ + Constructor. 
+ :param config: microservice configuration + :type config: Dict[str, Dict[str, str]] + """ + super().__init__(*args, **kwargs) + try: + self.idp_hint_param_names = config['allowed_params'] + except KeyError: + raise SATOSAConfigurationError( + f"{self.__class__.__name__} can't find allowed_params" + ) + + def process(self, context, data): + """ + This intercepts if idp_hint parameter is in use + :param context: request context + :param data: the internal request + """ + target_entity_id = context.get_decoration(context.KEY_TARGET_ENTITYID) + qs_params = context.qs_params + + issuer_is_already_selected = bool(target_entity_id) + query_string_is_missing = not qs_params + if issuer_is_already_selected or query_string_is_missing: + return super().process(context, data) + + hints = ( + entity_id + for param_name in self.idp_hint_param_names + for qs_param_name, entity_id in qs_params.items() + if param_name == qs_param_name + ) + hint = next(hints, None) + + context.decorate(context.KEY_TARGET_ENTITYID, hint) + return super().process(context, data) diff --git a/src/satosa/micro_services/ldap_attribute_store.py b/src/satosa/micro_services/ldap_attribute_store.py index 333254648..a6f78a2ff 100644 --- a/src/satosa/micro_services/ldap_attribute_store.py +++ b/src/satosa/micro_services/ldap_attribute_store.py @@ -7,15 +7,19 @@ import copy import logging +import random +import string import urllib import ldap3 from ldap3.core.exceptions import LDAPException +import satosa.logging_util as lu from satosa.exception import SATOSAError -from satosa.logging_util import satosa_logging from satosa.micro_services.base import ResponseMicroService from satosa.response import Redirect +from satosa.frontends.saml2 import SAMLVirtualCoFrontend +from satosa.routing import STATE_KEY as ROUTING_STATE_KEY logger = logging.getLogger(__name__) @@ -42,6 +46,7 @@ class LdapAttributeStore(ResponseMicroService): "clear_input_attributes": False, "ignore": False, "ldap_identifier_attribute": None, + 
"search_filter": None, "ldap_url": None, "ldap_to_internal_map": None, "on_ldap_search_result_empty": None, @@ -57,6 +62,7 @@ class LdapAttributeStore(ResponseMicroService): "client_strategy": "REUSABLE", "pool_size": 10, "pool_keepalive": 10, + "pool_lifetime": None, } def __init__(self, config, *args, **kwargs): @@ -64,7 +70,7 @@ def __init__(self, config, *args, **kwargs): if "default" in config and "" in config: msg = """Use either 'default' or "" in config but not both""" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) if "" in config: @@ -72,21 +78,27 @@ def __init__(self, config, *args, **kwargs): if "default" not in config: msg = "No default configuration is present" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) self.config = {} + # Get provider attribute + self.provider_attribute = None + if "global" in config: + if "provider_attribute" in config["global"]: + self.provider_attribute = config["global"]["provider_attribute"] + # Process the default configuration first then any per-SP overrides. 
sp_list = ["default"] - sp_list.extend([key for key in config.keys() if key != "default"]) + sp_list.extend([key for key in config.keys() if key != "default" and key != "global"]) connections = {} for sp in sp_list: if not isinstance(config[sp], dict): msg = "Configuration value for {} must be a dictionary" - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) # Initialize configuration using module defaults then update @@ -109,14 +121,14 @@ def __init__(self, config, *args, **kwargs): if connection_params in connections: sp_config["connection"] = connections[connection_params] msg = "Reusing LDAP connection for SP {}".format(sp) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) else: try: connection = self._ldap_connection_factory(sp_config) connections[connection_params] = connection sp_config["connection"] = connection msg = "Created new LDAP connection for SP {}".format(sp) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) except LdapAttributeStoreError: # It is acceptable to not have a default LDAP connection # but all SP overrides must have a connection, either @@ -124,13 +136,13 @@ def __init__(self, config, *args, **kwargs): if sp != "default": msg = "No LDAP connection can be initialized for SP {}" msg = msg.format(sp) - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) self.config[sp] = sp_config msg = "LDAP Attribute Store microservice initialized" - satosa_logging(logger, logging.INFO, msg, None) + logger.info(msg) def _construct_filter_value( self, candidate, name_id_value, name_id_format, issuer, attributes @@ -174,7 +186,7 @@ def _construct_filter_value( for attr_value in [attributes.get(identifier_name)] ] msg = "Found candidate values {}".format(values) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) # If one of the configured identifier names is name_id then if there is # also a 
configured name_id_format add the value for the NameID of that @@ -188,7 +200,7 @@ def _construct_filter_value( and candidate_name_id_format == name_id_format ): msg = "IdP asserted NameID {}".format(name_id_value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) candidate_nameid_value = name_id_value # Only add the NameID value asserted by the IdP if it is not @@ -199,18 +211,18 @@ def _construct_filter_value( if candidate_nameid_value not in values: msg = "Added NameID {} to candidate values" msg = msg.format(candidate_nameid_value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) values.append(candidate_nameid_value) else: msg = "NameID {} value also asserted as attribute value" msg = msg.format(candidate_nameid_value) - satosa_logging(logger, logging.WARN, msg, None) + logger.warning(msg) # If no value was asserted by the IdP for one of the configured list of # identifier names for this candidate then go onto the next candidate. if None in values: msg = "Candidate is missing value so skipping" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return None # All values for the configured list of attribute names are present @@ -223,14 +235,14 @@ def _construct_filter_value( else candidate["add_scope"] ) msg = "Added scope {} to values".format(scope) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) values.append(scope) # Concatenate all values to create the filter value. 
value = "".join(values) msg = "Constructed filter value {}".format(value) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return value @@ -281,13 +293,13 @@ def _ldap_connection_factory(self, config): server = ldap3.Server(**args) msg = "Creating a new LDAP connection" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using LDAP URL {}".format(ldap_url) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using bind DN {}".format(bind_dn) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) auto_bind_string = config["auto_bind"] auto_bind_map = { @@ -303,11 +315,17 @@ def _ldap_connection_factory(self, config): pool_size = config["pool_size"] pool_keepalive = config["pool_keepalive"] + pool_lifetime = config["pool_lifetime"] + pool_name = ''.join(random.sample(string.ascii_lowercase, 6)) + if client_strategy == ldap3.REUSABLE: msg = "Using pool size {}".format(pool_size) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) msg = "Using pool keep alive {}".format(pool_keepalive) - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) + if pool_lifetime: + msg = "Using pool lifetime {}".format(pool_lifetime) + logger.debug(msg) try: connection = ldap3.Connection( @@ -318,20 +336,22 @@ def _ldap_connection_factory(self, config): client_strategy=client_strategy, read_only=read_only, version=version, + pool_name=pool_name, pool_size=pool_size, pool_keepalive=pool_keepalive, + pool_lifetime=pool_lifetime, ) msg = "Successfully connected to LDAP server" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) except LDAPException as e: msg = "Caught exception when connecting to LDAP server: {}" msg = msg.format(e) - satosa_logging(logger, logging.ERROR, msg, None) + logger.error(msg) raise LdapAttributeStoreError(msg) msg = "Successfully connected to LDAP server" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) 
return connection @@ -343,7 +363,7 @@ def _populate_attributes(self, config, record): ldap_attributes = record.get("attributes", None) if not ldap_attributes: msg = "No attributes returned with LDAP record" - satosa_logging(logger, logging.DEBUG, msg, None) + logger.debug(msg) return ldap_to_internal_map = ( @@ -358,7 +378,8 @@ def _populate_attributes(self, config, record): for attr, values in ldap_attributes.items(): internal_attr = ldap_to_internal_map.get(attr, None) if not internal_attr and ";" in attr: - internal_attr = ldap_to_internal_map.get(attr.split(";")[0], None) + internal_attr = ldap_to_internal_map.get(attr.split(";")[0], + None) if internal_attr and values: attributes[internal_attr] = ( @@ -367,8 +388,8 @@ def _populate_attributes(self, config, record): else [values] ) msg = "Recording internal attribute {} with values {}" - msg = msg.format(internal_attr, attributes[internal_attr]) - satosa_logging(logger, logging.DEBUG, msg, None) + logline = msg.format(internal_attr, attributes[internal_attr]) + logger.debug(logline) return attributes @@ -393,21 +414,45 @@ def process(self, context, data): Default interface for microservices. Process the input data for the input context. 
""" - issuer = data.auth_info.issuer + state = context.state + session_id = lu.get_session_id(state) + requester = data.requester - config = self.config.get(requester) or self.config["default"] + issuer = data.auth_info.issuer + + frontend_name = state.get(ROUTING_STATE_KEY) + co_entity_id_key = SAMLVirtualCoFrontend.KEY_CO_ENTITY_ID + co_entity_id = state.get(frontend_name, {}).get(co_entity_id_key) + + entity_ids = [requester, issuer, co_entity_id, "default"] + if self.provider_attribute: + try: + entity_ids.insert( + 0, + data.attributes[self.provider_attribute][0] + ) + except (KeyError, IndexError): + pass + + config, entity_id = next((self.config.get(e), e) + for e in entity_ids if self.config.get(e)) + msg = { "message": "entityID for the involved entities", "requester": requester, "issuer": issuer, "config": self._filter_config(config), } - satosa_logging(logger, logging.DEBUG, msg, context.state) + if co_entity_id: + msg["co_entity_id"] = co_entity_id + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) - # Ignore this SP entirely if so configured. + # Ignore this entityID entirely if so configured. if config["ignore"]: - msg = "Ignoring SP {}".format(requester) - satosa_logging(logger, logging.INFO, msg, context.state) + msg = "Ignoring entityID {}".format(entity_id) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.info(logline) return super().process(context, data) # The list of values for the LDAP search filters that will be tried in @@ -431,7 +476,8 @@ def process(self, context, data): if filter_value ] msg = {"message": "Search filters", "filter_values": filter_values} - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) # Initialize an empty LDAP record. The first LDAP record found using # the ordered # list of search filter values will be the record used. 
@@ -439,15 +485,26 @@ def process(self, context, data): results = None exp_msg = None + connection = config["connection"] + msg = { + "message": "LDAP server host", + "server host": connection.server.host, + } + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + for filter_val in filter_values: - connection = config["connection"] - ldap_ident_attr = config["ldap_identifier_attribute"] - search_filter = "({0}={1})".format(ldap_ident_attr, filter_val) + if config["search_filter"]: + search_filter = config["search_filter"].format(filter_val) + else: + ldap_ident_attr = config["ldap_identifier_attribute"] + search_filter = "({0}={1})".format(ldap_ident_attr, filter_val) msg = { "message": "LDAP query with constructed search filter", "search filter": search_filter, } - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) attributes = ( config["query_return_attributes"] @@ -468,13 +525,15 @@ def process(self, context, data): exp_msg = "Caught unhandled exception: {}".format(err) if exp_msg: - satosa_logging(logger, logging.ERROR, exp_msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=exp_msg) + logger.error(logline) return super().process(context, data) if not results: msg = "Querying LDAP server: No results for {}." 
msg = msg.format(filter_val) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) continue if isinstance(results, bool): @@ -483,18 +542,21 @@ def process(self, context, data): responses = connection.get_response(results)[0] msg = "Done querying LDAP server" - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) msg = "LDAP server returned {} records".format(len(responses)) - satosa_logging(logger, logging.INFO, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.info(logline) # For now consider only the first record found (if any). if len(responses) > 0: - if len(responses) > 1: + if len(responses) > 1 and not config.get("use_all_results", False): msg = "LDAP server returned {} records using search filter" msg = msg + " value {}" msg = msg.format(len(responses), filter_val) - satosa_logging(logger, logging.WARN, msg, context.state) - record = responses[0] + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.warning(logline) + responses = responses[0:1] break # Before using a found record, if any, to populate attributes @@ -502,73 +564,82 @@ def process(self, context, data): if config["clear_input_attributes"]: msg = "Clearing values for these input attributes: {}" msg = msg.format(data.attributes) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) data.attributes = {} - # This adapts records with different search and connection strategy - # (sync without pool), it should be tested with anonimous bind with - # message_id. 
- if isinstance(results, bool) and record: - record = { - "dn": record.entry_dn if hasattr(record, "entry_dn") else "", - "attributes": ( - record.entry_attributes_as_dict - if hasattr(record, "entry_attributes_as_dict") - else {} - ), - } - - # Use a found record, if any, to populate attributes and input for - # NameID - if record: - msg = { - "message": "Using record with DN and attributes", - "DN": record["dn"], - "attributes": record["attributes"], - } - satosa_logging(logger, logging.DEBUG, msg, context.state) - - # Populate attributes as configured. - new_attrs = self._populate_attributes(config, record) - - overwrite = config["overwrite_existing_attributes"] - for attr, values in new_attrs.items(): - if not overwrite: - values = list(set(data.attributes.get(attr, []) + values)) - data.attributes[attr] = values - - # Populate input for NameID if configured. SATOSA core does the - # hashing of input to create a persistent NameID. - user_ids = self._populate_input_for_name_id(config, record, data) - if user_ids: - data.subject_id = "".join(user_ids) - msg = "NameID value is {}".format(data.subject_id) - satosa_logging(logger, logging.DEBUG, msg, None) - - # Add the record to the context so that later microservices - # may use it if required. - context.decorate(KEY_FOUND_LDAP_RECORD, record) - msg = "Added record {} to context".format(record) - satosa_logging(logger, logging.DEBUG, msg, context.state) - else: - msg = "No record found in LDAP so no attributes will be added" - satosa_logging(logger, logging.WARN, msg, context.state) - on_ldap_search_result_empty = config["on_ldap_search_result_empty"] - if on_ldap_search_result_empty: - # Redirect to the configured URL with - # the entityIDs for the target SP and IdP used by the user - # as query string parameters (URL encoded). 
- encoded_sp_entity_id = urllib.parse.quote_plus(requester) - encoded_idp_entity_id = urllib.parse.quote_plus(issuer) - url = "{}?sp={}&idp={}".format( - on_ldap_search_result_empty, - encoded_sp_entity_id, - encoded_idp_entity_id, - ) - msg = "Redirecting to {}".format(url) - satosa_logging(logger, logging.INFO, msg, context.state) - return Redirect(url) + for record in responses: + # This adapts records with different search and connection strategy + # (sync without pool), it should be tested with anonimous bind with + # message_id. + if isinstance(results, bool) and record: + record = { + "dn": record.entry_dn if hasattr(record, "entry_dn") else "", + "attributes": ( + record.entry_attributes_as_dict + if hasattr(record, "entry_attributes_as_dict") + else {} + ), + } + + # Use a found record, if any, to populate attributes and input for + # NameID + if record: + msg = { + "message": "Using record with DN and attributes", + "DN": record["dn"], + "attributes": record["attributes"], + } + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + + # Populate attributes as configured. + new_attrs = self._populate_attributes(config, record) + + overwrite = config["overwrite_existing_attributes"] + for attr, values in new_attrs.items(): + if not overwrite: + values = list(map(str, set(data.attributes.get(attr, []) + values))) + else: + values = list(map(str, set(values))) + data.attributes[attr] = values + + # Populate input for NameID if configured. SATOSA core does the + # hashing of input to create a persistent NameID. + user_ids = self._populate_input_for_name_id(config, record, data) + if user_ids: + data.subject_id = "".join(user_ids) + msg = "NameID value is {}".format(data.subject_id) + logger.debug(msg) + + # Add the record to the context so that later microservices + # may use it if required. 
+ context.decorate(KEY_FOUND_LDAP_RECORD, record) + msg = "Added record {} to context".format(record) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) + else: + msg = "No record found in LDAP so no attributes will be added" + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.warning(logline) + on_ldap_search_result_empty = config["on_ldap_search_result_empty"] + if on_ldap_search_result_empty: + # Redirect to the configured URL with + # the entityIDs for the target SP and IdP used by the user + # as query string parameters (URL encoded). + encoded_sp_entity_id = urllib.parse.quote_plus(requester) + encoded_idp_entity_id = urllib.parse.quote_plus(issuer) + url = "{}?sp={}&idp={}".format( + on_ldap_search_result_empty, + encoded_sp_entity_id, + encoded_idp_entity_id, + ) + msg = "Redirecting to {}".format(url) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.info(logline) + return Redirect(url) msg = "Returning data.attributes {}".format(data.attributes) - satosa_logging(logger, logging.DEBUG, msg, context.state) + logline = lu.LOG_FMT.format(id=session_id, message=msg) + logger.debug(logline) return super().process(context, data) diff --git a/src/satosa/micro_services/primary_identifier.py b/src/satosa/micro_services/primary_identifier.py index 8b41b65c5..1df2479eb 100644 --- a/src/satosa/micro_services/primary_identifier.py +++ b/src/satosa/micro_services/primary_identifier.py @@ -5,7 +5,6 @@ the value for a configured attribute, for example uid. """ -import copy import logging import urllib.parse @@ -54,7 +53,7 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): # Get the values asserted by the IdP for the configured list of attribute names for this candidate # and substitute None if the IdP did not assert any value for a configured attribute. 
- values = [ attributes.get(attribute_name, [None])[0] for attribute_name in candidate['attribute_names'] ] + values = [ attributes.get(attribute_name, [None])[0] for attribute_name in candidate['attribute_names'] if attribute_name != 'name_id' ] msg = "{} Found candidate values {}".format(logprefix, values) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) @@ -63,7 +62,6 @@ def constructPrimaryIdentifier(self, data, ordered_identifier_candidates): # name_id_format add the value for the NameID of that format if it was asserted by the IdP # or else add the value None. if 'name_id' in candidate['attribute_names']: - candidate_nameid_value = None candidate_nameid_value = None candidate_name_id_format = candidate.get('name_id_format') name_id_value = data.subject_id @@ -139,7 +137,7 @@ def process(self, context, data): # Find the entityID for the SP that initiated the flow try: spEntityID = context.state.state_dict['SATOSA_BASE']['requester'] - except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID for the SP requester".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) @@ -152,7 +150,7 @@ def process(self, context, data): # Find the entityID for the IdP that issued the assertion try: idpEntityID = data.auth_info.issuer - except KeyError as err: + except KeyError: msg = "{} Unable to determine the entityID for the IdP issuer".format(logprefix) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.error(logline) @@ -191,6 +189,12 @@ def process(self, context, data): clear_input_attributes = self.config['clear_input_attributes'] else: clear_input_attributes = False + if 'replace_subject_id' in config: + replace_subject_id = config['replace_subject_id'] + elif 'replace_subject_id' in self.config: + replace_subject_id = self.config['replace_subject_id'] + else: + replace_subject_id = False if 
'ignore' in config: ignore = True else: @@ -244,19 +248,29 @@ def process(self, context, data): # Clear input attributes if so configured. if clear_input_attributes: msg = "{} Clearing values for these input attributes: {}".format( - logprefix, data.attribute_names + logprefix, data.attributes.keys() ) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) logger.debug(logline) data.attributes = {} - # Set the primary identifier attribute to the value found. - data.attributes[primary_identifier] = primary_identifier_val - msg = "{} Setting attribute {} to value {}".format( - logprefix, primary_identifier, primary_identifier_val - ) - logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) - logger.debug(logline) + if primary_identifier: + # Set the primary identifier attribute to the value found. + data.attributes[primary_identifier] = primary_identifier_val + msg = "{} Setting attribute {} to value {}".format( + logprefix, primary_identifier, primary_identifier_val + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + + # Replace subject_id with the constructed primary identifier if so configured. 
+ if replace_subject_id: + msg = "{} Setting subject_id to value {}".format( + logprefix, primary_identifier_val + ) + logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) + logger.debug(logline) + data.subject_id = primary_identifier_val msg = "{} returning data.attributes {}".format(logprefix, str(data.attributes)) logline = lu.LOG_FMT.format(id=lu.get_session_id(context.state), message=msg) diff --git a/src/satosa/micro_services/processors/base_processor.py b/src/satosa/micro_services/processors/base_processor.py index ad5eb10b5..b29b7f294 100644 --- a/src/satosa/micro_services/processors/base_processor.py +++ b/src/satosa/micro_services/processors/base_processor.py @@ -2,5 +2,5 @@ class BaseProcessor(object): def __init__(self): pass - def process(internal_data, attribute, **kwargs): + def process(self, internal_data, attribute, **kwargs): pass diff --git a/src/satosa/micro_services/processors/regex_sub_processor.py b/src/satosa/micro_services/processors/regex_sub_processor.py new file mode 100644 index 000000000..85b95b50a --- /dev/null +++ b/src/satosa/micro_services/processors/regex_sub_processor.py @@ -0,0 +1,43 @@ +from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning +from .base_processor import BaseProcessor +import re +import logging + +CONFIG_KEY_MATCH_PATTERN = 'regex_sub_match_pattern' +CONFIG_KEY_REPLACE_PATTERN = 'regex_sub_replace_pattern' +logger = logging.getLogger(__name__) +class RegexSubProcessor(BaseProcessor): + """ + Performs a regex sub against an attribute value. 
+ Example configuration: + module: satosa.micro_services.attribute_processor.AttributeProcessor + name: AttributeProcessor + config: + process: + - attribute: role + processors: + - name: RegexSubProcessor + module: satosa.micro_services.custom.processors.regex_sub_processor + regex_sub_match_pattern: (?<=saml-provider\/)(.*)(?=,) + regex_sub_replace_pattern: \1-Test + + """ + + def process(self, internal_data, attribute, **kwargs): + regex_sub_match_pattern = r'{}'.format(kwargs.get(CONFIG_KEY_MATCH_PATTERN, '')) + if regex_sub_match_pattern == '': + raise AttributeProcessorError("The regex_sub_match_pattern needs to be set") + + regex_sub_replace_pattern = r'{}'.format(kwargs.get(CONFIG_KEY_REPLACE_PATTERN, '')) + if regex_sub_replace_pattern == '': + raise AttributeProcessorError("The regex_sub_replace_pattern needs to be set") + attributes = internal_data.attributes + + values = attributes.get(attribute, []) + new_values = [] + if not values: + raise AttributeProcessorWarning("Cannot apply regex_sub to {}, it has no values".format(attribute)) + for value in values: + new_values.append(re.sub(r'{}'.format(regex_sub_match_pattern), r'{}'.format(regex_sub_replace_pattern), value)) + logger.debug('regex_sub new_values: {}'.format(new_values)) + attributes[attribute] = new_values \ No newline at end of file diff --git a/src/satosa/micro_services/processors/scope_extractor_processor.py b/src/satosa/micro_services/processors/scope_extractor_processor.py index 48e8bda6c..863bc7740 100644 --- a/src/satosa/micro_services/processors/scope_extractor_processor.py +++ b/src/satosa/micro_services/processors/scope_extractor_processor.py @@ -31,6 +31,8 @@ def process(self, internal_data, attribute, **kwargs): values = attributes.get(attribute, []) if not values: raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute)) + if not isinstance(values, list): + values = [values] if not any('@' in val for val in values): raise 
AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute)) for value in values: diff --git a/src/satosa/micro_services/processors/scope_remover_processor.py b/src/satosa/micro_services/processors/scope_remover_processor.py index b6e61b7ed..82073b5b8 100644 --- a/src/satosa/micro_services/processors/scope_remover_processor.py +++ b/src/satosa/micro_services/processors/scope_remover_processor.py @@ -1,4 +1,4 @@ -from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning +from ..attribute_processor import AttributeProcessorWarning from .base_processor import BaseProcessor class ScopeRemoverProcessor(BaseProcessor): diff --git a/src/satosa/plugin_loader.py b/src/satosa/plugin_loader.py index 65c535de2..b7eb4cf46 100644 --- a/src/satosa/plugin_loader.py +++ b/src/satosa/plugin_loader.py @@ -7,8 +7,8 @@ from contextlib import contextmanager from pydoc import locate -import yaml -from yaml.error import YAMLError +from satosa.yaml import load as yaml_load +from satosa.yaml import YAMLError from .backends.base import BackendModule from .exception import SATOSAConfigurationError @@ -143,7 +143,7 @@ def _response_micro_service_filter(cls): def _load_plugin_config(config): try: - return yaml.safe_load(config) + return yaml_load(config) except YAMLError as exc: if hasattr(exc, 'problem_mark'): mark = exc.problem_mark diff --git a/src/satosa/proxy_server.py b/src/satosa/proxy_server.py index c1c12d2cc..e23be1418 100644 --- a/src/satosa/proxy_server.py +++ b/src/satosa/proxy_server.py @@ -1,32 +1,37 @@ -import io import json import logging import logging.config -import sys -from urllib.parse import parse_qsl - -import pkg_resources +from io import BytesIO +from urllib.parse import parse_qsl as _parse_query_string from cookies_samesite_compat import CookiesSameSiteCompatMiddleware + +import satosa + from .base import SATOSABase from .context import Context -from .response import ServiceError, NotFound 
-from .routing import SATOSANoBoundEndpointError -from saml2.s_utils import UnknownSystemEntity +from .response import ServiceError +from .response import NotFound + logger = logging.getLogger(__name__) +def parse_query_string(data): + query_param_pairs = _parse_query_string(data) + query_param_dict = dict(query_param_pairs) + if "resource" in query_param_dict: + query_param_dict["resource"] = [t[1] for t in query_param_pairs if t[0] == "resource"] + return query_param_dict + + def unpack_get(environ): """ Unpacks a redirect request query string. :param environ: whiskey application environment. :return: A dictionary with parameters. """ - if "QUERY_STRING" in environ: - return dict(parse_qsl(environ["QUERY_STRING"])) - - return None + return parse_query_string(environ.get("QUERY_STRING")) def unpack_post(environ, content_length): @@ -38,7 +43,7 @@ def unpack_post(environ, content_length): post_body = environ['wsgi.input'].read(content_length).decode("utf-8") data = None if "application/x-www-form-urlencoded" in environ["CONTENT_TYPE"]: - data = dict(parse_qsl(post_body)) + data = parse_query_string(post_body) elif "application/json" in environ["CONTENT_TYPE"]: data = json.loads(post_body) @@ -64,6 +69,27 @@ def unpack_request(environ, content_length=0): return data +def collect_server_headers(environ): + headers = { + header_name: header_value + for header_name, header_value in environ.items() + if header_name.startswith("SERVER_") + } + return headers + + +def collect_http_headers(environ): + headers = { + header_name: header_value + for header_name, header_value in environ.items() + if ( + header_name.startswith("HTTP_") + or header_name.startswith("REMOTE_") + ) + } + return headers + + class ToBytesMiddleware(object): """Converts a message to bytes to be sent by WSGI server.""" @@ -102,56 +128,71 @@ def __call__(self, environ, start_response, debug=False): context.path = path # copy wsgi.input stream to allow it to be re-read later by satosa plugins - # see: 
http://stackoverflow.com/ - # questions/1783383/how-do-i-copy-wsgi-input-if-i-want-to-process-post-data-more-than-once + # see: http://stackoverflow.com/questions/1783383/how-do-i-copy-wsgi-input-if-i-want-to-process-post-data-more-than-once content_length = int(environ.get('CONTENT_LENGTH', '0') or '0') - body = io.BytesIO(environ['wsgi.input'].read(content_length)) + body = BytesIO(environ['wsgi.input'].read(content_length)) environ['wsgi.input'] = body + context.request = unpack_request(environ, content_length) + context.request_uri = environ.get("REQUEST_URI") + context.request_method = environ.get("REQUEST_METHOD") + context.qs_params = parse_query_string(environ.get("QUERY_STRING")) + context.server = collect_server_headers(environ) + context.http_headers = collect_http_headers(environ) + context.cookie = context.http_headers.get("HTTP_COOKIE", "") + context.request_authorization = context.http_headers.get("HTTP_AUTHORIZATION", "") + environ['wsgi.input'].seek(0) - context.cookie = environ.get("HTTP_COOKIE", "") - context.request_authorization = environ.get("HTTP_AUTHORIZATION", "") + logline = { + "message": "Proxy server received request", + "request_method": context.request_method, + "request_uri": context.request_uri, + "content_length": content_length, + "request_data": context.request, + "query_params": context.qs_params, + "http_headers": context.http_headers, + "server_headers": context.server, + } + logger.debug(logline) try: resp = self.run(context) if isinstance(resp, Exception): raise resp return resp(environ, start_response) - except SATOSANoBoundEndpointError: - resp = NotFound("The Service or Identity Provider you requested could not be found.") - return resp(environ, start_response) - except Exception as err: - if type(err) != UnknownSystemEntity: - logline = "{}".format(err) - logger.exception(logline) + except Exception as e: + logger.exception(str(e)) if debug: raise - resp = ServiceError("%s" % err) + resp = ServiceError("%s" % e) return 
resp(environ, start_response) def make_app(satosa_config): try: - if "LOGGING" in satosa_config: - logging.config.dictConfig(satosa_config["LOGGING"]) - else: - stderr_handler = logging.StreamHandler(sys.stderr) - stderr_handler.setLevel(logging.DEBUG) - - root_logger = logging.getLogger("") - root_logger.addHandler(stderr_handler) - root_logger.setLevel(logging.DEBUG) - - try: - _ = pkg_resources.get_distribution(module.__name__) - logline = "Running SATOSA version {}".format( - pkg_resources.get.get_distribution("SATOSA").version - ) - logger.info(logline) - except (NameError, pkg_resources.DistributionNotFound): - pass + default_logging_config = { + "version": 1, + "formatters": { + "simple": { + "format": "[%(asctime)s] [%(levelname)s] [%(name)s.%(funcName)s] %(message)s" + } + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "level": "DEBUG", + "formatter": "simple", + } + }, + "loggers": {"satosa": {"level": "DEBUG"}}, + "root": {"level": "DEBUG", "handlers": ["stdout"]}, + } + logging.config.dictConfig(satosa_config.get("LOGGING", default_logging_config)) + + logger.info("Running SATOSA version {v}".format(v=satosa.__version__)) res1 = WsgiApplication(satosa_config) res2 = CookiesSameSiteCompatMiddleware(res1, satosa_config) diff --git a/src/satosa/routing.py b/src/satosa/routing.py index 317b047f9..015cffb23 100644 --- a/src/satosa/routing.py +++ b/src/satosa/routing.py @@ -4,8 +4,8 @@ import logging import re -from satosa.context import SATOSABadContextError -from satosa.exception import SATOSAError +from satosa.exception import SATOSABadContextError +from satosa.exception import SATOSANoBoundEndpointError import satosa.logging_util as lu @@ -15,20 +15,6 @@ STATE_KEY = "ROUTER" -class SATOSANoBoundEndpointError(SATOSAError): - """ - Raised when a given url path is not bound to any endpoint function - """ - pass - - -class SATOSAUnknownTargetBackend(SATOSAError): - """ - Raised when targeting an unknown 
backend - """ - pass - - class ModuleRouter(object): class UnknownEndpoint(ValueError): pass diff --git a/src/satosa/satosa_config.py b/src/satosa/satosa_config.py index d3b414520..d45280c41 100644 --- a/src/satosa/satosa_config.py +++ b/src/satosa/satosa_config.py @@ -3,10 +3,12 @@ """ import logging import os +import os.path -import yaml +from satosa.exception import SATOSAConfigurationError +from satosa.yaml import load as yaml_load +from satosa.yaml import YAMLError -from .exception import SATOSAConfigurationError logger = logging.getLogger(__name__) @@ -38,7 +40,7 @@ def __init__(self, config): # Load sensitive config from environment variables for key in SATOSAConfig.sensitive_dict_keys: - val = os.environ.get("SATOSA_{key}".format(key=key)) + val = os.environ.get(f"SATOSA_{key}") if val: self._config[key] = val @@ -54,7 +56,7 @@ def __init__(self, config): plugin_configs.append(plugin_config) break else: - raise SATOSAConfigurationError('Failed to load plugin config \'{}\''.format(config)) + raise SATOSAConfigurationError(f"Failed to load plugin config '{config}'") self._config[key] = plugin_configs for parser in parsers: @@ -84,8 +86,8 @@ def _verify_dict(self, conf): raise SATOSAConfigurationError("Missing key '%s' in config" % key) for key in SATOSAConfig.sensitive_dict_keys: - if key not in conf and "SATOSA_{key}".format(key=key) not in os.environ: - raise SATOSAConfigurationError("Missing key '%s' from config and ENVIRONMENT" % key) + if key not in conf and f"SATOSA_{key}" not in os.environ: + raise SATOSAConfigurationError(f"Missing key '{key}' from config and ENVIRONMENT") def __getitem__(self, item): """ @@ -143,10 +145,11 @@ def _load_yaml(self, config_file): :param config_file: config to load. 
Can be file path or yaml string :return: Loaded config """ + try: with open(os.path.abspath(config_file)) as f: - return yaml.safe_load(f.read()) - except yaml.YAMLError as exc: + return yaml_load(f.read()) + except YAMLError as exc: logger.error("Could not parse config as YAML: {}".format(exc)) if hasattr(exc, 'problem_mark'): mark = exc.problem_mark diff --git a/src/satosa/scripts/satosa_saml_metadata.py b/src/satosa/scripts/satosa_saml_metadata.py index 20e4ae4f9..c0638d8b7 100644 --- a/src/satosa/scripts/satosa_saml_metadata.py +++ b/src/satosa/scripts/satosa_saml_metadata.py @@ -5,6 +5,7 @@ from saml2.sigver import security_context from ..metadata_creation.saml_metadata import create_entity_descriptors +from ..metadata_creation.saml_metadata import create_entity_descriptor_metadata from ..metadata_creation.saml_metadata import create_signed_entity_descriptor from ..satosa_config import SATOSAConfig @@ -16,44 +17,58 @@ def _get_security_context(key, cert): return security_context(conf) -def _create_split_entity_descriptors(entities, secc, valid): +def _create_split_entity_descriptors(entities, secc, valid, sign=True): output = [] for module_name, eds in entities.items(): for i, ed in enumerate(eds): - output.append((create_signed_entity_descriptor(ed, secc, valid), "{}_{}.xml".format(module_name, i))) + ed_str = ( + create_signed_entity_descriptor(ed, secc, valid) + if sign + else create_entity_descriptor_metadata(ed, valid) + ) + output.append((ed_str, "{}_{}.xml".format(module_name, i))) return output -def _create_merged_entities_descriptors(entities, secc, valid, name): +def _create_merged_entities_descriptors(entities, secc, valid, name, sign=True): output = [] frontend_entity_descriptors = [e for sublist in entities.values() for e in sublist] for frontend in frontend_entity_descriptors: - output.append((create_signed_entity_descriptor(frontend, secc, valid), name)) + ed_str = ( + create_signed_entity_descriptor(frontend, secc, valid) + if sign + else 
create_entity_descriptor_metadata(frontend, valid) + ) + output.append((ed_str, name)) return output def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend_metadata=False, - split_backend_metadata=False): + split_backend_metadata=False, sign=True): """ Generates SAML metadata for the given PROXY_CONF, signed with the given KEY and associated CERT. """ satosa_config = SATOSAConfig(proxy_conf) - secc = _get_security_context(key, cert) + + if sign and (not key or not cert): + raise ValueError("Key and cert are required when signing") + secc = _get_security_context(key, cert) if sign else None + frontend_entities, backend_entities = create_entity_descriptors(satosa_config) output = [] if frontend_entities: if split_frontend_metadata: - output.extend(_create_split_entity_descriptors(frontend_entities, secc, valid)) + output.extend(_create_split_entity_descriptors(frontend_entities, secc, valid, sign)) else: - output.extend(_create_merged_entities_descriptors(frontend_entities, secc, valid, "frontend.xml")) + output.extend(_create_merged_entities_descriptors(frontend_entities, secc, valid, "frontend.xml", sign)) if backend_entities: if split_backend_metadata: - output.extend(_create_split_entity_descriptors(backend_entities, secc, valid)) + output.extend(_create_split_entity_descriptors(backend_entities, secc, valid, sign)) else: - output.extend(_create_merged_entities_descriptors(backend_entities, secc, valid, "backend.xml")) + output.extend(_create_merged_entities_descriptors(backend_entities, secc, valid, "backend.xml", sign)) for metadata, filename in output: path = os.path.join(dir, filename) @@ -64,8 +79,8 @@ def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_fron @click.command() @click.argument("proxy_conf") -@click.argument("key") -@click.argument("cert") +@click.argument("key", required=False) +@click.argument("cert", required=False) @click.option("--dir", type=click.Path(exists=True, file_okay=False, 
dir_okay=True, writable=True, readable=False, resolve_path=False), @@ -75,5 +90,7 @@ def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_fron help="Create one entity descriptor per file for the frontend metadata") @click.option("--split-backend", is_flag=True, type=click.BOOL, default=False, help="Create one entity descriptor per file for the backend metadata") -def construct_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend): - create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend) +@click.option("--sign/--no-sign", is_flag=True, type=click.BOOL, default=True, + help="Sign the generated metadata") +def construct_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend, sign): + create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend, split_backend, sign) diff --git a/src/satosa/state.py b/src/satosa/state.py index d81a7773a..1fc768425 100644 --- a/src/satosa/state.py +++ b/src/satosa/state.py @@ -11,8 +11,7 @@ from satosa.cookies import SimpleCookie from uuid import uuid4 -from lzma import LZMACompressor -from lzma import LZMADecompressor +from lzma import LZMACompressor, LZMADecompressor from Cryptodome import Random from Cryptodome.Cipher import AES @@ -23,38 +22,152 @@ logger = logging.getLogger(__name__) -# TODO MOVE TO CONFIG -STATE_COOKIE_MAX_AGE = 1200 - _SESSION_ID_KEY = "SESSION_ID" -def state_to_cookie(state, name, path, encryption_key): +class State(UserDict): + """ + This class holds a state attribute object. A state object must be able to be converted to + a json string, otherwise will an exception be raised. """ - Saves a state to a cookie - :type state: satosa.state.State - :type name: str - :type path: str - :type encryption_key: str - :rtype: satosa.cookies.SimpleCookie + def __init__(self, urlstate_data=None, encryption_key=None): + """ + If urlstate is empty a new empty state instance will be returned. 
- :param state: The state to save - :param name: Name identifier of the cookie - :param path: Endpoint path the cookie will be associated to - :param encryption_key: Key to encrypt the state information - :return: A cookie - """ + If urlstate is not empty the constructor will rebuild the state attribute objects + from the urlstate string. + :type urlstate_data: str + :type encryption_key: str + :rtype: State + + :param encryption_key: The key to be used for encryption. + :param urlstate_data: A string created by the method urlstate in this class. + :return: An instance of this class. + """ + self.delete = False + + urlstate_data = {} if urlstate_data is None else urlstate_data + if urlstate_data and not encryption_key: + raise ValueError("If an 'urlstate_data' is supplied 'encrypt_key' must be specified.") + + if urlstate_data: + try: + urlstate_data_bytes = urlstate_data.encode("utf-8") + urlstate_data_b64decoded = base64.urlsafe_b64decode(urlstate_data_bytes) + lzma = LZMADecompressor() + urlstate_data_decompressed = lzma.decompress(urlstate_data_b64decoded) + urlstate_data_decrypted = _AESCipher(encryption_key).decrypt( + urlstate_data_decompressed + ) + lzma = LZMADecompressor() + urlstate_data_decrypted_decompressed = lzma.decompress(urlstate_data_decrypted) + urlstate_data_obj = json.loads(urlstate_data_decrypted_decompressed) + except Exception as e: + error_context = { + "message": "Failed to load state data. 
Reinitializing empty state.", + "reason": str(e), + "urlstate_data": urlstate_data, + } + logger.warning(error_context) + urlstate_data = {} + else: + urlstate_data = urlstate_data_obj + + session_id = ( + urlstate_data[_SESSION_ID_KEY] + if urlstate_data and _SESSION_ID_KEY in urlstate_data + else uuid4().urn + ) + urlstate_data[_SESSION_ID_KEY] = session_id + + super().__init__(urlstate_data) + + @property + def session_id(self): + return self.data.get(_SESSION_ID_KEY) + + def urlstate(self, encryption_key): + """ + Will return a url safe representation of the state. + + :type encryption_key: Key used for encryption. + :rtype: str + + :return: Url representation av of the state. + """ + lzma = LZMACompressor() + urlstate_data = json.dumps(self.data) + urlstate_data = lzma.compress(urlstate_data.encode("UTF-8")) + urlstate_data += lzma.flush() + urlstate_data = _AESCipher(encryption_key).encrypt(urlstate_data) + lzma = LZMACompressor() + urlstate_data = lzma.compress(urlstate_data) + urlstate_data += lzma.flush() + urlstate_data = base64.urlsafe_b64encode(urlstate_data) + return urlstate_data.decode("utf-8") - cookie_data = "" if state.delete else state.urlstate(encryption_key) - max_age = 0 if state.delete else STATE_COOKIE_MAX_AGE + def copy(self): + """ + Returns a deepcopy of the state + + :rtype: satosa.state.State + + :return: A copy of the state + """ + state_copy = State() + state_copy.data = copy.deepcopy(self.data) + return state_copy + + @property + def state_dict(self): + """ + :rtype: dict[str, any] + :return: A copy of the state as dictionary. 
+ """ + return copy.deepcopy(self.data) + + +def state_to_cookie( + state: State, + *, + name: str, + path: str, + encryption_key: str, + secure: bool = None, + httponly: bool = None, + samesite: str = None, + max_age: str = None, +) -> SimpleCookie: + """ + Saves a state to a cookie + :param state: the data to save + :param name: identifier of the cookie + :param path: path the cookie will be associated to + :param encryption_key: the key to use to encrypt the state information + :param secure: whether to include the cookie only when the request is transmitted + over a secure channel + :param httponly: whether the cookie should only be accessed only by the server + :param samesite: whether the cookie should only be sent with requests + initiated from the same registrable domain + :param max_age: indicates the maximum lifetime of the cookie, + represented as the number of seconds until the cookie expires + :return: A cookie object + """ cookie = SimpleCookie() - cookie[name] = cookie_data - cookie[name]["samesite"] = "None" - cookie[name]["secure"] = True + cookie[name] = "" if state.delete else state.urlstate(encryption_key) cookie[name]["path"] = path - cookie[name]["max-age"] = max_age + cookie[name]["secure"] = secure if secure is not None else True + cookie[name]["httponly"] = httponly if httponly is not None else "" + cookie[name]["samesite"] = samesite if samesite is not None else "None" + cookie[name]["max-age"] = ( + 0 + if state.delete + else max_age + if max_age is not None + else "" + ) msg = "Saved state in cookie {name} with properties {props}".format( name=name, props=list(cookie[name].items()) @@ -65,7 +178,7 @@ def state_to_cookie(state, name, path, encryption_key): return cookie -def cookie_to_state(cookie_str, name, encryption_key): +def cookie_to_state(cookie_str: str, name: str, encryption_key: str) -> State: """ Loads a state from a cookie @@ -83,8 +196,7 @@ def cookie_to_state(cookie_str, name, encryption_key): cookie = 
SimpleCookie(cookie_str) state = State(cookie[name].value, encryption_key) except KeyError as e: - msg_tmpl = 'No cookie named {name} in {data}' - msg = msg_tmpl.format(name=name, data=cookie_str) + msg = f'No cookie named {name} in {cookie_str}' raise SATOSAStateError(msg) from e except ValueError as e: msg_tmpl = 'Failed to process {name} from {data}' @@ -161,94 +273,3 @@ def _unpad(b): :rtype: bytes """ return b[:-ord(b[len(b) - 1:])] - - -class State(UserDict): - """ - This class holds a state attribute object. A state object must be able to be converted to - a json string, otherwise will an exception be raised. - """ - - def __init__(self, urlstate_data=None, encryption_key=None): - """ - If urlstate is empty a new empty state instance will be returned. - - If urlstate is not empty the constructor will rebuild the state attribute objects - from the urlstate string. - :type urlstate_data: str - :type encryption_key: str - :rtype: State - - :param encryption_key: The key to be used for encryption. - :param urlstate_data: A string created by the method urlstate in this class. - :return: An instance of this class. 
- """ - self.delete = False - - urlstate_data = {} if urlstate_data is None else urlstate_data - if urlstate_data and not encryption_key: - raise ValueError("If an 'urlstate_data' is supplied 'encrypt_key' must be specified.") - - if urlstate_data: - urlstate_data = urlstate_data.encode("utf-8") - urlstate_data = base64.urlsafe_b64decode(urlstate_data) - lzma = LZMADecompressor() - urlstate_data = lzma.decompress(urlstate_data) - urlstate_data = _AESCipher(encryption_key).decrypt(urlstate_data) - lzma = LZMADecompressor() - urlstate_data = lzma.decompress(urlstate_data) - urlstate_data = urlstate_data.decode("UTF-8") - urlstate_data = json.loads(urlstate_data) - - session_id = ( - urlstate_data[_SESSION_ID_KEY] - if urlstate_data and _SESSION_ID_KEY in urlstate_data - else uuid4().urn - ) - urlstate_data[_SESSION_ID_KEY] = session_id - - super().__init__(urlstate_data) - - @property - def session_id(self): - return self.data.get(_SESSION_ID_KEY) - - def urlstate(self, encryption_key): - """ - Will return a url safe representation of the state. - - :type encryption_key: Key used for encryption. - :rtype: str - - :return: Url representation av of the state. - """ - lzma = LZMACompressor() - urlstate_data = json.dumps(self.data) - urlstate_data = lzma.compress(urlstate_data.encode("UTF-8")) - urlstate_data += lzma.flush() - urlstate_data = _AESCipher(encryption_key).encrypt(urlstate_data) - lzma = LZMACompressor() - urlstate_data = lzma.compress(urlstate_data) - urlstate_data += lzma.flush() - urlstate_data = base64.urlsafe_b64encode(urlstate_data) - return urlstate_data.decode("utf-8") - - def copy(self): - """ - Returns a deepcopy of the state - - :rtype: satosa.state.State - - :return: A copy of the state - """ - state_copy = State() - state_copy.data = copy.deepcopy(self.data) - return state_copy - - @property - def state_dict(self): - """ - :rtype: dict[str, any] - :return: A copy of the state as dictionary. 
- """ - return copy.deepcopy(self.data) diff --git a/src/satosa/version.py b/src/satosa/version.py new file mode 100644 index 000000000..cac85faf0 --- /dev/null +++ b/src/satosa/version.py @@ -0,0 +1,12 @@ +try: + from importlib.metadata import version as _resolve_package_version +except ImportError: + from importlib_metadata import version as _resolve_package_version # type: ignore[no-redef] + + +def _parse_version(): + value = _resolve_package_version("satosa") + return value + + +version = _parse_version() diff --git a/src/satosa/wsgi.py b/src/satosa/wsgi.py index e5e9e1948..86220eb06 100644 --- a/src/satosa/wsgi.py +++ b/src/satosa/wsgi.py @@ -1,9 +1,7 @@ import argparse -import functools import os import sys -from werkzeug.debug import DebuggedApplication from werkzeug.serving import run_simple from satosa.proxy_server import make_app @@ -17,33 +15,25 @@ def main(): global app - parser = argparse.ArgumentParser(description='Process some integers.') - parser.add_argument('port', type=int) - parser.add_argument('--keyfile', type=str) - parser.add_argument('--certfile', type=str) - parser.add_argument('--host', type=str) - parser.add_argument('-d', action='store_true', dest="debug", - help="enable debug mode.") + parser = argparse.ArgumentParser(description="Process some integers.") + parser.add_argument("port", type=int) + parser.add_argument("--keyfile", type=str) + parser.add_argument("--certfile", type=str) + parser.add_argument("--host", type=str) args = parser.parse_args() if (args.keyfile and not args.certfile) or (args.certfile and not args.keyfile): print("Both keyfile and certfile must be specified for HTTPS.") - sys.exit() + sys.exit(1) - if args.debug: - app.app = functools.partial(app.app, debug=True) - app = DebuggedApplication(app) + ssl_context = ( + (args.certfile, args.keyfile) + if args.keyfile and args.certfile + else None + ) + host = args.host or "localhost" + run_simple(host, args.port, app, ssl_context=ssl_context) - if (args.keyfile and 
args.certfile): - ssl_context = (args.certfile, args.keyfile) - else: - ssl_context = None - if args.host: - run_simple(args.host, args.port, app, ssl_context=ssl_context) - else: - run_simple('localhost', args.port, app, ssl_context=ssl_context) - - -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/satosa/yaml.py b/src/satosa/yaml.py new file mode 100644 index 000000000..d45b12116 --- /dev/null +++ b/src/satosa/yaml.py @@ -0,0 +1,63 @@ +import os + +from yaml import SafeLoader as _safe_loader +from yaml import YAMLError +from yaml import safe_load as load # noqa: F401 + + +def _constructor_env_variables(loader, node): + """ + Extracts the environment variable from the node's value. + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value of the environment variable + """ + raw_value = loader.construct_scalar(node) + new_value = os.environ.get(raw_value) + if new_value is None: + msg = "Cannot construct value from {node}: {value}".format( + node=node, value=new_value + ) + raise YAMLError(msg) + return new_value + + +def _constructor_envfile_variables(loader, node): + """ + Extracts the environment variable from the node's value. + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value read from file pointed to by environment variable + """ + raw_value = loader.construct_scalar(node) + filepath = os.environ.get(raw_value) + try: + with open(filepath, "r") as fd: + new_value = fd.read() + except (TypeError, IOError) as e: + msg = "Cannot construct value from {node}: {path}".format( + node=node, path=filepath + ) + raise YAMLError(msg) from e + else: + return new_value + + +def _constructor_tuple_variables(loader, node): + """ + Extracts the tuple variable from the node's value. 
+ :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: value of the tuple + """ + return tuple(loader.construct_sequence(node)) + + +TAG_ENV = "!ENV" +TAG_ENVFILE = "!ENVFILE" +TAG_TUPLE = "tag:yaml.org,2002:python/tuple" + + +_safe_loader.add_constructor(TAG_ENV, _constructor_env_variables) +_safe_loader.add_constructor(TAG_ENVFILE, _constructor_envfile_variables) +_safe_loader.add_constructor(TAG_TUPLE, _constructor_tuple_variables) diff --git a/tests/conftest.py b/tests/conftest.py index ef09cd753..f0602a028 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -130,8 +130,6 @@ def satosa_config_dict(backend_plugin_config, frontend_plugin_config, request_mi config = { "BASE": BASE_URL, "COOKIE_STATE_NAME": "TEST_STATE", - "BACKEND_MODULES": ["foo"], - "FRONTEND_MODULES": ["bar"], "INTERNAL_ATTRIBUTES": {"attributes": {}}, "STATE_ENCRYPTION_KEY": "state_encryption_key", "CUSTOM_PLUGIN_MODULE_PATHS": [os.path.dirname(__file__)], @@ -190,12 +188,6 @@ def saml_frontend_config(cert_and_key, sp_conf): "config": { "idp_config": { "entityid": "frontend-entity_id", - "organization": {"display_name": "Test Identities", "name": "Test Identities Org.", - "url": "http://www.example.com"}, - "contact_person": [{"contact_type": "technical", "email_address": "technical@example.com", - "given_name": "Technical"}, - {"contact_type": "support", "email_address": "support@example.com", - "given_name": "Support"}], "service": { "idp": { "endpoints": { @@ -361,89 +353,3 @@ def consent_module_config(signing_key_path): } } return consent_config - - -import atexit -import random -import shutil -import subprocess -import tempfile -import time - -import pymongo -import pytest - - -class MongoTemporaryInstance(object): - """Singleton to manage a temporary MongoDB instance - - Use this for testing purpose only. The instance is automatically destroyed - at the end of the program. 
- - """ - _instance = None - - @classmethod - def get_instance(cls): - if cls._instance is None: - cls._instance = cls() - atexit.register(cls._instance.shutdown) - return cls._instance - - def __init__(self): - self._tmpdir = tempfile.mkdtemp() - self._port = 27017 - self._process = subprocess.Popen(['mongod', '--bind_ip', 'localhost', - '--port', str(self._port), - '--dbpath', self._tmpdir, - '--nojournal', '--nohttpinterface', - '--noauth', '--smallfiles', - '--syncdelay', '0', - '--nssize', '1', ], - stdout=open('/tmp/mongo-temp.log', 'wb'), - stderr=subprocess.STDOUT) - - # XXX: wait for the instance to be ready - # Mongo is ready in a glance, we just wait to be able to open a - # Connection. - for i in range(10): - time.sleep(0.2) - try: - self._conn = pymongo.MongoClient('localhost', self._port) - except pymongo.errors.ConnectionFailure: - continue - else: - break - else: - self.shutdown() - assert False, 'Cannot connect to the mongodb test instance' - - @property - def conn(self): - return self._conn - - @property - def port(self): - return self._port - - def shutdown(self): - if self._process: - self._process.terminate() - self._process.wait() - self._process = None - shutil.rmtree(self._tmpdir, ignore_errors=True) - - def get_uri(self): - """ - Convenience function to get a mongodb URI to the temporary database. 
- - :return: URI - """ - return 'mongodb://localhost:{port!s}'.format(port=self.port) - - -@pytest.yield_fixture -def mongodb_instance(): - tmp_db = MongoTemporaryInstance() - yield tmp_db - tmp_db.shutdown() diff --git a/tests/flows/test_account_linking.py b/tests/flows/test_account_linking.py index 80a87a874..94f53a431 100644 --- a/tests/flows/test_account_linking.py +++ b/tests/flows/test_account_linking.py @@ -1,6 +1,6 @@ import responses from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.satosa_config import SATOSAConfig @@ -15,7 +15,7 @@ def test_full_flow(self, satosa_config_dict, account_linking_module_config): satosa_config_dict["MICRO_SERVICES"].insert(0, account_linking_module_config) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # incoming auth req http_resp = test_client.get("/{}/{}/request".format(satosa_config_dict["BACKEND_MODULES"][0]["name"], diff --git a/tests/flows/test_consent.py b/tests/flows/test_consent.py index d2da94350..76dff496b 100644 --- a/tests/flows/test_consent.py +++ b/tests/flows/test_consent.py @@ -3,7 +3,7 @@ import responses from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.satosa_config import SATOSAConfig @@ -18,7 +18,7 @@ def test_full_flow(self, satosa_config_dict, consent_module_config): satosa_config_dict["MICRO_SERVICES"].append(consent_module_config) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # incoming auth req http_resp = test_client.get("/{}/{}/request".format(satosa_config_dict["BACKEND_MODULES"][0]["name"], diff --git 
a/tests/flows/test_oidc-saml.py b/tests/flows/test_oidc-saml.py index 2d51c9dd6..2a299bfef 100644 --- a/tests/flows/test_oidc-saml.py +++ b/tests/flows/test_oidc-saml.py @@ -1,15 +1,18 @@ +import os import json +import base64 from urllib.parse import urlparse, urlencode, parse_qsl +import mongomock import pytest from jwkest.jwk import rsa_load, RSAKey from jwkest.jws import JWS from oic.oic.message import ClaimsRequest, Claims -from pyop.storage import MongoWrapper +from pyop.storage import StorageBase from saml2 import BINDING_HTTP_REDIRECT from saml2.config import IdPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -20,11 +23,31 @@ CLIENT_ID = "client1" +CLIENT_SECRET = "secret" +CLIENT_REDIRECT_URI = "https://client.example.com/cb" REDIRECT_URI = "https://client.example.com/cb" +DB_URI = "mongodb://localhost/satosa" +@pytest.fixture(scope="session") +def client_db_path(tmpdir_factory): + tmpdir = str(tmpdir_factory.getbasetemp()) + path = os.path.join(tmpdir, "cdb.json") + cdb_json = { + CLIENT_ID: { + "response_types": ["id_token", "code"], + "redirect_uris": [ + CLIENT_REDIRECT_URI + ], + "client_secret": CLIENT_SECRET + } + } + with open(path, "w") as f: + f.write(json.dumps(cdb_json)) + + return path @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -32,22 +55,47 @@ def oidc_frontend_config(signing_key_path, mongodb_instance): "issuer": "https://proxy-op.example.com", "signing_key_path": signing_key_path, "provider": {"response_types_supported": ["id_token"]}, - "db_uri": mongodb_instance.get_uri() # use mongodb for integration testing + "client_db_uri": DB_URI, # use mongodb for integration 
testing + "db_uri": DB_URI # use mongodb for integration testing } } - # insert client in mongodb - cdb = MongoWrapper(mongodb_instance.get_uri(), "satosa", "clients") - cdb[CLIENT_ID] = { - "redirect_uris": [REDIRECT_URI], - "response_types": ["id_token"] + return data + + +@pytest.fixture +def oidc_stateless_frontend_config(signing_key_path, client_db_path): + data = { + "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", + "name": "OIDCFrontend", + "config": { + "issuer": "https://proxy-op.example.com", + "signing_key_path": signing_key_path, + "client_db_path": client_db_path, + "db_uri": "stateless://user:abc123@localhost", + "provider": { + "response_types_supported": ["id_token", "code"] + } + } } return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestOIDCToSAML: + def _client_setup(self): + """Insert client in mongodb.""" + self._cdb = StorageBase.from_uri( + DB_URI, db_name="satosa", collection="clients", ttl=None + ) + self._cdb[CLIENT_ID] = { + "redirect_uris": [REDIRECT_URI], + "response_types": ["id_token"] + } + def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_config, idp_conf): + self._client_setup() subject_id = "testuser1" # proxy config @@ -59,7 +107,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # get frontend OP config info provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) @@ -78,7 +126,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ # config test IdP backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) idp_conf["metadata"]["inline"].append(backend_metadata_str) - fakeidp = 
FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) # create auth resp req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) @@ -91,7 +139,7 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ # make auth resp to proxy authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp) - authn_resp = test_client.get("/" + authn_resp_req) + authn_resp = test_client.get(authn_resp_req) assert authn_resp.status == "303 See Other" # verify auth resp from proxy @@ -104,3 +152,134 @@ def test_full_flow(self, satosa_config_dict, oidc_frontend_config, saml_backend_ (name, values) in id_token_claims.items() for name, values in OIDC_USERS[subject_id].items() ) + + def test_full_stateless_id_token_flow(self, satosa_config_dict, oidc_stateless_frontend_config, saml_backend_config, idp_conf): + subject_id = "testuser1" + + # proxy config + satosa_config_dict["FRONTEND_MODULES"] = [oidc_stateless_frontend_config] + satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config] + satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name], + "saml": [attr_name]} + for attr_name in USERS[subject_id]} + _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) + + # application + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) + + # get frontend OP config info + provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) + + # create auth req + claims_request = ClaimsRequest(id_token=Claims(**{k: None for k in USERS[subject_id]})) + req_args = {"scope": "openid", "response_type": "id_token", "client_id": CLIENT_ID, + "redirect_uri": REDIRECT_URI, "nonce": "nonce", + "claims": claims_request.to_json()} + auth_req = urlparse(provider_config["authorization_endpoint"]).path + "?" 
+ urlencode(req_args) + + # make auth req to proxy + proxied_auth_req = test_client.get(auth_req) + assert proxied_auth_req.status == "303 See Other" + + # config test IdP + backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) + idp_conf["metadata"]["inline"].append(backend_metadata_str) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + + # create auth resp + req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) + url, authn_resp = fakeidp.handle_auth_req( + req_params["SAMLRequest"], + req_params["RelayState"], + BINDING_HTTP_REDIRECT, + subject_id, + response_binding=BINDING_HTTP_REDIRECT) + + # make auth resp to proxy + authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp) + authn_resp = test_client.get(authn_resp_req) + assert authn_resp.status == "303 See Other" + + # verify auth resp from proxy + resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).fragment)) + signing_key = RSAKey(key=rsa_load(oidc_stateless_frontend_config["config"]["signing_key_path"]), + use="sig", alg="RS256") + id_token_claims = JWS().verify_compact(resp_dict["id_token"], keys=[signing_key]) + + assert all( + (name, values) in id_token_claims.items() + for name, values in OIDC_USERS[subject_id].items() + ) + + def test_full_stateless_code_flow(self, satosa_config_dict, oidc_stateless_frontend_config, saml_backend_config, idp_conf): + subject_id = "testuser1" + + # proxy config + satosa_config_dict["FRONTEND_MODULES"] = [oidc_stateless_frontend_config] + satosa_config_dict["BACKEND_MODULES"] = [saml_backend_config] + satosa_config_dict["INTERNAL_ATTRIBUTES"]["attributes"] = {attr_name: {"openid": [attr_name], + "saml": [attr_name]} + for attr_name in USERS[subject_id]} + _, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) + + # application + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) + + # get frontend OP config info + 
provider_config = json.loads(test_client.get("/.well-known/openid-configuration").data.decode("utf-8")) + + # create auth req + claims_request = ClaimsRequest(id_token=Claims(**{k: None for k in USERS[subject_id]})) + req_args = {"scope": "openid", "response_type": "code", "client_id": CLIENT_ID, + "redirect_uri": REDIRECT_URI, "nonce": "nonce", + "claims": claims_request.to_json()} + auth_req = urlparse(provider_config["authorization_endpoint"]).path + "?" + urlencode(req_args) + + # make auth req to proxy + proxied_auth_req = test_client.get(auth_req) + assert proxied_auth_req.status == "303 See Other" + + # config test IdP + backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) + idp_conf["metadata"]["inline"].append(backend_metadata_str) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + + # create auth resp + req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) + url, authn_resp = fakeidp.handle_auth_req( + req_params["SAMLRequest"], + req_params["RelayState"], + BINDING_HTTP_REDIRECT, + subject_id, + response_binding=BINDING_HTTP_REDIRECT) + + # make auth resp to proxy + authn_resp_req = urlparse(url).path + "?" 
+ urlencode(authn_resp) + authn_resp = test_client.get(authn_resp_req) + assert authn_resp.status == "303 See Other" + + resp_dict = dict(parse_qsl(urlparse(authn_resp.data.decode("utf-8")).query)) + code = resp_dict["code"] + client_id_secret_str = CLIENT_ID + ":" + CLIENT_SECRET + auth_header = "Basic %s" % base64.b64encode(client_id_secret_str.encode()).decode() + + authn_resp = test_client.post(provider_config["token_endpoint"], + data={ + "code": code, + "grant_type": "authorization_code", + "redirect_uri": CLIENT_REDIRECT_URI + }, + headers={'Authorization': auth_header}) + + assert authn_resp.status == "200 OK" + + # verify auth resp from proxy + resp_dict = json.loads(authn_resp.data.decode("utf-8")) + signing_key = RSAKey(key=rsa_load(oidc_stateless_frontend_config["config"]["signing_key_path"]), + use="sig", alg="RS256") + id_token_claims = JWS().verify_compact(resp_dict["id_token"], keys=[signing_key]) + + assert all( + (name, values) in id_token_claims.items() + for name, values in OIDC_USERS[subject_id].items() + ) diff --git a/tests/flows/test_saml-oidc.py b/tests/flows/test_saml-oidc.py index b0068cc50..bc41acfe1 100644 --- a/tests/flows/test_saml-oidc.py +++ b/tests/flows/test_saml-oidc.py @@ -5,7 +5,7 @@ from saml2 import BINDING_HTTP_REDIRECT from saml2.config import SPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -27,12 +27,12 @@ def run_test(self, satosa_config_dict, sp_conf, oidc_backend_config, frontend_co frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # config test SP frontend_metadata_str = 
str(frontend_metadata[frontend_config["name"]][0]) sp_conf["metadata"]["inline"].append(frontend_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) # create auth req destination, req_args = fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id) diff --git a/tests/flows/test_saml-saml.py b/tests/flows/test_saml-saml.py index 29f20fc0f..91c350495 100644 --- a/tests/flows/test_saml-saml.py +++ b/tests/flows/test_saml-saml.py @@ -3,7 +3,7 @@ from saml2 import BINDING_HTTP_REDIRECT from saml2.config import SPConfig, IdPConfig from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.metadata_creation.saml_metadata import create_entity_descriptors from satosa.proxy_server import make_app @@ -23,12 +23,12 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f frontend_metadata, backend_metadata = create_entity_descriptors(SATOSAConfig(satosa_config_dict)) # application - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # config test SP frontend_metadata_str = str(frontend_metadata[frontend_config["name"]][0]) sp_conf["metadata"]["inline"].append(frontend_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) # create auth req destination, req_args = fakesp.make_auth_req(frontend_metadata[frontend_config["name"]][0].entity_id) @@ -41,7 +41,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f # config test IdP backend_metadata_str = str(backend_metadata[saml_backend_config["name"]][0]) idp_conf["metadata"]["inline"].append(backend_metadata_str) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, 
config=IdPConfig().load(idp_conf)) # create auth resp req_params = dict(parse_qsl(urlparse(proxied_auth_req.data.decode("utf-8")).query)) @@ -54,7 +54,7 @@ def run_test(self, satosa_config_dict, sp_conf, idp_conf, saml_backend_config, f # make auth resp to proxy authn_resp_req = urlparse(url).path + "?" + urlencode(authn_resp) - authn_resp = test_client.get("/" + authn_resp_req) + authn_resp = test_client.get(authn_resp_req) assert authn_resp.status == "303 See Other" # verify auth resp from proxy diff --git a/tests/flows/test_wsgi_flow.py b/tests/flows/test_wsgi_flow.py index 08d4d4a3d..ab9d636f5 100644 --- a/tests/flows/test_wsgi_flow.py +++ b/tests/flows/test_wsgi_flow.py @@ -1,10 +1,8 @@ """ Complete test for a SAML to SAML proxy. """ -import json - from werkzeug.test import Client -from werkzeug.wrappers import BaseResponse +from werkzeug.wrappers import Response from satosa.proxy_server import make_app from satosa.response import NotFound @@ -21,7 +19,7 @@ def test_flow(self, satosa_config_dict): """ Performs the test. 
""" - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) # Make request to frontend resp = test_client.get('/{}/{}/request'.format("backend", "frontend")) @@ -35,7 +33,7 @@ def test_flow(self, satosa_config_dict): assert resp.data.decode('utf-8') == "Auth response received, passed to test frontend" def test_unknown_request_path(self, satosa_config_dict): - test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), BaseResponse) + test_client = Client(make_app(SATOSAConfig(satosa_config_dict)), Response) resp = test_client.get('/unknown') assert resp.status == NotFound._status diff --git a/tests/satosa/backends/test_bitbucket.py b/tests/satosa/backends/test_bitbucket.py index 192c55a84..d6cf25bac 100644 --- a/tests/satosa/backends/test_bitbucket.py +++ b/tests/satosa/backends/test_bitbucket.py @@ -159,7 +159,6 @@ def test_authn_response(self, incoming_authn_response): mock_do_access_token_request self.bb_backend._authn_response(incoming_authn_response) - assert self.bb_backend.name not in incoming_authn_response.state self.assert_expected_attributes() self.assert_token_request(**mock_do_access_token_request.call_args[1]) @@ -190,5 +189,4 @@ def test_entire_flow(self, context): "state": mock_get_state.return_value } self.bb_backend._authn_response(context) - assert self.bb_backend.name not in context.state self.assert_expected_attributes() diff --git a/tests/satosa/backends/test_idpy_oidc.py b/tests/satosa/backends/test_idpy_oidc.py new file mode 100644 index 000000000..95e8b427c --- /dev/null +++ b/tests/satosa/backends/test_idpy_oidc.py @@ -0,0 +1,235 @@ +import json +import re +import time +from datetime import datetime +from unittest.mock import Mock +from urllib.parse import parse_qsl +from urllib.parse import urlparse + +from cryptojwt.key_jar import build_keyjar +from idpyoidc.client.defaults import DEFAULT_KEY_DEFS +from 
idpyoidc.client.oauth2.stand_alone_client import StandAloneClient +from idpyoidc.message.oidc import AuthorizationResponse +from idpyoidc.message.oidc import IdToken +from oic.oic import AuthorizationRequest +import pytest +import responses + +from satosa.backends.idpy_oidc import IdpyOIDCBackend +from satosa.context import Context +from satosa.internal import InternalData +from satosa.response import Response + +ISSUER = "https://provider.example.com" +CLIENT_ID = "test_client" +CLIENT_BASE_URL = "https://client.test.com" +NONCE = "the nonce" + + +class TestIdpyOIDCBackend(object): + @pytest.fixture + def backend_config(self): + return { + "client": { + "base_url": CLIENT_BASE_URL, + "client_id": CLIENT_ID, + "client_type": "oidc", + "client_secret": "ZJYCqe3GGRvdrudKyZS0XhGv_Z45DuKhCUk0gBR1vZk", + "application_type": "web", + "application_name": "SATOSA Test", + "contacts": ["ops@example.com"], + "response_types_supported": ["code"], + "response_type": "code id_token token", + "scope": "openid foo", + "key_conf": {"key_defs": DEFAULT_KEY_DEFS}, + "jwks_uri": f"{CLIENT_BASE_URL}/jwks.json", + "provider_info": { + "issuer": ISSUER, + "authorization_endpoint": f"{ISSUER}/authn", + "token_endpoint": f"{ISSUER}/token", + "userinfo_endpoint": f"{ISSUER}/user", + "jwks_uri": f"{ISSUER}/static/jwks" + } + } + } + + @pytest.fixture + def internal_attributes(self): + return { + "attributes": { + "givenname": {"openid": ["given_name"]}, + "mail": {"openid": ["email"]}, + "edupersontargetedid": {"openid": ["sub"]}, + "surname": {"openid": ["family_name"]} + } + } + + @pytest.fixture(autouse=True) + @responses.activate + def create_backend(self, internal_attributes, backend_config): + base_url = backend_config['client']['base_url'] + self.issuer_keys = build_keyjar(DEFAULT_KEY_DEFS) + with responses.RequestsMock() as rsps: + rsps.add( + responses.GET, + backend_config['client']['provider_info']['jwks_uri'], + body=self.issuer_keys.export_jwks_as_json(), + status=200, + 
content_type="application/json") + + self.oidc_backend = IdpyOIDCBackend(Mock(), internal_attributes, backend_config, + base_url, "oidc") + + @pytest.fixture + def userinfo(self): + return { + "given_name": "Test", + "family_name": "Devsson", + "email": "test_dev@example.com", + "sub": "username" + } + + @pytest.fixture + def id_token(self, userinfo): + issuer_keys = build_keyjar(DEFAULT_KEY_DEFS) + signing_key = issuer_keys.get_signing_key(key_type='RSA')[0] + signing_key.alg = "RS256" + auth_time = int(datetime.utcnow().timestamp()) + id_token_claims = { + "auth_time": auth_time, + "iss": ISSUER, + "sub": userinfo["sub"], + "aud": CLIENT_ID, + "nonce": NONCE, + "exp": auth_time + 3600, + "iat": auth_time, + } + id_token = IdToken(**id_token_claims) + return id_token + + @pytest.fixture + def all_user_claims(self, userinfo, id_token): + all_user_claims = {**userinfo, **id_token} + return all_user_claims + + def test_client(self, backend_config): + assert isinstance(self.oidc_backend.client, StandAloneClient) + # 3 signing keys. 
One RSA, one EC and one symmetric + assert len(self.oidc_backend.client.context.keyjar.get_signing_key()) == 3 + assert self.oidc_backend.client.context.jwks_uri == backend_config['client']['jwks_uri'] + + def assert_expected_attributes(self, attr_map, user_claims, actual_attributes): + expected_attributes = { + out_attr: [user_claims[in_mapping["openid"][0]]] + for out_attr, in_mapping in attr_map["attributes"].items() + } + assert actual_attributes == expected_attributes + + def setup_token_endpoint(self, userinfo): + _client = self.oidc_backend.client + signing_key = self.issuer_keys.get_signing_key(key_type='RSA')[0] + signing_key.alg = "RS256" + id_token_claims = { + "iss": ISSUER, + "sub": userinfo["sub"], + "aud": CLIENT_ID, + "nonce": NONCE, + "exp": time.time() + 3600, + "iat": time.time() + } + id_token = IdToken(**id_token_claims).to_jwt([signing_key], algorithm=signing_key.alg) + token_response = { + "access_token": "SlAV32hkKG", + "token_type": "Bearer", + "refresh_token": "8xLOxBtZp8", + "expires_in": 3600, + "id_token": id_token + } + responses.add(responses.POST, + _client.context.provider_info['token_endpoint'], + body=json.dumps(token_response), + status=200, + content_type="application/json") + + def setup_userinfo_endpoint(self, userinfo): + responses.add(responses.GET, + self.oidc_backend.client.context.provider_info['userinfo_endpoint'], + body=json.dumps(userinfo), + status=200, + content_type="application/json") + + @pytest.fixture + def incoming_authn_response(self): + _context = self.oidc_backend.client.context + oidc_state = "my state" + _uri = _context.claims.get_usage("redirect_uris")[0] + _request = AuthorizationRequest( + redirect_uri=_uri, + response_type="code", + client_id=_context.get_client_id(), + scope=_context.claims.get_usage("scope"), + nonce=NONCE + ) + _context.cstate.set(oidc_state, {"iss": _context.issuer}) + _context.cstate.bind_key(NONCE, oidc_state) + _context.cstate.update(oidc_state, _request) + + response = 
AuthorizationResponse( + code="F+R4uWbN46U+Bq9moQPC4lEvRd2De4o=", + state=oidc_state, + iss=_context.issuer, + nonce=NONCE + ) + return response.to_dict() + + def test_register_endpoints(self): + _uri = self.oidc_backend.client.context.claims.get_usage("redirect_uris")[0] + redirect_uri_path = urlparse(_uri).path.lstrip('/') + url_map = self.oidc_backend.register_endpoints() + regex, callback = url_map[0] + assert re.search(regex, redirect_uri_path) + assert callback == self.oidc_backend.response_endpoint + + def test_translate_response_to_internal_response(self, all_user_claims): + internal_response = self.oidc_backend._translate_response(all_user_claims, ISSUER) + assert internal_response.subject_id == all_user_claims["sub"] + self.assert_expected_attributes( + self.oidc_backend.internal_attributes, + all_user_claims, + internal_response.attributes, + ) + + @responses.activate + def test_response_endpoint(self, context, all_user_claims, incoming_authn_response): + self.setup_token_endpoint(all_user_claims) + self.setup_userinfo_endpoint(all_user_claims) + + response_context = Context() + response_context.request = incoming_authn_response + response_context.state = context.state + + self.oidc_backend.response_endpoint(response_context) + + args = self.oidc_backend.auth_callback_func.call_args[0] + assert isinstance(args[0], Context) + assert isinstance(args[1], InternalData) + self.assert_expected_attributes( + self.oidc_backend.internal_attributes, all_user_claims, args[1].attributes + ) + + def test_start_auth_redirects_to_provider_authorization_endpoint(self, context): + _client = self.oidc_backend.client + auth_response = self.oidc_backend.start_auth(context, None) + assert isinstance(auth_response, Response) + + login_url = auth_response.message + parsed = urlparse(login_url) + assert login_url.startswith(_client.context.provider_info["authorization_endpoint"]) + auth_params = dict(parse_qsl(parsed.query)) + assert auth_params["scope"] == " 
".join(_client.context.claims.get_usage("scope")) + assert auth_params["response_type"] == _client.context.claims.get_usage("response_types")[0] + assert auth_params["client_id"] == _client.client_id + assert auth_params["redirect_uri"] == _client.context.claims.get_usage("redirect_uris")[0] + assert "state" in auth_params + assert "nonce" in auth_params + diff --git a/tests/satosa/backends/test_oauth.py b/tests/satosa/backends/test_oauth.py index 0100cfaa9..22afc8ee7 100644 --- a/tests/satosa/backends/test_oauth.py +++ b/tests/satosa/backends/test_oauth.py @@ -136,7 +136,6 @@ def test_authn_response(self, incoming_authn_response): self.fb_backend.consumer.do_access_token_request = mock_do_access_token_request self.fb_backend._authn_response(incoming_authn_response) - assert self.fb_backend.name not in incoming_authn_response.state self.assert_expected_attributes() self.assert_token_request(**mock_do_access_token_request.call_args[1]) @@ -164,5 +163,4 @@ def test_entire_flow(self, context): "state": mock_get_state.return_value } self.fb_backend._authn_response(context) - assert self.fb_backend.name not in context.state self.assert_expected_attributes() diff --git a/tests/satosa/backends/test_openid_connect.py b/tests/satosa/backends/test_openid_connect.py index b282e7725..34bac79fe 100644 --- a/tests/satosa/backends/test_openid_connect.py +++ b/tests/satosa/backends/test_openid_connect.py @@ -9,7 +9,7 @@ import responses from Cryptodome.PublicKey import RSA from jwkest.jwk import RSAKey -from oic.oic.message import RegistrationRequest, IdToken +from oic.oic.message import IdToken from oic.utils.authn.client import CLIENT_AUTHN_METHOD from satosa.backends.openid_connect import OpenIDConnectBackend, _create_client, STATE_KEY, NONCE_KEY @@ -163,7 +163,6 @@ def test_response_endpoint(self, backend_config, internal_attributes, userinfo, self.setup_userinfo_endpoint(backend_config["provider_metadata"]["userinfo_endpoint"], userinfo) 
self.oidc_backend.response_endpoint(incoming_authn_response) - assert self.oidc_backend.name not in incoming_authn_response.state args = self.oidc_backend.auth_callback_func.call_args[0] assert isinstance(args[0], Context) @@ -198,7 +197,6 @@ def test_entire_flow(self, context, backend_config, internal_attributes, userinf "token_type": "Bearer", } self.oidc_backend.response_endpoint(context) - assert self.oidc_backend.name not in context.state args = self.oidc_backend.auth_callback_func.call_args[0] self.assert_expected_attributes(internal_attributes, userinfo, args[1].attributes) diff --git a/tests/satosa/backends/test_saml2.py b/tests/satosa/backends/test_saml2.py index e5e2d905c..e1cc96466 100644 --- a/tests/satosa/backends/test_saml2.py +++ b/tests/satosa/backends/test_saml2.py @@ -12,13 +12,17 @@ import pytest import saml2 -from saml2 import BINDING_HTTP_REDIRECT +from saml2 import BINDING_HTTP_REDIRECT, BINDING_HTTP_POST from saml2.authn_context import PASSWORD from saml2.config import IdPConfig, SPConfig +from saml2.entity import Entity +from saml2.samlp import authn_request_from_string from saml2.s_utils import deflate_and_base64_encode from satosa.backends.saml2 import SAMLBackend from satosa.context import Context +from satosa.exception import SATOSAAuthenticationError +from satosa.exception import SATOSAMissingStateError from satosa.internal import InternalData from tests.users import USERS from tests.util import FakeIdP, create_metadata_from_config_dict, FakeSP @@ -118,7 +122,7 @@ def test_discovery_server_set_in_context(self, context, sp_conf): def test_full_flow(self, context, idp_conf, sp_conf): test_state_key = "test_state_key_456afgrh" response_binding = BINDING_HTTP_REDIRECT - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) context.state[test_state_key] = "my_state" @@ -135,6 +139,7 @@ def test_full_flow(self, context, idp_conf, sp_conf): 
request_context.state = context.state # pass discovery response to backend and check that it redirects to the selected IdP + context.state["SATOSA_BASE"] = {"requester": "the-service-identifier"} resp = self.samlbackend.disco_response(request_context) assert_redirect_to_idp(resp, idp_conf) @@ -153,7 +158,6 @@ def test_full_flow(self, context, idp_conf, sp_conf): # pass auth response to backend and verify behavior self.samlbackend.authn_response(response_context, response_binding) context, internal_resp = self.samlbackend.auth_callback_func.call_args[0] - assert self.samlbackend.name not in context.state assert context.state[test_state_key] == "my_state" assert_authn_response(internal_resp) @@ -179,46 +183,132 @@ def test_authn_request(self, context, idp_conf): req_params = dict(parse_qsl(urlparse(resp.message).query)) assert context.state[self.samlbackend.name]["relay_state"] == req_params["RelayState"] + @pytest.mark.parametrize("hostname", ["example.com:8443", "example.net"]) + @pytest.mark.parametrize( + "strat", + ["", "use_first_acs", "prefer_matching_host", "invalid"], + ) + def test_acs_selection_strategy(self, context, sp_conf, idp_conf, hostname, strat): + acs_endpoints = [ + ("https://example.com/saml2/acs/post", BINDING_HTTP_POST), + ("https://example.net/saml2/acs/post", BINDING_HTTP_POST), + ("https://example.com:8443/saml2/acs/post", BINDING_HTTP_POST), + ] + config = {"sp_config": sp_conf} + config["sp_config"]["service"]["sp"]["endpoints"][ + "assertion_consumer_service" + ] = acs_endpoints + if strat: + config["acs_selection_strategy"] = strat + + req = self._make_authn_request(hostname, context, config, idp_conf["entityid"]) + + if strat == "prefer_matching_host": + expected_acs = hostname + else: + expected_acs = urlparse(acs_endpoints[0][0]).netloc + assert urlparse(req.assertion_consumer_service_url).netloc == expected_acs + + def _make_authn_request(self, http_host, context, config, entity_id): + context.http_headers = {"HTTP_HOST": http_host} 
if http_host else {} + self.samlbackend = SAMLBackend( + Mock(), + INTERNAL_ATTRIBUTES, + config, + "base_url", + "samlbackend", + ) + resp = self.samlbackend.authn_request(context, entity_id) + req_params = dict(parse_qsl(urlparse(resp.message).query)) + req_xml = Entity.unravel(req_params["SAMLRequest"], BINDING_HTTP_REDIRECT) + return authn_request_from_string(req_xml) + + @pytest.mark.parametrize("hostname", ["unknown-hostname", None]) + def test_unknown_or_no_hostname_selects_first_acs( + self, context, sp_conf, idp_conf, hostname + ): + config = {"sp_config": sp_conf} + config["sp_config"]["service"]["sp"]["endpoints"][ + "assertion_consumer_service" + ] = ( + ("https://first-hostname/saml2/acs/post", BINDING_HTTP_POST), + ("https://other-hostname/saml2/acs/post", BINDING_HTTP_POST), + ) + config["acs_selection_strategy"] = "prefer_matching_host" + req = self._make_authn_request(hostname, context, config, idp_conf["entityid"]) + assert ( + req.assertion_consumer_service_url + == "https://first-hostname/saml2/acs/post" + ) + def test_authn_response(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) - fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf, metadata_construction=False)) - destination, request_params = fakesp.make_auth_req(idp_conf["entityid"]) - url, auth_resp = fakeidp.handle_auth_req(request_params["SAMLRequest"], request_params["RelayState"], - BINDING_HTTP_REDIRECT, - "testuser1", response_binding=response_binding) - + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) context.request = auth_resp context.state[self.samlbackend.name] = {"relay_state": request_params["RelayState"]} self.samlbackend.authn_response(context, response_binding) context, internal_resp = self.samlbackend.auth_callback_func.call_args[0] assert_authn_response(internal_resp) - assert self.samlbackend.name not in 
context.state - @pytest.mark.skipif( - saml2.__version__ < '4.6.1', - reason="Optional NameID needs pysaml2 v4.6.1 or higher") - def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): + def _perform_request_response( + self, idp_conf, sp_conf, response_binding, receive_nameid=True + ): + fakesp = FakeSP(SPConfig().load(sp_conf)) + fakeidp = FakeIdP(USERS, config=IdPConfig().load(idp_conf)) + destination, request_params = fakesp.make_auth_req(idp_conf["entityid"]) + auth_resp_func = ( + fakeidp.handle_auth_req + if receive_nameid + else fakeidp.handle_auth_req_no_name_id + ) + url, auth_resp = auth_resp_func( + request_params["SAMLRequest"], + request_params["RelayState"], + BINDING_HTTP_REDIRECT, + "testuser1", + response_binding=response_binding, + ) + + return request_params, auth_resp + + def test_no_state_raises_error(self, context, idp_conf, sp_conf): response_binding = BINDING_HTTP_REDIRECT + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) + context.request = auth_resp + # not setting context.state[self.samlbackend.name] + # to simulate a request with lost state + + with pytest.raises(SATOSAMissingStateError): + self.samlbackend.authn_response(context, response_binding) - fakesp_conf = SPConfig().load(sp_conf, metadata_construction=False) - fakesp = FakeSP(fakesp_conf) + def test_no_relay_state_raises_error(self, context, idp_conf, sp_conf): + response_binding = BINDING_HTTP_REDIRECT + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding + ) + context.request = auth_resp + # not setting context.state[self.samlbackend.name]["relay_state"] + # to simulate a request without a relay state + context.state[self.samlbackend.name] = {} - fakeidp_conf = IdPConfig().load(idp_conf, metadata_construction=False) - fakeidp = FakeIdP(USERS, config=fakeidp_conf) + with pytest.raises(SATOSAAuthenticationError): + self.samlbackend.authn_response(context, 
response_binding) - destination, request_params = fakesp.make_auth_req( - idp_conf["entityid"]) + @pytest.mark.skipif( + saml2.__version__ < '4.6.1', + reason="Optional NameID needs pysaml2 v4.6.1 or higher" + ) + def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): + response_binding = BINDING_HTTP_REDIRECT - # Use the fake IdP to mock up an authentication request that has no - # element. - url, auth_resp = fakeidp.handle_auth_req_no_name_id( - request_params["SAMLRequest"], - request_params["RelayState"], - BINDING_HTTP_REDIRECT, - "testuser1", - response_binding=response_binding) + request_params, auth_resp = self._perform_request_response( + idp_conf, sp_conf, response_binding, receive_nameid=False + ) backend = self.samlbackend @@ -230,7 +320,6 @@ def test_authn_response_no_name_id(self, context, idp_conf, sp_conf): context, internal_resp = backend.auth_callback_func.call_args[0] assert_authn_response(internal_resp) - assert backend.name not in context.state def test_authn_response_with_encrypted_assertion(self, sp_conf, context): with open(os.path.join( diff --git a/tests/satosa/frontends/test_openid_connect.py b/tests/satosa/frontends/test_openid_connect.py index b33a16703..f769b2c66 100644 --- a/tests/satosa/frontends/test_openid_connect.py +++ b/tests/satosa/frontends/test_openid_connect.py @@ -71,6 +71,21 @@ def frontend_config_with_extra_scopes(self, signing_key_path): return config + @pytest.fixture + def frontend_config_with_extra_id_token_claims(self, signing_key_path): + config = { + "signing_key_path": signing_key_path, + "provider": { + "response_types_supported": ["code", "id_token", "code id_token token"], + "scopes_supported": ["openid", "email"], + "extra_id_token_claims": { + CLIENT_ID: ["email"], + } + }, + } + + return config + def create_frontend(self, frontend_config): # will use in-memory storage instance = OpenIDConnectFrontend(lambda ctx, req: None, INTERNAL_ATTRIBUTES, @@ -387,6 +402,26 @@ def 
test_register_endpoints_dynamic_client_registration_is_configurable( provider_info = ProviderConfigurationResponse().deserialize(frontend.provider_config(None).message, "json") assert ("registration_endpoint" in provider_info) == client_registration_enabled + @pytest.mark.parametrize("sub_mirror_public", [ + True, + False + ]) + def test_mirrored_subject(self, context, frontend_config, authn_req, sub_mirror_public): + frontend_config["sub_mirror_public"] = sub_mirror_public + frontend_config["provider"]["subject_types_supported"] = ["public"] + frontend = self.create_frontend(frontend_config) + + self.insert_client_in_client_db(frontend, authn_req["redirect_uri"]) + internal_response = self.setup_for_authn_response(context, frontend, authn_req) + http_resp = frontend.handle_authn_response(context, internal_response) + + resp = AuthorizationResponse().deserialize(urlparse(http_resp.message).fragment) + id_token = IdToken().from_jwt(resp["id_token"], key=[frontend.signing_key]) + if sub_mirror_public: + assert id_token["sub"] == OIDC_USERS["testuser1"]["eduPersonTargetedID"][0] + else: + assert id_token["sub"] != OIDC_USERS["testuser1"]["eduPersonTargetedID"][0] + def test_token_endpoint(self, context, frontend_config, authn_req): token_lifetime = 60 * 60 * 24 frontend_config["provider"]["access_token_lifetime"] = token_lifetime @@ -409,6 +444,27 @@ def test_token_endpoint(self, context, frontend_config, authn_req): assert parsed["expires_in"] == token_lifetime assert parsed["id_token"] + def test_token_endpoint_with_extra_claims(self, context, frontend_config_with_extra_id_token_claims, authn_req): + frontend = self.create_frontend(frontend_config_with_extra_id_token_claims) + + user_id = "test_user" + self.insert_client_in_client_db(frontend, authn_req["redirect_uri"]) + self.insert_user_in_user_db(frontend, user_id) + authn_req["response_type"] = "code" + authn_resp = frontend.provider.authorize(authn_req, user_id) + + context.request = 
AccessTokenRequest(redirect_uri=authn_req["redirect_uri"], code=authn_resp["code"]).to_dict() + credentials = "{}:{}".format(CLIENT_ID, CLIENT_SECRET) + basic_auth = urlsafe_b64encode(credentials.encode("utf-8")).decode("utf-8") + context.request_authorization = "Basic {}".format(basic_auth) + + response = frontend.token_endpoint(context) + parsed = AccessTokenResponse().deserialize(response.message, "json") + assert parsed["access_token"] + + id_token = IdToken().from_jwt(parsed["id_token"], key=[frontend.signing_key]) + assert id_token["email"] == "test@example.com" + def test_token_endpoint_issues_refresh_tokens_if_configured(self, context, frontend_config, authn_req): frontend_config["provider"]["refresh_token_lifetime"] = 60 * 60 * 24 * 365 frontend = OpenIDConnectFrontend(lambda ctx, req: None, INTERNAL_ATTRIBUTES, @@ -501,7 +557,7 @@ def test_full_flow(self, context, frontend_with_extra_scopes): frontend_with_extra_scopes.auth_req_callback_func = mock_callback # discovery http_response = frontend_with_extra_scopes.provider_config(context) - provider_config = ProviderConfigurationResponse().deserialize(http_response.message, "json") + _ = ProviderConfigurationResponse().deserialize(http_response.message, "json") # client registration registration_request = RegistrationRequest(redirect_uris=[redirect_uri], response_types=[response_type]) diff --git a/tests/satosa/frontends/test_saml2.py b/tests/satosa/frontends/test_saml2.py index 3e89fd2fa..978489429 100644 --- a/tests/satosa/frontends/test_saml2.py +++ b/tests/satosa/frontends/test_saml2.py @@ -1,6 +1,7 @@ """ Tests for the SAML frontend module src/frontends/saml2.py. 
""" +import copy import itertools import re from collections import Counter @@ -28,7 +29,6 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.state import State -from satosa.context import Context from tests.users import USERS from tests.util import FakeSP, create_metadata_from_config_dict @@ -75,7 +75,7 @@ def setup_for_authn_req(self, context, idp_conf, sp_conf, nameid_format=None, re idp_metadata_str = create_metadata_from_config_dict(samlfrontend.idp_config) sp_conf["metadata"]["inline"].append(idp_metadata_str) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) destination, auth_req = fakesp.make_auth_req( samlfrontend.idp_config["entityid"], nameid_format, @@ -94,7 +94,7 @@ def setup_for_authn_req(self, context, idp_conf, sp_conf, nameid_format=None, re return samlfrontend def get_auth_response(self, samlfrontend, context, internal_response, sp_conf, idp_metadata_str): - sp_config = SPConfig().load(sp_conf, metadata_construction=False) + sp_config = SPConfig().load(sp_conf) resp_args = { "name_id_policy": NameIDPolicy(format=NAMEID_FORMAT_TRANSIENT), "in_response_to": None, @@ -150,7 +150,7 @@ def test_handle_authn_request(self, context, idp_conf, sp_conf, internal_respons resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) for key in resp.ava: @@ -189,7 +189,7 @@ def test_handle_authn_response_without_relay_state(self, context, idp_conf, sp_c resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = 
FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response(resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) @@ -213,7 +213,7 @@ def test_handle_authn_response_without_name_id( resp = samlfrontend.handle_authn_response(context, internal_response) resp_dict = parse_qs(urlparse(resp.message).query) - fakesp = FakeSP(SPConfig().load(sp_conf, metadata_construction=False)) + fakesp = FakeSP(SPConfig().load(sp_conf)) resp = fakesp.parse_authn_request_response( resp_dict["SAMLResponse"][0], BINDING_HTTP_REDIRECT) @@ -285,17 +285,28 @@ def test_acr_mapping_per_idp_in_authn_response(self, context, idp_conf, sp_conf, authn_context_class_ref = resp.assertion.authn_statement[0].authn_context.authn_context_class_ref assert authn_context_class_ref.text == expected_loa - @pytest.mark.parametrize("entity_category, entity_category_module, expected_attributes", [ - ([""], "swamid", swamid.RELEASE[""]), - ([COCO], "edugain", edugain.RELEASE[""] + edugain.RELEASE[COCO]), - ([RESEARCH_AND_SCHOLARSHIP], "refeds", refeds.RELEASE[""] + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP]), - ([RESEARCH_AND_EDUCATION, EU], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)]), - ([RESEARCH_AND_EDUCATION, HEI], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)]), - ([RESEARCH_AND_EDUCATION, NREN], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)]), - ([SFS_1993_1153], "swamid", swamid.RELEASE[""] + swamid.RELEASE[SFS_1993_1153]), - ]) - def test_respect_sp_entity_categories(self, context, entity_category, entity_category_module, expected_attributes, - idp_conf, sp_conf, internal_response): + @pytest.mark.parametrize( + "entity_category, entity_category_module, expected_attributes", + [ + ([""], "swamid", swamid.RELEASE[""]), + ([COCO], "edugain", edugain.RELEASE[""] + edugain.RELEASE[COCO]), + ([RESEARCH_AND_SCHOLARSHIP], "refeds", refeds.RELEASE[""] + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP]), + 
([RESEARCH_AND_EDUCATION, EU], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)]), + ([RESEARCH_AND_EDUCATION, HEI], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)]), + ([RESEARCH_AND_EDUCATION, NREN], "swamid", swamid.RELEASE[""] + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)]), + ([SFS_1993_1153], "swamid", swamid.RELEASE[""] + swamid.RELEASE[SFS_1993_1153]), + ] + ) + def test_respect_sp_entity_categories( + self, + context, + entity_category, + entity_category_module, + expected_attributes, + idp_conf, + sp_conf, + internal_response + ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = [entity_category_module] if all(entity_category): # don't insert empty entity category @@ -303,10 +314,18 @@ def test_respect_sp_entity_categories(self, context, entity_category, entity_cat if entity_category == [COCO]: sp_conf["service"]["sp"]["required_attributes"] = expected_attributes - expected_attributes_in_all_entity_categories = list( - itertools.chain(swamid.RELEASE[""], edugain.RELEASE[COCO], refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP], - swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)], swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)], - swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)], swamid.RELEASE[SFS_1993_1153])) + expected_attributes_in_all_entity_categories = set( + itertools.chain( + swamid.RELEASE[""], + edugain.RELEASE[""], + edugain.RELEASE[COCO], + refeds.RELEASE[RESEARCH_AND_SCHOLARSHIP], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, EU)], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, HEI)], + swamid.RELEASE[(RESEARCH_AND_EDUCATION, NREN)], + swamid.RELEASE[SFS_1993_1153], + ) + ) attribute_mapping = {} for expected_attribute in expected_attributes_in_all_entity_categories: attribute_mapping[expected_attribute.lower()] = {"saml": [expected_attribute]} @@ -345,8 +364,9 @@ def test_metadata_endpoint(self, context, idp_conf): assert 
headers["Content-Type"] == "text/xml" assert idp_conf["entityid"] in resp.message - def test_custom_attribute_release_with_less_attributes_than_entity_category(self, context, idp_conf, sp_conf, - internal_response): + def test_custom_attribute_release_with_less_attributes_than_entity_category( + self, context, idp_conf, sp_conf, internal_response + ): idp_metadata_str = create_metadata_from_config_dict(idp_conf) idp_conf["service"]["idp"]["policy"]["default"]["entity_categories"] = ["swamid"] sp_conf["entity_category"] = [SFS_1993_1153] @@ -364,8 +384,12 @@ def test_custom_attribute_release_with_less_attributes_than_entity_category(self samlfrontend = self.setup_for_authn_req(context, idp_conf, sp_conf, internal_attributes=internal_attributes, extra_config=dict(custom_attribute_release=custom_attributes)) + internal_response.requester = sp_conf["entityid"] resp = self.get_auth_response(samlfrontend, context, internal_response, sp_conf, idp_metadata_str) - assert len(resp.ava.keys()) == 0 + assert len(resp.ava.keys()) == ( + len(expected_attributes) + - len(custom_attributes[internal_response.auth_info.issuer][internal_response.requester]["exclude"]) + ) class TestSAMLMirrorFrontend: @@ -407,6 +431,7 @@ def test_load_idp_dynamic_entity_id(self, idp_conf): class TestSAMLVirtualCoFrontend(TestSAMLFrontend): BACKEND = "test_backend" + BACKEND_1 = "test_backend_1" CO = "MESS" CO_O = "organization" CO_C = "countryname" @@ -417,8 +442,8 @@ class TestSAMLVirtualCoFrontend(TestSAMLFrontend): CO_O: ["Medium Energy Synchrotron Source"], CO_C: ["US"], CO_CO: ["United States"], - CO_NOREDUORGACRONYM: ["MESS"] - } + CO_NOREDUORGACRONYM: ["MESS"], + } KEY_SSO = "single_sign_on_service" @pytest.fixture @@ -447,10 +472,10 @@ def frontend(self, idp_conf, sp_conf): # endpoints, and the collaborative organization configuration to # create the configuration for the frontend. 
conf = { - "idp_config": idp_conf, - "endpoints": ENDPOINTS, - "collaborative_organizations": [collab_org] - } + "idp_config": idp_conf, + "endpoints": ENDPOINTS, + "collaborative_organizations": [collab_org], + } # Use a richer set of internal attributes than what is provided # for the parent class so that we can test for the static SAML @@ -480,10 +505,13 @@ def context(self, context): that would be available during a SAML flow and that would include a path and target_backend that indicates the CO. """ - context.path = "{}/{}/sso/redirect".format(self.BACKEND, self.CO) - context.target_backend = self.BACKEND + return self._make_context(context, self.BACKEND, self.CO) - return context + def _make_context(self, context, backend, co_name): + _context = copy.deepcopy(context) + _context.path = "{}/{}/sso/redirect".format(backend, co_name) + _context.target_backend = backend + return _context def test_create_state_data(self, frontend, context, idp_conf): frontend._create_co_virtual_idp(context) @@ -518,6 +546,17 @@ def test_create_co_virtual_idp(self, frontend, context, idp_conf): assert idp_server.config.entityid == expected_entityid assert all(sso in sso_endpoints for sso in expected_endpoints) + def test_create_co_virtual_idp_with_entity_id_templates(self, frontend, context): + frontend.idp_config['entityid'] = "{}/Saml2IDP/proxy.xml".format(BASE_URL) + expected_entity_id = "{}/Saml2IDP/proxy.xml/{}".format(BASE_URL, self.CO) + idp_server = frontend._create_co_virtual_idp(context) + assert idp_server.config.entityid == expected_entity_id + + frontend.idp_config['entityid'] = "{}//idp/".format(BASE_URL) + expected_entity_id = "{}/{}/idp/{}".format(BASE_URL, context.target_backend, self.CO) + idp_server = frontend._create_co_virtual_idp(context) + assert idp_server.config.entityid == expected_entity_id + def test_register_endpoints(self, frontend, context): idp_server = frontend._create_co_virtual_idp(context) url_map = frontend.register_endpoints([self.BACKEND]) @@ 
-529,6 +568,28 @@ def test_register_endpoints(self, frontend, context): for endpoint in all_idp_endpoints: assert any(pat.match(endpoint) for pat in compiled_regex) + def test_register_endpoints_throws_error_in_case_duplicate_entity_ids(self, frontend): + with pytest.raises(ValueError): + frontend.register_endpoints([self.BACKEND, self.BACKEND_1]) + + def test_register_endpoints_with_metadata_endpoints(self, frontend, context): + frontend.idp_config['entityid'] = "{}//idp/".format(BASE_URL) + frontend.config['entityid_endpoint'] = True + idp_server_1 = frontend._create_co_virtual_idp(context) + context_2 = self._make_context(context, self.BACKEND_1, self.CO) + idp_server_2 = frontend._create_co_virtual_idp(context_2) + + url_map = frontend.register_endpoints([self.BACKEND, self.BACKEND_1]) + expected_idp_endpoints = [urlparse(endpoint[0]).path[1:] for server in [idp_server_1, idp_server_2] + for endpoint in server.config._idp_endpoints[self.KEY_SSO]] + for server in [idp_server_1, idp_server_2]: + expected_idp_endpoints.append(urlparse(server.config.entityid).path[1:]) + + compiled_regex = [re.compile(regex) for regex, _ in url_map] + + for endpoint in expected_idp_endpoints: + assert any(pat.match(endpoint) for pat in compiled_regex) + def test_co_static_attributes(self, frontend, context, internal_response, idp_conf, sp_conf): # Use the frontend and context fixtures to dynamically create the @@ -539,16 +600,15 @@ def test_co_static_attributes(self, frontend, context, internal_response, # and then use those to dynamically update the ipd_conf fixture. 
co_name = frontend._get_co_name(context) backend_name = context.target_backend - idp_conf = frontend._add_endpoints_to_config(idp_conf, co_name, - backend_name) - idp_conf = frontend._add_entity_id(context, idp_conf, co_name) + idp_conf = frontend._add_endpoints_to_config(idp_conf, co_name, backend_name) + idp_conf = frontend._add_entity_id(idp_conf, co_name, backend_name) # Use a utility function to serialize the idp_conf IdP configuration # fixture to a string and then dynamically update the sp_conf # SP configuration fixture with the metadata. idp_metadata_str = create_metadata_from_config_dict(idp_conf) sp_conf["metadata"]["inline"].append(idp_metadata_str) - sp_config = SPConfig().load(sp_conf, metadata_construction=False) + sp_config = SPConfig().load(sp_conf) # Use the updated sp_config fixture to generate a fake SP and then # use the fake SP to generate an authentication request aimed at the @@ -573,9 +633,9 @@ def test_co_static_attributes(self, frontend, context, internal_response, "name_id_policy": NameIDPolicy(format=NAMEID_FORMAT_TRANSIENT), "in_response_to": None, "destination": sp_config.endpoint( - "assertion_consumer_service", - binding=BINDING_HTTP_REDIRECT - )[0], + "assertion_consumer_service", + binding=BINDING_HTTP_REDIRECT + )[0], "sp_entity_id": sp_conf["entityid"], "binding": BINDING_HTTP_REDIRECT } @@ -622,12 +682,10 @@ def test_should_map_unspecified(self): def test_should_map_public(self): assert ( - subject_type_to_saml_nameid_format("public") - == NAMEID_FORMAT_PERSISTENT + subject_type_to_saml_nameid_format("public") == NAMEID_FORMAT_PERSISTENT ) def test_should_map_pairwise(self): assert ( - subject_type_to_saml_nameid_format("pairwise") - == NAMEID_FORMAT_TRANSIENT + subject_type_to_saml_nameid_format("pairwise") == NAMEID_FORMAT_TRANSIENT ) diff --git a/tests/satosa/metadata_creation/test_description.py b/tests/satosa/metadata_creation/test_description.py index 8b73ec923..818d01a03 100644 --- 
a/tests/satosa/metadata_creation/test_description.py +++ b/tests/satosa/metadata_creation/test_description.py @@ -1,5 +1,3 @@ -from unittest.mock import mock_open, patch - import pytest from satosa.metadata_creation.description import ContactPersonDesc, UIInfoDesc, OrganizationDesc, MetadataDescription @@ -26,12 +24,18 @@ def test_to_dict(self): desc.add_description("test", "en") desc.add_display_name("my company", "en") desc.add_logo("logo.jpg", 80, 80, "en") + desc.add_keywords(["kw1", "kw2"], "en") + desc.add_information_url("https://test", "en") + desc.add_privacy_statement_url("https://test", "en") serialized = desc.to_dict() ui_info = serialized["service"]["idp"]["ui_info"] assert ui_info["description"] == [{"text": "test", "lang": "en"}] assert ui_info["display_name"] == [{"text": "my company", "lang": "en"}] assert ui_info["logo"] == [{"text": "logo.jpg", "width": 80, "height": 80, "lang": "en"}] + assert ui_info["keywords"] == [{"text": ["kw1", "kw2"], "lang": "en"}] + assert ui_info["information_url"] == [{"text": "https://test", "lang": "en"}] + assert ui_info["privacy_statement_url"] == [{"text": "https://test", "lang": "en"}] def test_to_dict_for_logo_without_lang(self): desc = UIInfoDesc() diff --git a/tests/satosa/metadata_creation/test_saml_metadata.py b/tests/satosa/metadata_creation/test_saml_metadata.py index 49cff97a4..77e8ac1d7 100644 --- a/tests/satosa/metadata_creation/test_saml_metadata.py +++ b/tests/satosa/metadata_creation/test_saml_metadata.py @@ -236,7 +236,7 @@ def test_create_mirrored_metadata_does_not_contain_target_contact_info(self, sat class TestCreateSignedEntitiesDescriptor: @pytest.fixture def entity_desc(self, sp_conf): - return entity_descriptor(SPConfig().load(sp_conf, metadata_construction=True)) + return entity_descriptor(SPConfig().load(sp_conf)) @pytest.fixture def verification_security_context(self, cert_and_key): @@ -274,7 +274,7 @@ def test_valid_for(self, entity_desc, signature_security_context): class 
TestCreateSignedEntityDescriptor: @pytest.fixture def entity_desc(self, sp_conf): - return entity_descriptor(SPConfig().load(sp_conf, metadata_construction=True)) + return entity_descriptor(SPConfig().load(sp_conf)) @pytest.fixture def verification_security_context(self, cert_and_key): diff --git a/tests/satosa/micro_services/test_account_linking.py b/tests/satosa/micro_services/test_account_linking.py index 1c6dad5e4..859f3517d 100644 --- a/tests/satosa/micro_services/test_account_linking.py +++ b/tests/satosa/micro_services/test_account_linking.py @@ -3,7 +3,10 @@ import pytest import requests + import responses +from responses import matchers + from jwkest.jwk import rsa_load, RSAKey from jwkest.jws import JWS @@ -46,13 +49,15 @@ def test_existing_account_linking_with_known_known_uuid(self, account_linking_co } key = RSAKey(key=rsa_load(account_linking_config["sign_key"]), use="sig", alg="RS256") jws = JWS(json.dumps(data), alg=key.alg).sign_compact([key]) + url = "%s/get_id" % account_linking_config["api_url"] + params = {"jwt": jws} responses.add( responses.GET, - "%s/get_id?jwt=%s" % (account_linking_config["api_url"], jws), - status=200, + url=url, body=uuid, + match=[matchers.query_param_matcher(params)], content_type="text/html", - match_querystring=True + status=200, ) self.account_linking.process(context, internal_response) @@ -82,13 +87,15 @@ def test_full_flow(self, account_linking_config, internal_response, context): uuid = "uuid" with responses.RequestsMock() as rsps: # account is linked, 200 OK + url = "%s/get_id" % account_linking_config["api_url"] + params = {"jwt": jws} rsps.add( responses.GET, - "%s/get_id?jwt=%s" % (account_linking_config["api_url"], jws), - status=200, + url=url, body=uuid, + match=[matchers.query_param_matcher(params)], content_type="text/html", - match_querystring=True + status=200, ) internal_response = self.account_linking._handle_al_response(context) assert internal_response.subject_id == uuid diff --git 
a/tests/satosa/micro_services/test_attribute_authorization.py b/tests/satosa/micro_services/test_attribute_authorization.py index 4bd0cfc54..6fb277d15 100644 --- a/tests/satosa/micro_services/test_attribute_authorization.py +++ b/tests/satosa/micro_services/test_attribute_authorization.py @@ -1,3 +1,4 @@ +import pytest from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_authorization import AttributeAuthorization @@ -5,9 +6,23 @@ from satosa.context import Context class TestAttributeAuthorization: - def create_authz_service(self, attribute_allow, attribute_deny): - authz_service = AttributeAuthorization(config=dict(attribute_allow=attribute_allow,attribute_deny=attribute_deny), name="test_authz", - base_url="https://satosa.example.com") + def create_authz_service( + self, + attribute_allow, + attribute_deny, + force_attributes_presence_on_allow=False, + force_attributes_presence_on_deny=False, + ): + authz_service = AttributeAuthorization( + config=dict( + force_attributes_presence_on_allow=force_attributes_presence_on_allow, + force_attributes_presence_on_deny=force_attributes_presence_on_deny, + attribute_allow=attribute_allow, + attribute_deny=attribute_deny, + ), + name="test_authz", + base_url="https://satosa.example.com", + ) authz_service.next = lambda ctx, data: data return authz_service @@ -25,7 +40,7 @@ def test_authz_allow_success(self): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False def test_authz_allow_fail(self): @@ -38,13 +53,24 @@ def test_authz_allow_fail(self): resp.attributes = { "a0": ["bar"], } - try: + with pytest.raises(SATOSAAuthenticationError): + ctx = Context() + ctx.state = dict() + authz_service.process(ctx, resp) + + def test_authz_allow_missing(self): + attribute_allow = { + "": { "default": {"a0": ['foo1','foo2']} } + } + attribute_deny = {} + 
authz_service = self.create_authz_service(attribute_allow, attribute_deny, force_attributes_presence_on_allow=True) + resp = InternalData(auth_info=AuthenticationInformation()) + resp.attributes = { + } + with pytest.raises(SATOSAAuthenticationError): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - assert False - except SATOSAAuthenticationError as ex: - assert True def test_authz_allow_second(self): attribute_allow = { @@ -60,7 +86,7 @@ def test_authz_allow_second(self): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False def test_authz_deny_success(self): @@ -73,13 +99,10 @@ def test_authz_deny_success(self): resp.attributes = { "a0": ["foo2"], } - try: + with pytest.raises(SATOSAAuthenticationError): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - assert False - except SATOSAAuthenticationError as ex: - assert True def test_authz_deny_fail(self): attribute_deny = { @@ -95,5 +118,5 @@ def test_authz_deny_fail(self): ctx = Context() ctx.state = dict() authz_service.process(ctx, resp) - except SATOSAAuthenticationError as ex: + except SATOSAAuthenticationError: assert False diff --git a/tests/satosa/micro_services/test_attribute_generation.py b/tests/satosa/micro_services/test_attribute_generation.py index be4fd9ab9..67f669417 100644 --- a/tests/satosa/micro_services/test_attribute_generation.py +++ b/tests/satosa/micro_services/test_attribute_generation.py @@ -1,7 +1,6 @@ from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_generation import AddSyntheticAttributes -from satosa.exception import SATOSAAuthenticationError from satosa.context import Context class TestAddSyntheticAttributes: @@ -63,3 +62,20 @@ def test_generate_mustache2(self): assert("kaka1" in resp.attributes['kaka']) assert("a@example.com" in resp.attributes['eppn']) 
assert("b@example.com" in resp.attributes['eppn']) + + def test_generate_mustache_empty_attribute(self): + synthetic_attributes = { + "": {"default": {"a0": "{{kaka.first}}#{{eppn.scope}}"}} + } + authz_service = self.create_syn_service(synthetic_attributes) + resp = InternalData(auth_info=AuthenticationInformation()) + resp.attributes = { + "kaka": ["kaka1", "kaka2"], + "eppn": None, + } + ctx = Context() + ctx.state = dict() + authz_service.process(ctx, resp) + assert("kaka1#" in resp.attributes['a0']) + assert("kaka1" in resp.attributes['kaka']) + assert("kaka2" in resp.attributes['kaka']) diff --git a/tests/satosa/micro_services/test_attribute_modifications.py b/tests/satosa/micro_services/test_attribute_modifications.py index 0efaec43e..41ce8a7c0 100644 --- a/tests/satosa/micro_services/test_attribute_modifications.py +++ b/tests/satosa/micro_services/test_attribute_modifications.py @@ -1,3 +1,9 @@ +import pytest +from tests.util import FakeIdP, create_metadata_from_config_dict, FakeSP +from saml2.mdstore import MetadataStore +from saml2.config import Config +from satosa.context import Context +from satosa.exception import SATOSAError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.attribute_modifications import FilterAttributeValues @@ -10,6 +16,22 @@ def create_filter_service(self, attribute_filters): filter_service.next = lambda ctx, data: data return filter_service + def create_idp_metadata_conf_with_shibmd_scopes(self, idp_entityid, shibmd_scopes): + idp_conf = { + "entityid": idp_entityid, + "service": { + "idp":{} + } + } + + if shibmd_scopes is not None: + idp_conf["service"]["idp"]["scope"] = shibmd_scopes + + metadata_conf = { + "inline": [create_metadata_from_config_dict(idp_conf)] + } + return metadata_conf + def test_filter_all_attributes_from_all_target_providers_for_all_requesters(self): attribute_filters = { "": { # all providers @@ -116,3 +138,264 @@ def 
test_filter_one_attribute_for_one_target_provider_for_one_requester(self): } filtered = filter_service.process(None, resp) assert filtered.attributes == {"a1": ["1:foo:bar:2"]} + + def test_filter_one_attribute_from_all_target_providers_for_all_requesters_in_extended_notation(self): + attribute_filters = { + "": { + "": { + "a2": { + "regexp": "^foo:bar$" + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + filtered = filter_service.process(None, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo:bar"]} + + def test_invalid_filter_type(self): + attribute_filters = { + "": { + "": { + "a2": { + "invalid_filter": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + with pytest.raises(SATOSAError): + filtered = filter_service.process(None, resp) + + def test_shibmdscope_match_value_filter_with_no_md_store_in_context(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } + ctx = Context() + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + def test_shibmdscope_match_value_filter_with_empty_md_store_in_context(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo:bar", "1:foo:bar:2"], + } 
+ ctx = Context() + mdstore = MetadataStore(None, None) + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + def test_shibmdscope_match_value_filter_with_idp_md_with_no_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, None)) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": []} + + def test_shibmdscope_match_value_filter_with_idp_md_with_single_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo.bar"]} + + def test_shibmdscope_match_value_filter_with_idp_md_with_single_regexp_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = 
self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["test.foo.bar", "1.foo.bar.2"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, [r"[^.]*\.foo\.bar$"])) + # mark scope as regexp (cannot be done via pysaml2 YAML config) + mdstore[idp_entityid]['idpsso_descriptor'][0]['extensions']['extension_elements'][0]['regexp'] = 'true' + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["test.foo.bar"]} + + def test_shibmdscope_match_value_filter_with_idp_md_with_multiple_scopes(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_value": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "1.foo.bar.2", "foo.baz", "foo.baz.com"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar", "foo.baz"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["foo.bar", "foo.baz"]} + + def test_shibmdscope_match_scope_filter_with_single_scope(self): + attribute_filters = { + "": { + "": { + "a2": { + "shibmdscope_match_scope": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "value@foo.bar", 
"1.foo.bar.2", "value@foo.bar.2", "value@extra@foo.bar"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["value@foo.bar"]} + + def test_multiple_filters_for_single_attribute(self): + attribute_filters = { + "": { + "": { + "a2": { + "regexp": "^value1@", + "shibmdscope_match_scope": None + } + } + } + } + filter_service = self.create_filter_service(attribute_filters) + + resp = InternalData(AuthenticationInformation()) + resp.attributes = { + "a1": ["abc:xyz"], + "a2": ["foo.bar", "value1@foo.bar", "value2@foo.bar", "1.foo.bar.2", "value@foo.bar.2", "value@extra@foo.bar"], + } + + idp_entityid = 'https://idp.example.org/' + resp.auth_info.issuer = idp_entityid + + mdstore = MetadataStore(None, Config()) + mdstore.imp(self.create_idp_metadata_conf_with_shibmd_scopes(idp_entityid, ["foo.bar"])) + ctx = Context() + ctx.decorate(Context.KEY_METADATA_STORE, mdstore) + + filtered = filter_service.process(ctx, resp) + assert filtered.attributes == {"a1": ["abc:xyz"], "a2": ["value1@foo.bar"]} diff --git a/tests/satosa/micro_services/test_attribute_policy.py b/tests/satosa/micro_services/test_attribute_policy.py new file mode 100644 index 000000000..f68483025 --- /dev/null +++ b/tests/satosa/micro_services/test_attribute_policy.py @@ -0,0 +1,58 @@ +from satosa.context import Context +from satosa.internal import AuthenticationInformation, InternalData +from satosa.micro_services.attribute_policy import AttributePolicy + + +class TestAttributePolicy: + def create_attribute_policy_service(self, attribute_policies): + attribute_policy_service = AttributePolicy( + config=attribute_policies, + name="test_attribute_policy", + 
base_url="https://satosa.example.com" + ) + attribute_policy_service.next = lambda ctx, data: data + return attribute_policy_service + + def test_attribute_policy(self): + requester = "requester" + attribute_policies = { + "attribute_policy": { + "requester_everything_allowed": {}, + "requester_nothing_allowed": { + "allowed": {} + }, + "requester_subset_allowed": { + "allowed": { + "attr1", + "attr2", + }, + }, + }, + } + attributes = { + "attr1": ["foo"], + "attr2": ["foo", "bar"], + "attr3": ["foo"] + } + results = { + "requester_everything_allowed": attributes.keys(), + "requester_nothing_allowed": set(), + "requester_subset_allowed": {"attr1", "attr2"}, + } + for requester, result in results.items(): + attribute_policy_service = self.create_attribute_policy_service( + attribute_policies) + + ctx = Context() + ctx.state = dict() + + resp = InternalData(auth_info=AuthenticationInformation()) + resp.requester = requester + resp.attributes = { + "attr1": ["foo"], + "attr2": ["foo", "bar"], + "attr3": ["foo"] + } + + filtered = attribute_policy_service.process(ctx, resp) + assert(filtered.attributes.keys() == result) diff --git a/tests/satosa/micro_services/test_consent.py b/tests/satosa/micro_services/test_consent.py index 247b74868..a8eaed965 100644 --- a/tests/satosa/micro_services/test_consent.py +++ b/tests/satosa/micro_services/test_consent.py @@ -1,7 +1,7 @@ import json import re from collections import Counter -from urllib.parse import urlparse, parse_qs +from urllib.parse import urlparse import pytest import requests @@ -152,7 +152,7 @@ def test_consent_full_flow(self, context, consent_config, internal_response, int consent_verify_endpoint_regex, consent_registration_endpoint_regex): expected_ticket = "my_ticket" - requester_name = [{"lang": "en", "text": "test requester"}] + requester_name = internal_response.requester_name context.state[consent.STATE_KEY] = {"filter": internal_request.attributes, "requester_name": requester_name} @@ -189,7 +189,8 @@ def 
test_consent_not_given(self, context, consent_config, internal_response, int responses.add(responses.GET, consent_registration_endpoint_regex, status=200, body=expected_ticket) - context.state[consent.STATE_KEY] = {"filter": [], "requester_name": None} + requester_name = internal_response.requester_name + context.state[consent.STATE_KEY] = {} resp = self.consent_module.process(context, internal_response) @@ -198,7 +199,7 @@ def test_consent_not_given(self, context, consent_config, internal_response, int internal_response, consent_config["sign_key"], self.consent_module.base_url, - None) + requester_name) new_context = Context() new_context.state = context.state @@ -216,27 +217,6 @@ def test_filter_attributes(self): filtered_attributes = self.consent_module._filter_attributes(ATTRIBUTES, FILTER) assert Counter(filtered_attributes.keys()) == Counter(FILTER) - @responses.activate - def test_manage_consent_filters_attributes_before_send_to_consent_service(self, context, internal_request, - internal_response, - consent_verify_endpoint_regex): - approved_attributes = ["foo", "bar"] - # fake previous consent - responses.add(responses.GET, consent_verify_endpoint_regex, status=200, - body=json.dumps(approved_attributes)) - - attributes = {"foo": "123", "bar": "456", "abc": "should be filtered"} - internal_response.attributes = attributes - - context.state[consent.STATE_KEY] = {"filter": approved_attributes} - self.consent_module.process(context, internal_response) - - consent_hash = urlparse(responses.calls[0].request.url).path.split("/")[2] - expected_hash = self.consent_module._get_consent_id(internal_response.requester, internal_response.subject_id, - {k: v for k, v in attributes.items() if - k in approved_attributes}) - assert consent_hash == expected_hash - @responses.activate def test_manage_consent_without_filter_passes_through_all_attributes(self, context, internal_response, consent_verify_endpoint_regex): diff --git 
a/tests/satosa/micro_services/test_custom_routing.py b/tests/satosa/micro_services/test_custom_routing.py index 7a5227250..1be124877 100644 --- a/tests/satosa/micro_services/test_custom_routing.py +++ b/tests/satosa/micro_services/test_custom_routing.py @@ -1,11 +1,16 @@ from base64 import urlsafe_b64encode +from unittest import TestCase import pytest from satosa.context import Context +from satosa.state import State from satosa.exception import SATOSAError, SATOSAConfigurationError from satosa.internal import InternalData from satosa.micro_services.custom_routing import DecideIfRequesterIsAllowed +from satosa.micro_services.custom_routing import DecideBackendByTargetIssuer +from satosa.micro_services.custom_routing import DecideBackendByRequester + TARGET_ENTITY = "entity1" @@ -156,3 +161,93 @@ def test_missing_target_entity_id_from_context(self, context): req = InternalData(requester="test_requester") with pytest.raises(SATOSAError): decide_service.process(context, req) + + +class TestDecideBackendByTargetIssuer(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'default_backend': 'default_backend', + 'target_mapping': { + 'mapped_idp.example.org': 'mapped_backend', + }, + } + + plugin = DecideBackendByTargetIssuer( + config=config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_target_is_not_set_do_skip(self): + data = InternalData(requester='test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert not newctx.target_backend + + def test_when_target_is_not_mapped_choose_default_backend(self): + self.context.decorate(Context.KEY_TARGET_ENTITYID, 'idp.example.org') + data = InternalData(requester='test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'default_backend' + + def 
test_when_target_is_mapped_choose_mapping_backend(self): + self.context.decorate(Context.KEY_TARGET_ENTITYID, 'mapped_idp.example.org') + data = InternalData(requester='test_requester') + data.requester = 'somebody else' + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'mapped_backend' + + +class TestDecideBackendByRequester(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'requester_mapping': { + 'test_requester': 'mapped_backend', + }, + } + + plugin = DecideBackendByRequester( + config=config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_requester_is_not_mapped_and_no_default_backend_skip(self): + data = InternalData(requester='other_test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert not newctx.target_backend + + def test_when_requester_is_not_mapped_choose_default_backend(self): + # override config to set default backend + self.config['default_backend'] = 'default_backend' + self.plugin = DecideBackendByRequester( + config=self.config, + name='test_decide_service', + base_url='https://satosa.example.org', + ) + self.plugin.next = lambda ctx, data: (ctx, data) + + data = InternalData(requester='other_test_requester') + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'default_backend' + + def test_when_requester_is_mapped_choose_mapping_backend(self): + data = InternalData(requester='test_requester') + data.requester = 'test_requester' + newctx, newdata = self.plugin.process(self.context, data) + assert newctx.target_backend == 'mapped_backend' diff --git a/tests/satosa/micro_services/test_disco.py b/tests/satosa/micro_services/test_disco.py new file mode 100644 index 000000000..ac2c3c5c2 --- /dev/null +++ 
b/tests/satosa/micro_services/test_disco.py @@ -0,0 +1,44 @@ +from unittest import TestCase + +import pytest + +from satosa.context import Context +from satosa.state import State +from satosa.micro_services.disco import DiscoToTargetIssuer +from satosa.micro_services.disco import DiscoToTargetIssuerError + + +class TestDiscoToTargetIssuer(TestCase): + def setUp(self): + context = Context() + context.state = State() + + config = { + 'disco_endpoints': [ + '.*/disco', + ], + } + + plugin = DiscoToTargetIssuer( + config=config, + name='test_disco_to_target_issuer', + base_url='https://satosa.example.org', + ) + plugin.next = lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.plugin = plugin + + def test_when_entity_id_is_not_set_raise_error(self): + self.context.request = {} + with pytest.raises(DiscoToTargetIssuerError): + self.plugin._handle_disco_response(self.context) + + def test_when_entity_id_is_set_target_issuer_is_set(self): + entity_id = 'idp.example.org' + self.context.request = { + 'entityID': entity_id, + } + newctx, newdata = self.plugin._handle_disco_response(self.context) + assert newctx.get_decoration(Context.KEY_TARGET_ENTITYID) == entity_id diff --git a/tests/satosa/micro_services/test_idp_hinting.py b/tests/satosa/micro_services/test_idp_hinting.py new file mode 100644 index 000000000..2fa454253 --- /dev/null +++ b/tests/satosa/micro_services/test_idp_hinting.py @@ -0,0 +1,55 @@ +from unittest import TestCase + +from satosa.context import Context +from satosa.internal import InternalData +from satosa.state import State +from satosa.micro_services.idp_hinting import IdpHinting + + +class TestIdpHinting(TestCase): + def setUp(self): + context = Context() + context.state = State() + internal_data = InternalData() + + config = { + 'allowed_params': ["idp_hinting", "idp_hint", "idphint"] + } + + plugin = IdpHinting( + config=config, + name='test_idphinting', + base_url='https://satosa.example.org', + ) + plugin.next = 
lambda ctx, data: (ctx, data) + + self.config = config + self.context = context + self.data = internal_data + self.plugin = plugin + + def test_no_query_params(self): + self.context.qs_params = {} + new_context, new_data = self.plugin.process(self.context, self.data) + assert not new_context.get_decoration(Context.KEY_TARGET_ENTITYID) + + def test_hint_in_params(self): + _target = 'https://localhost:8080' + self.context.qs_params = {'idphint': _target} + new_context, new_data = self.plugin.process(self.context, self.data) + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) == _target + + def test_no_hint_in_params(self): + _target = 'https://localhost:8080' + self.context.qs_params = {'param_not_in_allowed_params': _target} + new_context, new_data = self.plugin.process(self.context, self.data) + assert not new_context.get_decoration(Context.KEY_TARGET_ENTITYID) + + def test_issuer_already_set(self): + _pre_selected_target = 'https://local.localhost:8080' + self.context.decorate(Context.KEY_TARGET_ENTITYID, _pre_selected_target) + _target = 'https://localhost:8080' + self.context.qs_params = {'idphint': _target} + new_context, new_data = self.plugin.process(self.context, self.data) + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) == _pre_selected_target + assert new_context.get_decoration(Context.KEY_TARGET_ENTITYID) != _target diff --git a/tests/satosa/micro_services/test_ldap_attribute_store.py b/tests/satosa/micro_services/test_ldap_attribute_store.py index e3af1a7f5..26dc3b9fb 100644 --- a/tests/satosa/micro_services/test_ldap_attribute_store.py +++ b/tests/satosa/micro_services/test_ldap_attribute_store.py @@ -2,6 +2,8 @@ from copy import deepcopy +from ldap3 import AUTO_BIND_NO_TLS, MOCK_SYNC + from satosa.internal import AuthenticationInformation from satosa.internal import InternalData from satosa.micro_services.ldap_attribute_store import LdapAttributeStore @@ -107,3 +109,60 @@ def test_attributes_general(self, 
ldap_attribute_store): internal_attr = ldap_to_internal_map[ldap_attr] response_attr = response.attributes[internal_attr] assert(ldap_value in response_attr) + + @pytest.mark.parametrize( + 'config,connection_attributes', + [ + ( + { + 'auto_bind': 'AUTO_BIND_NO_TLS', + 'client_strategy': 'MOCK_SYNC', + 'ldap_url': 'ldap://satosa.example.com', + 'bind_dn': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'bind_password': 'password', + }, + { + 'user': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'password': 'password', + 'auto_bind': AUTO_BIND_NO_TLS, + 'strategy_type': MOCK_SYNC, + 'read_only': True, + 'version': 3, + 'pool_size': 10, + 'pool_keepalive': 10, + 'pool_lifetime': None, + }, + ), + ( + { + 'auto_bind': 'AUTO_BIND_NO_TLS', + 'client_strategy': 'MOCK_SYNC', + 'ldap_url': 'ldap://satosa.example.com', + 'bind_dn': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'bind_password': 'password', + 'pool_size': 40, + 'pool_keepalive': 41, + 'pool_lifetime': 42, + }, + { + 'user': 'uid=readonly_user,ou=system,dc=example,dc=com', + 'password': 'password', + 'auto_bind': AUTO_BIND_NO_TLS, + 'strategy_type': MOCK_SYNC, + 'read_only': True, + 'version': 3, + 'pool_size': 40, + 'pool_keepalive': 41, + 'pool_lifetime': 42, + }, + ), + ] + ) + def test_connection_config(self, config, connection_attributes): + ldapAttributeStore = LdapAttributeStore({'default': config}, + name="test_ldap_attribute_store", + base_url="https://satosa.example.com") + connection = ldapAttributeStore.config['default']['connection'] + + for k, v in connection_attributes.items(): + assert getattr(connection, k) == v diff --git a/tests/satosa/scripts/test_satosa_saml_metadata.py b/tests/satosa/scripts/test_satosa_saml_metadata.py index 26809dc2a..f76f5d990 100644 --- a/tests/satosa/scripts/test_satosa_saml_metadata.py +++ b/tests/satosa/scripts/test_satosa_saml_metadata.py @@ -1,6 +1,7 @@ import glob import os +import mongomock import pytest from saml2.config import Config from 
saml2.mdstore import MetaDataFile @@ -10,7 +11,7 @@ @pytest.fixture -def oidc_frontend_config(signing_key_path, mongodb_instance): +def oidc_frontend_config(signing_key_path): data = { "module": "satosa.frontends.openid_connect.OpenIDConnectFrontend", "name": "OIDCFrontend", @@ -23,6 +24,7 @@ def oidc_frontend_config(signing_key_path, mongodb_instance): return data +@mongomock.patch(servers=(('localhost', 27017),)) class TestConstructSAMLMetadata: def test_saml_saml(self, tmpdir, cert_and_key, satosa_config_dict, saml_frontend_config, saml_backend_config): diff --git a/tests/satosa/test_attribute_mapping.py b/tests/satosa/test_attribute_mapping.py index c109ab717..93a3dff78 100644 --- a/tests/satosa/test_attribute_mapping.py +++ b/tests/satosa/test_attribute_mapping.py @@ -5,6 +5,56 @@ from satosa.attribute_mapping import AttributeMapper +class TestAttributeMapperNestedDataDifferentAttrProfile: + def test_nested_mapping_nested_data_to_internal(self): + mapping = { + "attributes": { + "name": { + "openid": ["name"] + }, + "givenname": { + "openid": ["given_name", "name.firstName"] + }, + }, + } + + data = { + "name": { + "firstName": "value-first", + "lastName": "value-last", + }, + "email": "someuser@apple.com", + } + + converter = AttributeMapper(mapping) + internal_repr = converter.to_internal("openid", data) + assert internal_repr["name"] == [data["name"]] + assert internal_repr["givenname"] == [data["name"]["firstName"]] + + + def test_nested_mapping_simple_data_to_internal(self): + mapping = { + "attributes": { + "name": { + "openid": ["name"] + }, + "givenname": { + "openid": ["given_name", "name.firstName"] + }, + }, + } + + data = { + "name": "value-first", + "email": "someuser@google.com", + } + + converter = AttributeMapper(mapping) + internal_repr = converter.to_internal("openid", data) + assert internal_repr["name"] == [data["name"]] + assert internal_repr.get("givenname") is None + + class TestAttributeMapper: def 
test_nested_attribute_to_internal(self): mapping = { diff --git a/tests/satosa/test_base.py b/tests/satosa/test_base.py index 0cb365742..0f2a35f50 100644 --- a/tests/satosa/test_base.py +++ b/tests/satosa/test_base.py @@ -1,18 +1,11 @@ -import copy from unittest.mock import Mock import pytest -from saml2.saml import NAMEID_FORMAT_TRANSIENT -from saml2.saml import NAMEID_FORMAT_PERSISTENT - import satosa -from satosa import util from satosa.base import SATOSABase -from satosa.exception import SATOSAConfigurationError from satosa.internal import AuthenticationInformation from satosa.internal import InternalData -from satosa.micro_services import consent from satosa.satosa_config import SATOSAConfig @@ -29,18 +22,6 @@ def test_full_initialisation(self, satosa_config): assert len(base.request_micro_services) == 1 assert len(base.response_micro_services) == 1 - def test_constuctor_should_raise_exception_if_account_linking_is_not_first_in_micro_service_list( - self, satosa_config, account_linking_module_config): - satosa_config["MICRO_SERVICES"].append(account_linking_module_config) - with pytest.raises(SATOSAConfigurationError): - SATOSABase(satosa_config) - - def test_constuctor_should_raise_exception_if_consent_is_not_last_in_micro_service_list( - self, satosa_config, consent_module_config): - satosa_config["MICRO_SERVICES"].insert(0, consent_module_config) - with pytest.raises(SATOSAConfigurationError): - SATOSABase(satosa_config) - def test_auth_resp_callback_func_user_id_from_attrs_is_used_to_override_user_id(self, context, satosa_config): satosa_config["INTERNAL_ATTRIBUTES"]["user_id_from_attrs"] = ["user_id", "domain"] base = SATOSABase(satosa_config) @@ -56,38 +37,6 @@ def test_auth_resp_callback_func_user_id_from_attrs_is_used_to_override_user_id( expected_user_id = "user@example.com" assert internal_resp.subject_id == expected_user_id - def test_auth_req_callback_stores_state_for_consent(self, context, satosa_config): - base = SATOSABase(satosa_config) - - 
context.target_backend = satosa_config["BACKEND_MODULES"][0]["name"] - requester_name = [{"lang": "en", "text": "Test EN"}, {"lang": "sv", "text": "Test SV"}] - internal_req = InternalData( - subject_type=NAMEID_FORMAT_TRANSIENT, requester_name=requester_name, - ) - internal_req.attributes = ["attr1", "attr2"] - base._auth_req_callback_func(context, internal_req) - - assert context.state[consent.STATE_KEY]["requester_name"] == internal_req.requester_name - assert context.state[consent.STATE_KEY]["filter"] == internal_req.attributes - - def test_auth_resp_callback_func_hashes_all_specified_attributes(self, context, satosa_config): - satosa_config["INTERNAL_ATTRIBUTES"]["hash"] = ["user_id", "mail"] - base = SATOSABase(satosa_config) - - attributes = {"user_id": ["user"], "mail": ["user@example.com", "user@otherdomain.com"]} - internal_resp = InternalData(auth_info=AuthenticationInformation("", "", "")) - internal_resp.attributes = copy.copy(attributes) - internal_resp.subject_id = "test_user" - context.state[satosa.base.STATE_KEY] = {"requester": "test_requester"} - context.state[satosa.routing.STATE_KEY] = satosa_config["FRONTEND_MODULES"][0]["name"] - - base._auth_resp_callback_func(context, internal_resp) - for attr in satosa_config["INTERNAL_ATTRIBUTES"]["hash"]: - assert internal_resp.attributes[attr] == [ - util.hash_data(satosa_config.get("USER_ID_HASH_SALT", ""), v) - for v in attributes[attr] - ] - def test_auth_resp_callback_func_respects_user_id_to_attr(self, context, satosa_config): satosa_config["INTERNAL_ATTRIBUTES"]["user_id_to_attr"] = "user_id" base = SATOSABase(satosa_config) diff --git a/tests/satosa/test_satosa_config.py b/tests/satosa/test_satosa_config.py index 030ae6485..fd5045a93 100644 --- a/tests/satosa/test_satosa_config.py +++ b/tests/satosa/test_satosa_config.py @@ -1,12 +1,13 @@ import json +import os from unittest.mock import mock_open, patch import pytest -from satosa.exception import SATOSAConfigurationError from satosa.exception 
import SATOSAConfigurationError from satosa.satosa_config import SATOSAConfig +TEST_RESOURCE_BASE_PATH = os.path.join(os.path.dirname(__file__), "../test_resources") class TestSATOSAConfig: @pytest.fixture @@ -58,18 +59,29 @@ def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_k satosa_config_dict[modules_key] = ["/fake_file_path"] expected_config = {"foo": "bar"} + with pytest.raises(SATOSAConfigurationError): + SATOSAConfig(satosa_config_dict) + with patch("builtins.open", mock_open(read_data=json.dumps(expected_config))): config = SATOSAConfig(satosa_config_dict) assert config[modules_key] == [expected_config] - @pytest.mark.parametrize("modules_key", [ - "BACKEND_MODULES", - "FRONTEND_MODULES", - "MICRO_SERVICES" - ]) - def test_can_read_endpoint_configs_from_file(self, satosa_config_dict, modules_key): - satosa_config_dict[modules_key] = ["/fake_file_path"] + def test_can_substitute_from_environment_variable(self, monkeypatch): + monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME", "oatmeal_raisin") + config = SATOSAConfig( + os.path.join(TEST_RESOURCE_BASE_PATH, "proxy_conf_environment_test.yaml") + ) - with pytest.raises(SATOSAConfigurationError): - SATOSAConfig(satosa_config_dict) + assert config["COOKIE_STATE_NAME"] == 'oatmeal_raisin' + + def test_can_substitute_from_environment_variable_file(self, monkeypatch): + cookie_file = os.path.join(TEST_RESOURCE_BASE_PATH, 'cookie_state_name') + monkeypatch.setenv("SATOSA_COOKIE_STATE_NAME_FILE", cookie_file) + config = SATOSAConfig( + os.path.join( + TEST_RESOURCE_BASE_PATH, "proxy_conf_environment_file_test.yaml" + ) + ) + + assert config["COOKIE_STATE_NAME"] == 'chocolate_chip' diff --git a/tests/satosa/test_state.py b/tests/satosa/test_state.py index 76b33d60c..eadee2182 100644 --- a/tests/satosa/test_state.py +++ b/tests/satosa/test_state.py @@ -100,7 +100,7 @@ def test_encode_decode_of_state(self): path = "/" encrypt_key = "2781y4hef90" - cookie = state_to_cookie(state, cookie_name, 
path, encrypt_key) + cookie = state_to_cookie(state, name=cookie_name, path=path, encryption_key=encrypt_key) cookie_str = cookie[cookie_name].OutputString() loaded_state = cookie_to_state(cookie_str, cookie_name, encrypt_key) @@ -117,7 +117,7 @@ def test_state_to_cookie_produces_cookie_without_max_age_for_state_that_should_b path = "/" encrypt_key = "2781y4hef90" - cookie = state_to_cookie(state, cookie_name, path, encrypt_key) + cookie = state_to_cookie(state, name=cookie_name, path=path, encryption_key=encrypt_key) cookie_str = cookie[cookie_name].OutputString() parsed_cookie = SimpleCookie(cookie_str) diff --git a/tests/test_requirements.txt b/tests/test_requirements.txt index bf7f30deb..fa872ab2a 100644 --- a/tests/test_requirements.txt +++ b/tests/test_requirements.txt @@ -1,4 +1,5 @@ pytest -responses +responses >= 0.14 beautifulsoup4 ldap3 +mongomock diff --git a/tests/test_resources/cookie_state_name b/tests/test_resources/cookie_state_name new file mode 100644 index 000000000..84bb814b8 --- /dev/null +++ b/tests/test_resources/cookie_state_name @@ -0,0 +1 @@ +chocolate_chip \ No newline at end of file diff --git a/tests/test_resources/proxy_conf_environment_file_test.yaml b/tests/test_resources/proxy_conf_environment_file_test.yaml new file mode 100644 index 000000000..801c109e8 --- /dev/null +++ b/tests/test_resources/proxy_conf_environment_file_test.yaml @@ -0,0 +1,10 @@ +BASE: https://example.com + +STATE_ENCRYPTION_KEY: state_encryption_key + +INTERNAL_ATTRIBUTES: {"attributes": {}} + +COOKIE_STATE_NAME: !ENVFILE SATOSA_COOKIE_STATE_NAME_FILE + +BACKEND_MODULES: [] +FRONTEND_MODULES: [] diff --git a/tests/test_resources/proxy_conf_environment_test.yaml b/tests/test_resources/proxy_conf_environment_test.yaml new file mode 100644 index 000000000..ab8118f31 --- /dev/null +++ b/tests/test_resources/proxy_conf_environment_test.yaml @@ -0,0 +1,10 @@ +BASE: https://example.com + +STATE_ENCRYPTION_KEY: state_encryption_key + +INTERNAL_ATTRIBUTES: 
{"attributes": {}} + +COOKIE_STATE_NAME: !ENV SATOSA_COOKIE_STATE_NAME + +BACKEND_MODULES: [] +FRONTEND_MODULES: [] diff --git a/tests/util.py b/tests/util.py index 0e1f5f9fb..c26c796fe 100644 --- a/tests/util.py +++ b/tests/util.py @@ -231,7 +231,7 @@ def handle_auth_req_no_name_id(self, saml_request, relay_state, binding, def create_metadata_from_config_dict(config): nspair = {"xs": "http://www.w3.org/2001/XMLSchema"} - conf = Config().load(config, metadata_construction=True) + conf = Config().load(config) return entity_descriptor(conf).to_string(nspair).decode("utf-8") diff --git a/tox.ini b/tox.ini index 4d69d943e..95cbdc864 100644 --- a/tox.ini +++ b/tox.ini @@ -2,14 +2,19 @@ envlist = py36 py37 + py38 + py39 pypy3 [testenv] +skip_install = true deps = -rtests/test_requirements.txt -whitelist_externals = +allowlist_externals = tox xmlsec1 commands = + pip install -U pip wheel setuptools + pip install -U .[pyop_mongo] xmlsec1 --version python --version pytest --version @@ -17,3 +22,33 @@ commands = pip --version pip freeze pytest -vvv -ra {posargs:tests/} + +[flake8] +ignore = + E402 + E501 + E111 + E117 + E121 + E123 + E125 + E126 + E201 + E202 + E203 + E221 + E226 + E231 + E261 + E262 + E265 + E275 + E302 + E303 + E703 + W291 + W292 + W293 + W503 + W504 + W605