From aaed90e4a5c6519dcb4a4ed68d3c89fb96853aea Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 13 Aug 2024 10:15:10 -0500 Subject: [PATCH 01/59] Packaging for v3.6.2 (#354) --- HISTORY.rst | 7 +++++++ setup.py | 2 +- shotgun_api3/shotgun.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 3b7dd4d95..92bf444d9 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.6.2 (2024 Aug 13) +==================== +- Remove Ticket entity reference and prepare this to run in CI. +- Condition auth for Jenkins environment. +- Update certifi to 2024.7.4. +- FIRST PHASE Python2 removing. + v3.6.1 (2024 Jun 6) =================== - Adds multi_entity_update_modes support to mockgun ``update()`` and ``batch()`` methods. diff --git a/setup.py b/setup.py index a0f51e764..c46fff40b 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ setup( name='shotgun_api3', - version='3.6.1', + version='3.6.2', description='Flow Production Tracking Python API', long_description=readme, author='Autodesk', diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index f5c2c5209..99b2a5957 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -116,7 +116,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.6.1" +__version__ = "3.6.2" # ---------------------------------------------------------------------------- # Errors From 319108b4b8da6a768fa89d051e4d1af5cde7638a Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 5 Nov 2024 09:55:20 -0500 Subject: [PATCH 02/59] Remove min supported py version on docs (#358) --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff 
--git a/README.md b/README.md index eaf1e9ccc..f960e941f 100644 --- a/README.md +++ b/README.md @@ -10,10 +10,6 @@ Autodesk provides a simple Python-based API for accessing Flow Production Tracki The latest version can always be found at http://github.com/shotgunsoftware/python-api -## Minimum Requirements - -* Python v3.7 - ## Documentation Tutorials and detailed documentation about the Python API are available at http://developer.shotgridsoftware.com/python-api). From b5fc779a3054ce9b4b2646fae7b384c957353a18 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 7 Nov 2024 10:37:35 -0500 Subject: [PATCH 03/59] SG-36700 Upgrade Azure Pipelines vm images (#359) * Upgrade Azure Pipelines vm images * Downgrade codecov to support x86_64 architecture --- azure-pipelines-templates/run-tests.yml | 2 +- azure-pipelines.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/azure-pipelines-templates/run-tests.yml b/azure-pipelines-templates/run-tests.yml index 551c06989..edf5ffe4a 100644 --- a/azure-pipelines-templates/run-tests.yml +++ b/azure-pipelines-templates/run-tests.yml @@ -146,7 +146,7 @@ jobs: displayName: Uploading code coverage - ${{ else }}: - script: | - curl -Os https://uploader.codecov.io/latest/macos/codecov + curl -Os https://uploader.codecov.io/v0.7.3/macos/codecov chmod +x codecov ./codecov -f coverage.xml displayName: Uploading code coverage diff --git a/azure-pipelines.yml b/azure-pipelines.yml index cc8f6b887..989745755 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -58,14 +58,14 @@ jobs: - template: azure-pipelines-templates/run-tests.yml parameters: name: Linux - vm_image: 'ubuntu-20.04' + vm_image: 'ubuntu-latest' - template: azure-pipelines-templates/run-tests.yml parameters: name: macOS - vm_image: 'macOS-12' + vm_image: 'macOS-latest' - template: azure-pipelines-templates/run-tests.yml parameters: name: Windows - vm_image: 'windows-2022' 
+ vm_image: 'windows-latest' From d0b5b506cb72ad70a26989cfc10608df2d8f0ace Mon Sep 17 00:00:00 2001 From: Sungbin Lee Date: Fri, 15 Nov 2024 23:22:57 +0900 Subject: [PATCH 04/59] Fix incorrect Shotgun import in Python API example (#355) - Corrected the import statement for the Shotgun class to use the proper module path 'shotgun_api3.Shotgun' --- docs/cookbook/usage_tips.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/cookbook/usage_tips.rst b/docs/cookbook/usage_tips.rst index c2aa362b2..91cd6e8cb 100644 --- a/docs/cookbook/usage_tips.rst +++ b/docs/cookbook/usage_tips.rst @@ -85,7 +85,7 @@ Then when you're writing scripts, you don't need to worry about remembering whic import shotgun_api3 import studio_globals - sg = Shotgun('https://my-site.shotgrid.autodesk.com', 'script_name', '0123456789abcdef0123456789abcdef0123456') + sg = shotgun_api3.Shotgun('https://my-site.shotgrid.autodesk.com', 'script_name', '0123456789abcdef0123456789abcdef0123456') result = sg.find(studio_globals.ENTITY_WIDGET, filters=[['sg_status_list', 'is', 'ip']], fields=['code', 'sg_shot']) From 31df878d537b81fe9d118443631e91d3fe3a98ea Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Mon, 9 Dec 2024 12:52:42 -0500 Subject: [PATCH 05/59] SG-36677 Optimize payload by prevent unnecessary data (#360) * Optimize payload by prevent unnecessary data * Packaging for pre-release * Code review improvements * Improve example * Improve example * Process env var * Format documentation code * Swap env var logic * Restrict env var values * Remove lowercase transform * Read env var once * Add unit tests * Packaging for v3.7.0 --- HISTORY.rst | 7 +++++ docs/reference.rst | 30 +++++++++++++++++++++ setup.py | 11 ++------ shotgun_api3/shotgun.py | 29 +++++++++++++++++++-- tests/test_unit.py | 58 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 124 insertions(+), 11 deletions(-) diff --git a/HISTORY.rst 
b/HISTORY.rst index 92bf444d9..ccfc44e33 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.7.0 (2024 Dec 9) +=========================== +- Remove unnecessary data in the payload when combining related queries before sending it to the server. + This would improve overall performance decreasing network latency and server processing. + See documentation for more information. + + v3.6.2 (2024 Aug 13) ==================== - Remove Ticket entity reference and prepare this to run in CI. diff --git a/docs/reference.rst b/docs/reference.rst index da171ea85..6304fc09e 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -949,6 +949,36 @@ Stores the number of milliseconds to wait between request retries. By default, In the case that both this environment variable and the config's ``rpc_attempt_interval`` property are set, the value in ``rpc_attempt_interal`` will be used. + +SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION +======================================= + +.. note:: (v3.7.0) This is an experimental feature. Feel free to disable this feature if you are experiencing any issues. + +When set to ``1``, this environment variable will enable the entity optimization feature. +This feature is disabled by default and is used to reduce the payload size made to the server when retrieving entities +improving overall performance by decreasing network latency and server processing. + +For example, a ``find`` call like this: + +.. code-block:: python + + sg.find('Asset', [['project', 'is', { + 'created_at': datetime.datetime(2015, 12, 16, 11, 2, 10, tzinfo), + 'id': 9999, + 'name': 'Demo: Game', + 'type': 'Project', + # More entity attributes + }]]) + + +Will internally be transformed as if you invoked something like this: + +.. 
code-block:: python + + sg.find('Asset', [['project', 'is', {'id': 999, 'type': 'Project'}]]) + + ************ Localization ************ diff --git a/setup.py b/setup.py index c46fff40b..3305fd5af 100644 --- a/setup.py +++ b/setup.py @@ -18,16 +18,9 @@ f = open('LICENSE') license = f.read().strip() -# For python 2.4 support -script_args = sys.argv[1:] -if (sys.version_info[0] <= 2) or (sys.version_info[0] == 2 and sys.version_info[1] <= 5): - if 'install' in script_args and '--no-compile' not in script_args: - script_args.append('--no-compile') - - setup( name='shotgun_api3', - version='3.6.2', + version='3.7.0', description='Flow Production Tracking Python API', long_description=readme, author='Autodesk', @@ -35,7 +28,7 @@ url='https://github.com/shotgunsoftware/python-api', license=license, packages=find_packages(exclude=('tests',)), - script_args=script_args, + script_args=sys.argv[1:], include_package_data=True, package_data={'': ['cacerts.txt', 'cacert.pem']}, zip_safe=False, diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 99b2a5957..87f68d3e9 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -105,6 +105,8 @@ def _is_mimetypes_broken(): SG_TIMEZONE = SgTimezone() +SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION = False + NO_SSL_VALIDATION = False """ Turns off hostname matching validation for SSL certificates @@ -116,7 +118,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.6.2" +__version__ = "3.7.0" # ---------------------------------------------------------------------------- # Errors @@ -649,7 +651,11 @@ def __init__(self, if self.config.rpc_attempt_interval < 0: raise ValueError("Value of SHOTGUN_API_RETRY_INTERVAL must be positive, " "got '%s'." 
% self.config.rpc_attempt_interval) - + + global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + if os.environ.get("SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION", "0").strip().lower() == "1": + SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION = True + self._connection = None self.__ca_certs = self._get_certs_file(ca_certs) @@ -4470,6 +4476,25 @@ def _translate_filters_simple(sg_filter): if len(values) == 1 and isinstance(values[0], (list, tuple)): values = values[0] + # Payload optimization: Do not send a full object + # just send the `type` and `id` when combining related queries + global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + if ( + SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + and condition["path"] != "id" + and condition["relation"] in ["is", "is_not"] + and isinstance(values[0], dict) + ): + try: + values = [ + { + "type": values[0]["type"], + "id": values[0]["id"], + } + ] + except KeyError: + pass + condition["values"] = values return condition diff --git a/tests/test_unit.py b/tests/test_unit.py index 84bd35b60..096ca9327 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -405,6 +405,63 @@ def test_invalid(self): self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, filters, "all") + def test_related_object(self): + filters = [ + [ + "project", + "is", + {"foo": "foo", "bar": "bar", "id": 999, "baz": "baz", "type": "Anything"}, + ], + ] + expected = { + "logical_operator": "and", + "conditions": [ + { + "path": "project", + "relation": "is", + "values": [ + { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "id": 999, + "type": "Anything", + } + ], + } + ], + } + result = api.shotgun._translate_filters(filters, "all") + self.assertEqual(result, expected) + + def test_related_object_entity_optimization(self): + filters = [ + [ + "project", + "is", + {"foo": "foo", "bar": "bar", "id": 999, "baz": "baz", "type": "Anything"}, + ], + ] + expected = { + "logical_operator": "and", + "conditions": [ + { + "path": "project", + "relation": "is", + "values": [ + { + 
"id": 999, + "type": "Anything", + } + ], + } + ], + } + os.environ["SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION"] = "1" + api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = api.shotgun._translate_filters(filters, "all") + self.assertEqual(result, expected) + class TestCerts(unittest.TestCase): # A dummy bad url provided by Amazon @@ -506,5 +563,6 @@ def _test_mimetypes_import(self, platform, major, minor, patch_number, result, m mock.platform = platform self.assertEqual(_is_mimetypes_broken(), result) + if __name__ == '__main__': unittest.main() From f0451f5188a34f6def8a3bbc785928de2489c075 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 28 Jan 2025 14:59:29 -0500 Subject: [PATCH 06/59] SG-37544 Include the "in" and "not_in" operators for payload optimization (#362) * Include the "in" and "not_in" for payload optimization * Use a decorator to mock environmental variable --- shotgun_api3/shotgun.py | 9 ++------- tests/test_unit.py | 40 ++++++++++++++++++++++++++++++++++++++-- 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 87f68d3e9..58f621323 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -4482,16 +4482,11 @@ def _translate_filters_simple(sg_filter): if ( SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION and condition["path"] != "id" - and condition["relation"] in ["is", "is_not"] + and condition["relation"] in ["is", "is_not", "in", "not_in"] and isinstance(values[0], dict) ): try: - values = [ - { - "type": values[0]["type"], - "id": values[0]["id"], - } - ] + values = [{"type": v["type"], "id": v["id"]} for v in values] except KeyError: pass diff --git a/tests/test_unit.py b/tests/test_unit.py index 096ca9327..3f2e7593d 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -12,6 +12,7 @@ import os import unittest +from unittest import mock from .mock import patch 
import shotgun_api3 as api from shotgun_api3.shotgun import _is_mimetypes_broken @@ -434,7 +435,8 @@ def test_related_object(self): result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) - def test_related_object_entity_optimization(self): + @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + def test_related_object_entity_optimization_is(self): filters = [ [ "project", @@ -457,7 +459,41 @@ def test_related_object_entity_optimization(self): } ], } - os.environ["SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION"] = "1" + api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = api.shotgun._translate_filters(filters, "all") + self.assertEqual(result, expected) + + @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + def test_related_object_entity_optimization_in(self): + filters = [ + [ + "project", + "in", + [ + {"foo1": "foo1", "bar1": "bar1", "id": 999, "baz1": "baz1", "type": "Anything"}, + {"foo2": "foo2", "bar2": "bar2", "id": 998, "baz2": "baz2", "type": "Anything"} + ], + ], + ] + expected = { + "logical_operator": "and", + "conditions": [ + { + "path": "project", + "relation": "in", + "values": [ + { + "id": 999, + "type": "Anything", + }, + { + "id": 998, + "type": "Anything", + } + ], + } + ], + } api.Shotgun("http://server_path", "script_name", "api_key", connect=False) result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) From b83b9e27bf813ce2048aa0a41e8707a91ff54a00 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Fri, 7 Feb 2025 09:10:11 -0500 Subject: [PATCH 07/59] SG-37548 Add payload optimization on update method (#363) * Include the "in" and "not_in" for payload optimization * Use a decorator to mock environmental variable * Add payload optimization for update action * Change conditional order * Extract function to be reused. 
Improve testing. * Fix typo * Fix test * Support multi entity, Add tests * Apply CR feedback * Update shotgun_api3/shotgun.py Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --------- Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --- shotgun_api3/shotgun.py | 52 ++++++++++---- tests/test_unit.py | 149 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 188 insertions(+), 13 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 58f621323..d3f2cfba1 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -1132,6 +1132,28 @@ def _add_project_param(self, params, project_entity): params["project"] = project_entity return params + + def _translate_update_params( + self, entity_type, entity_id, data, multi_entity_update_modes + ): + global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + + def optimize_field(field_dict): + if SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION: + return {k: _get_type_and_id_from_value(v) for k, v in field_dict.items()} + return field_dict + + full_fields = self._dict_to_list( + data, + extra_data=self._dict_to_extra_data( + multi_entity_update_modes, "multi_entity_update_mode" + ), + ) + return { + "type": entity_type, + "id": entity_id, + "fields": [optimize_field(field_dict) for field_dict in full_fields], + } def summarize(self, entity_type, @@ -1463,14 +1485,7 @@ def update(self, entity_type, entity_id, data, multi_entity_update_modes=None): upload_filmstrip_image = data.pop("filmstrip_image") if data: - params = { - "type": entity_type, - "id": entity_id, - "fields": self._dict_to_list( - data, - extra_data=self._dict_to_extra_data( - multi_entity_update_modes, "multi_entity_update_mode")) - } + params = self._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) record = self._call_rpc("update", params) result = self._parse_records(record)[0] else: @@ -4485,10 +4500,7 @@ def _translate_filters_simple(sg_filter): 
and condition["relation"] in ["is", "is_not", "in", "not_in"] and isinstance(values[0], dict) ): - try: - values = [{"type": v["type"], "id": v["id"]} for v in values] - except KeyError: - pass + values = [_get_type_and_id_from_value(v) for v in values] condition["values"] = values @@ -4500,3 +4512,19 @@ def _version_str(version): Convert a tuple of int's to a '.' separated str. """ return ".".join(map(str, version)) + + +def _get_type_and_id_from_value(value): + """ + For an entity dictionary, returns a new dictionary with only the type and id keys. + If any of these keys are not present, the original dictionary is returned. + """ + try: + if isinstance(value, dict): + return {"type": value["type"], "id": value["id"]} + elif isinstance(value, list): + return [{"type": v["type"], "id": v["id"]} for v in value] + except (KeyError, TypeError): + LOG.debug(f"Could not optimize entity value {value}") + + return value diff --git a/tests/test_unit.py b/tests/test_unit.py index 3f2e7593d..c8144d51b 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -290,6 +290,8 @@ def test_py_version(self, mock_sys): class TestFilters(unittest.TestCase): + maxDiff = None + def test_empty(self): expected = { "logical_operator": "and", @@ -463,6 +465,28 @@ def test_related_object_entity_optimization_is(self): result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) + # Now test a non-related object. The expected result should not be optimized. 
+ filters = [ + [ + "something", + "is", + {"foo": "foo", "bar": "bar"}, + ], + ] + expected = { + "logical_operator": "and", + "conditions": [ + { + "path": "something", + "relation": "is", + "values": [{'bar': 'bar', 'foo': 'foo'}], + } + ], + } + api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = api.shotgun._translate_filters(filters, "all") + self.assertEqual(result, expected) + @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) def test_related_object_entity_optimization_in(self): filters = [ @@ -471,7 +495,8 @@ def test_related_object_entity_optimization_in(self): "in", [ {"foo1": "foo1", "bar1": "bar1", "id": 999, "baz1": "baz1", "type": "Anything"}, - {"foo2": "foo2", "bar2": "bar2", "id": 998, "baz2": "baz2", "type": "Anything"} + {"foo2": "foo2", "bar2": "bar2", "id": 998, "baz2": "baz2", "type": "Anything"}, + {"foo3": "foo3", "bar3": "bar3"}, ], ], ] @@ -489,6 +514,10 @@ def test_related_object_entity_optimization_in(self): { "id": 998, "type": "Anything", + }, + { + "foo3": "foo3", + "bar3": "bar3", } ], } @@ -498,6 +527,124 @@ def test_related_object_entity_optimization_in(self): result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) + def test_related_object_update_entity(self): + entity_type = "Anything" + entity_id = 999 + multi_entity_update_modes = {"link": "set", "name": "set"} + data = { + "name": "test", + "link": { + "name": "test", + "url": "http://test.com", + }, + } + expected = { + "id": 999, + "type": "Anything", + "fields": [ + { + "field_name": "name", + "value": "test", + "multi_entity_update_mode": "set", + }, + { + "field_name": "link", + "value": { + "name": "test", + "url": "http://test.com", + }, + "multi_entity_update_mode": "set", + }, + ], + } + sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) + 
self.assertEqual(result, expected) + + @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + def test_related_object_update_optimization_entity(self): + entity_type = "Anything" + entity_id = 999 + multi_entity_update_modes = {"project": "set", "link": "set", "name": "set"} + data = { + "name": "test", + "link": { + "name": "test", + "url": "http://test.com", + }, + "project": { + "foo1": "foo1", + "bar1": "bar1", + "id": 888, + "baz1": "baz1", + "type": "Project", + }, + } + expected = { + "id": 999, + "type": "Anything", + "fields": [ + { + "field_name": "name", + "value": "test", + "multi_entity_update_mode": "set", + }, + { + "field_name": "link", + "value": { + "name": "test", + "url": "http://test.com", + }, + "multi_entity_update_mode": "set", + }, + { + "field_name": "project", + "multi_entity_update_mode": "set", + "value": { + # Entity is optimized with type/id fields. + "id": 888, + "type": "Project", + }, + }, + ], + } + sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) + self.assertEqual(result, expected) + + @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + def test_related_object_update_optimization_entity_multi(self): + entity_type = "Asset" + entity_id = 6626 + data = { + "sg_status_list": "ip", + "project": {"id": 70, "type": "Project", "name": "disposable name 70"}, + "sg_vvv": [ + {"id": 6441, "type": "Asset", "name": "disposable name 6441"}, + {"id": 6440, "type": "Asset"}, + ], + "sg_class": {"id": 1, "type": "CustomEntity53", "name": "disposable name 1"}, + } + expected = { + "type": "Asset", + "id": 6626, + "fields": [ + {"field_name": "sg_status_list", "value": "ip"}, + {"field_name": "project", "value": {"type": "Project", "id": 70}}, + { + "field_name": "sg_vvv", + "value": [ + {"id": 6441, "type": "Asset"}, + {"id": 6440, "type": "Asset"}, + ], + }, + 
{"field_name": "sg_class", "value": {"type": "CustomEntity53", "id": 1}}, + ], + } + sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) + result = sg._translate_update_params(entity_type, entity_id, data, None) + self.assertEqual(result, expected) + class TestCerts(unittest.TestCase): # A dummy bad url provided by Amazon From 9b9e5380519637c5626ab679f7f5f5e22f26476b Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Wed, 12 Feb 2025 08:40:54 -0500 Subject: [PATCH 08/59] SG-38119 Release v3.8.0. Make payload optimization default (#366) * Make payload optimization default * Packaging for v3.8.0 * Update HISTORY.rst Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --------- Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --- HISTORY.rst | 11 ++++++++++- docs/reference.rst | 6 +++--- setup.py | 2 +- shotgun_api3/shotgun.py | 22 +++++++++++----------- tests/test_unit.py | 10 ++++++---- 5 files changed, 31 insertions(+), 20 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index ccfc44e33..82309527e 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,8 +4,17 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.8.0 (2024 Feb 7) +=================== + +- Extend the payload optimizations to the ``in`` and ``not_in`` filters and + the ``update`` method. +- The payload optimization is now enabled by default. + It can be disabled with the ``SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION`` + environment variable. + v3.7.0 (2024 Dec 9) -=========================== +=================== - Remove unnecessary data in the payload when combining related queries before sending it to the server. This would improve overall performance decreasing network latency and server processing. See documentation for more information. 
diff --git a/docs/reference.rst b/docs/reference.rst index 6304fc09e..b3260b3be 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -950,13 +950,13 @@ Stores the number of milliseconds to wait between request retries. By default, In the case that both this environment variable and the config's ``rpc_attempt_interval`` property are set, the value in ``rpc_attempt_interal`` will be used. -SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION +SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION ======================================= .. note:: (v3.7.0) This is an experimental feature. Feel free to disable this feature if you are experiencing any issues. -When set to ``1``, this environment variable will enable the entity optimization feature. -This feature is disabled by default and is used to reduce the payload size made to the server when retrieving entities +When set to ``1``, this environment variable will disable the entity optimization feature. +This feature is enabled by default and is used to reduce the payload size made to the server when retrieving entities improving overall performance by decreasing network latency and server processing. 
For example, a ``find`` call like this: diff --git a/setup.py b/setup.py index 3305fd5af..337e3b13b 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name='shotgun_api3', - version='3.7.0', + version='3.8.0', description='Flow Production Tracking Python API', long_description=readme, author='Autodesk', diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index d3f2cfba1..f0d4faf48 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -105,7 +105,7 @@ def _is_mimetypes_broken(): SG_TIMEZONE = SgTimezone() -SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION = False +SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION = False NO_SSL_VALIDATION = False """ @@ -118,7 +118,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.7.0" +__version__ = "3.8.0" # ---------------------------------------------------------------------------- # Errors @@ -652,9 +652,9 @@ def __init__(self, raise ValueError("Value of SHOTGUN_API_RETRY_INTERVAL must be positive, " "got '%s'." 
% self.config.rpc_attempt_interval) - global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION - if os.environ.get("SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION", "0").strip().lower() == "1": - SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION = True + global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION + if os.environ.get("SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", "0").strip().lower() == "1": + SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION = True self._connection = None @@ -1136,12 +1136,12 @@ def _add_project_param(self, params, project_entity): def _translate_update_params( self, entity_type, entity_id, data, multi_entity_update_modes ): - global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION def optimize_field(field_dict): - if SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION: - return {k: _get_type_and_id_from_value(v) for k, v in field_dict.items()} - return field_dict + if SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION: + return field_dict + return {k: _get_type_and_id_from_value(v) for k, v in field_dict.items()} full_fields = self._dict_to_list( data, @@ -4493,9 +4493,9 @@ def _translate_filters_simple(sg_filter): # Payload optimization: Do not send a full object # just send the `type` and `id` when combining related queries - global SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION if ( - SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION + not SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION and condition["path"] != "id" and condition["relation"] in ["is", "is_not", "in", "not_in"] and isinstance(values[0], dict) diff --git a/tests/test_unit.py b/tests/test_unit.py index c8144d51b..84304cab7 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -408,6 +408,7 @@ def test_invalid(self): self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, filters, "all") + @mock.patch.dict(os.environ, {"SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION": "1"}) def test_related_object(self): filters = [ [ @@ -434,10 +435,11 @@ def test_related_object(self): } 
], } + api.Shotgun("http://server_path", "script_name", "api_key", connect=False) result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) - @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) def test_related_object_entity_optimization_is(self): filters = [ [ @@ -487,7 +489,7 @@ def test_related_object_entity_optimization_is(self): result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) - @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) def test_related_object_entity_optimization_in(self): filters = [ [ @@ -561,7 +563,7 @@ def test_related_object_update_entity(self): result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) self.assertEqual(result, expected) - @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) def test_related_object_update_optimization_entity(self): entity_type = "Anything" entity_id = 999 @@ -612,7 +614,7 @@ def test_related_object_update_optimization_entity(self): result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) self.assertEqual(result, expected) - @mock.patch.dict(os.environ, {"SHOTGUN_API_ENABLE_ENTITY_OPTIMIZATION": "1"}) + @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) def test_related_object_update_optimization_entity_multi(self): entity_type = "Asset" entity_id = 6626 From b6247a39876c4d1c25ce170666adb1ef350f7e4b Mon Sep 17 00:00:00 2001 From: Eduardo Chauca <166560435+eduardoChaucaGallegos@users.noreply.github.com> Date: Tue, 18 Feb 2025 01:39:40 -0500 Subject: [PATCH 09/59] upgrade certifi to 2024.12.14 (#365) --- 
shotgun_api3/lib/certifi/__init__.py | 2 +- shotgun_api3/lib/certifi/cacert.pem | 284 ++++++++++++++++----------- 2 files changed, 172 insertions(+), 114 deletions(-) diff --git a/shotgun_api3/lib/certifi/__init__.py b/shotgun_api3/lib/certifi/__init__.py index d321f1bc3..ee8686bec 100644 --- a/shotgun_api3/lib/certifi/__init__.py +++ b/shotgun_api3/lib/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2024.07.04" +__version__ = "2024.12.14" diff --git a/shotgun_api3/lib/certifi/cacert.pem b/shotgun_api3/lib/certifi/cacert.pem index a6581589b..ef509f865 100644 --- a/shotgun_api3/lib/certifi/cacert.pem +++ b/shotgun_api3/lib/certifi/cacert.pem @@ -763,35 +763,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - # Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. # Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. # Label: "Microsec e-Szigno Root CA 2009" @@ -3100,50 +3071,6 @@ LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG mpv0 -----END CERTIFICATE----- -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj 
-c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - # Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation # Label: "Microsoft ECC Root Certificate Authority 2017" @@ -3485,6 +3412,46 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= -----END CERTIFICATE----- +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx 
+MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + # Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz # Label: "ANF Secure Server Root CA" @@ 
-4214,46 +4181,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA 
-FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - # Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. # Label: "Security Communication ECC RootCA1" @@ -4796,3 +4723,134 @@ PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG XSaQpYXFuXqUPoeovQA= -----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P 
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT +zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo 
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- \ No newline at end of file From 2cd8b387a165fb2773cc2d6ef4ea224dc6d581f8 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 19 Feb 2025 08:28:32 -0800 Subject: [PATCH 10/59] SG-33057 App pre-commit configuration and CI (#367) * Add pre-commit config * Apply pre-commit 
reformat * fixup! Add pre-commit config --- .flake8 | 2 +- .gitignore | 1 - .pre-commit-config.yaml | 49 + HISTORY.rst | 74 +- LICENSE | 8 +- SECURITY.md | 2 +- .../code_style_validation.yml | 50 + azure-pipelines-templates/run-tests.yml | 6 +- azure-pipelines.yml | 1 + docs/advanced/iron_python.rst | 2 +- docs/advanced/packaging.rst | 4 +- docs/authentication.rst | 1 - docs/changelog.rst | 2 +- docs/cookbook.rst | 10 +- docs/cookbook/examples/ami_handler.rst | 8 +- .../examples/ami_version_packager.rst | 69 +- docs/cookbook/examples/basic_create_shot.rst | 25 +- .../basic_create_shot_task_template.rst | 12 +- .../basic_create_version_link_shot.rst | 36 +- docs/cookbook/examples/basic_delete_shot.rst | 11 +- docs/cookbook/examples/basic_find_shot.rst | 18 +- docs/cookbook/examples/basic_sg_instance.rst | 6 +- docs/cookbook/examples/basic_update_shot.rst | 27 +- .../basic_upload_thumbnail_version.rst | 10 +- docs/cookbook/examples/svn_integration.rst | 70 +- docs/cookbook/smart_cut_fields.rst | 8 +- docs/cookbook/tasks.rst | 4 +- docs/cookbook/tasks/split_tasks.rst | 41 +- docs/cookbook/tasks/task_dependencies.rst | 76 +- docs/cookbook/tasks/updating_tasks.rst | 34 +- docs/cookbook/tutorials.rst | 2 +- docs/cookbook/usage_tips.rst | 56 +- docs/index.rst | 1 - docs/reference.rst | 66 +- nose.cfg | 2 +- run-tests | 2 + setup.py | 20 +- shotgun_api3/__init__.py | 21 +- shotgun_api3/shotgun.py | 1327 +++++---- tests/base.py | 351 +-- tests/ci_requirements.txt | 6 +- tests/mock.py | 311 ++- tests/test_api.py | 2373 ++++++++++------- tests/test_client.py | 192 +- tests/test_config_file | 2 +- tests/test_mockgun.py | 260 +- tests/test_proxy.py | 12 +- tests/test_unit.py | 401 +-- update_httplib2.py | 46 +- 49 files changed, 3617 insertions(+), 2501 deletions(-) create mode 100644 .pre-commit-config.yaml create mode 100644 azure-pipelines-templates/code_style_validation.yml diff --git a/.flake8 b/.flake8 index 02cff1e84..343f01039 100644 --- a/.flake8 +++ b/.flake8 @@ -10,4 
+10,4 @@ [flake8] max-line-length = 120 -exclude = shotgun_api3/lib/httplib2/*,shotgun_api3/lib/six.py,tests/httplib2test.py,tests/mock.py \ No newline at end of file +exclude = shotgun_api3/lib/httplib2/*,shotgun_api3/lib/six.py,tests/httplib2test.py,tests/mock.py diff --git a/.gitignore b/.gitignore index 3e6ff329a..02018058b 100644 --- a/.gitignore +++ b/.gitignore @@ -33,4 +33,3 @@ build dist shotgun_api3.egg-info /%1 - diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..537da22cf --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,49 @@ +# Copyright (c) 2024, Shotgun Software Inc. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# - Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# - Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# - Neither the name of the Shotgun Software Inc nor the names of its +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Styles the code properly + +# Exclude Third Pary components +exclude: "shotgun_api3/lib/.*" + +# List of super useful formatters. +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-ast + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: trailing-whitespace + + - repo: https://github.com/psf/black + rev: 25.1.0 + hooks: + - id: black diff --git a/HISTORY.rst b/HISTORY.rst index 82309527e..9858f61c8 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -9,7 +9,7 @@ v3.8.0 (2024 Feb 7) - Extend the payload optimizations to the ``in`` and ``not_in`` filters and the ``update`` method. -- The payload optimization is now enabled by default. +- The payload optimization is now enabled by default. It can be disabled with the ``SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION`` environment variable. 
@@ -57,7 +57,7 @@ v3.4.2 (2024 Feb 6) v3.4.1 (2024 Jan 29) ==================== - Flaky Tests -- Documentation: Fix issue regarding "in" filter prototype +- Documentation: Fix issue regarding "in" filter prototype - Documentation: Travis badge image is no working anymore - Documentation: Add ``user_subscription_read`` and ``user_subscription_create`` methods - Update Python Certifi license block @@ -208,7 +208,7 @@ v3.0.34 (2017 September 18) v3.0.33 (2017 July 18) ====================== -- Raise an exception when uploading an empty file using :meth:`upload`, :meth:`upload_thumbnail` +- Raise an exception when uploading an empty file using :meth:`upload`, :meth:`upload_thumbnail` or :meth:`upload_filmstrip_thumbnail` before calling out to the server. - Multiple enhancements and bugfixes to Mockgun - Added ``nav_search_string()`` and ``nav_search_entity()`` methods as experimental, internal methods for querying SG hierarchy. @@ -258,17 +258,17 @@ v3.0.27 (2016 Feb 18) v3.0.26 (2016 Feb 1) ==================== -- Updating testing framework to use environment variables inconjunction with existing +- Updating testing framework to use environment variables inconjunction with existing ``example_config`` file so that commits and pull requests are automatically run on travis-ci. -- Fix to prevent stripping out case-sensitivity of a URL if the user passes their credentials to +- Fix to prevent stripping out case-sensitivity of a URL if the user passes their credentials to ``config.server`` as an authorization header. v3.0.25 (2016 Jan 12) ===================== -- Add handling for Python versions incompatible with SHA-2 (see `this blog post +- Add handling for Python versions incompatible with SHA-2 (see `this blog post `_). -- Add ``SHOTGUN_FORCE_CERTIFICATE_VALIDATION`` environment variable to prevent disabling certficate +- Add ``SHOTGUN_FORCE_CERTIFICATE_VALIDATION`` environment variable to prevent disabling certficate validation when SHA-2 validation is not available. 
- Add SSL info to user-agent header. @@ -276,11 +276,11 @@ v3.0.24 (2016 Jan 08) ===================== - Not released. - + v3.0.23 (2015 Oct 26) ===================== -- Fix for `python bug #23371 `_ on Windows loading mimetypes +- Fix for `python bug #23371 `_ on Windows loading mimetypes module (thanks `@patrickwolf `_). - Fix for tests on older versions of python. - Sanitize authentication values before raising error. @@ -288,13 +288,13 @@ v3.0.23 (2015 Oct 26) v3.0.22 (2015 Sept 9) ===================== -- Added method :meth:`text_search` which allows an API client to access the Shotgun global search +- Added method :meth:`text_search` which allows an API client to access the Shotgun global search and auto completer. -- Added method :meth:`activity_stream_read` which allows an API client to access the activity +- Added method :meth:`activity_stream_read` which allows an API client to access the activity stream for a given Shotgun entity. -- Added method :meth:`note_thread_read` which allows an API client to download an entire Note +- Added method :meth:`note_thread_read` which allows an API client to download an entire Note conversation, including Replies and Attachments, using a single API call. -- Added an experimental ``mockgun`` module which can be used to emulate the Shotgun API, for +- Added an experimental ``mockgun`` module which can be used to emulate the Shotgun API, for example inside unit test rigs. - [minor] Improved docstrings. @@ -313,23 +313,23 @@ v3.0.19 (2015 Mar 25) - Add ability to authenticate with Shotgun using ``session_token``. - Add :meth:`get_session_token` method for obtaining token to authenticate with. -- Add new ``AuthenticationFault`` exception type to indicate when server communication has failed +- Add new ``AuthenticationFault`` exception type to indicate when server communication has failed due to authentication reasons. 
-- Add support for ``SHOTGUN_API_CACERTS`` environment variable to provide location of external +- Add support for ``SHOTGUN_API_CACERTS`` environment variable to provide location of external SSL certificates file. - Fixes and updates to various tests. v3.0.18 (2015 Mar 13) ===================== -- Add ability to query the per-project visibility status for entities, fields and statuses. +- Add ability to query the per-project visibility status for entities, fields and statuses. (requires Shotgun server >= v5.4.4) v3.0.17 (2014 Jul 10) ===================== - Add ability to update ``last_accessed_by_current_user`` on Project. -- Add workaround for `bug #9291 in Python 2.7 `_ affecting +- Add workaround for `bug #9291 in Python 2.7 `_ affecting mimetypes library on Windows. - Add platform and Python version to user-agent (eg. ``shotgun-json (3.0.17); Python 2.7 (Mac)``) @@ -343,7 +343,7 @@ v3.0.16 (2014 May 23) v3.0.15 (2014 Mar 6) ==================== -- Fixed bug which allowed a value of ``None`` for password parameter in +- Fixed bug which allowed a value of ``None`` for password parameter in :meth:`authenticate_human_user` - Add :meth:`follow`, :meth:`unfollow` and :meth:`followers` methods. - Add ability to login as HumanUser. @@ -355,24 +355,24 @@ v3.0.14 (2013 Jun 26) ===================== - added: additional tests for thumbnails. -- added: support for downloading from s3 in :meth:`download_attachment`. Accepts an Attachment - entity dict as a parameter (is still backwards compatible with passing in an Attachment id). -- added: optional ``file_path`` parameter to :meth:`download_attachment` to write data directly to +- added: support for downloading from s3 in :meth:`download_attachment`. Accepts an Attachment + entity dict as a parameter (is still backwards compatible with passing in an Attachment id). +- added: optional ``file_path`` parameter to :meth:`download_attachment` to write data directly to disk instead of loading into memory. 
(thanks to Adam Goforth `@aag `_) v3.0.13 (2013 Apr 11) ===================== -- fixed: #20856 :meth:`authenticate_human_user` login was sticky and would be used for permissions +- fixed: #20856 :meth:`authenticate_human_user` login was sticky and would be used for permissions and logging. v3.0.12 (2013 Feb 22) ===================== *no tag* -- added: #18171 New ``ca_certs`` argument to the :class:`Shotgun` constructor to specify the +- added: #18171 New ``ca_certs`` argument to the :class:`Shotgun` constructor to specify the certificates to use in SSL validation. -- added: ``setup.py`` doesn't compress the installed ``.egg`` file which makes the +- added: ``setup.py`` doesn't compress the installed ``.egg`` file which makes the ``cacerts.txt`` file accessible. v3.0.11 (2013 Jan 31) @@ -383,21 +383,21 @@ v3.0.11 (2013 Jan 31) v3.0.10 (2013 Jan 25) ===================== -- added: :meth:`add_user_agent()` and :meth:`reset_user_agent` methods to allow client code to add +- added: :meth:`add_user_agent()` and :meth:`reset_user_agent` methods to allow client code to add strings to track. -- added: Changed default ``user-agent`` to include API version. +- added: Changed default ``user-agent`` to include API version. - updated: advanced summarize filter support. - fixed: #19830 :meth:`share_thumbnail` errors when source has no thumbnail. v3.0.9 (2012 Dec 05) ==================== -- added: :meth:`share_thumbnail` method to share the same thumbnail record and media between +- added: :meth:`share_thumbnail` method to share the same thumbnail record and media between entities. -- added: proxy handling to methods that transfer binary data (ie. :meth:`upload`, +- added: proxy handling to methods that transfer binary data (ie. :meth:`upload`, :meth:`upload_thumbnail`, etc.). - updated: default logging level to WARN. 
-- updated: documentation for :meth:`summarize()` method, previously released but without +- updated: documentation for :meth:`summarize()` method, previously released but without documentation. - fixed: unicode strings not always being encoded correctly. - fixed: :meth:`create()` generates error when ``return_fields`` is None. @@ -411,10 +411,10 @@ v3.0.9.beta2 (2012 Mar 19) ========================== - use relative imports for included libraries when using Python v2.5 or later. -- replace sideband request for ``image`` (thumbnail) field with native support (requires Shotgun - server >= v3.3.0. Request will still work on older versions but fallback to slow sideband +- replace sideband request for ``image`` (thumbnail) field with native support (requires Shotgun + server >= v3.3.0. Request will still work on older versions but fallback to slow sideband method). -- allow setting ``image`` and ``filmstrip_thumbnail`` in data dict on :meth:`create` and +- allow setting ``image`` and ``filmstrip_thumbnail`` in data dict on :meth:`create` and :meth:`update` (thanks `@hughmacdonald `_). - fixed bug causing ``Attachment.tag_list`` to be set to ``"None"`` (str) for uploads. @@ -433,7 +433,7 @@ v3.0.8 (2011 Oct 7) - added the :meth:`summarize` method. - refactored single file into package. - tests added (Thanks to Aaron Morton `@amorton `_). -- return all strings as ascii for backwards compatibility, added ``ensure_ascii`` parameter to +- return all strings as ascii for backwards compatibility, added ``ensure_ascii`` parameter to enable returning unicode. v3.0.7 (2011 Apr 04) @@ -473,7 +473,7 @@ v3.0.2 (2010 Aug 27) v3.0.1 (2010 May 10) ==================== -- :meth:`find`: default sorting to ascending, if not set (instead of requiring +- :meth:`find`: default sorting to ascending, if not set (instead of requiring ascending/descending). - :meth:`upload` and :meth:`upload_thumbnail`: pass auth info through. @@ -481,7 +481,7 @@ v3.0 (2010 May 5) ================= - non-beta! 
-- add :meth:`batch` method to do multiple :meth:`create`, :meth:`update`, and :meth:`delete` +- add :meth:`batch` method to do multiple :meth:`create`, :meth:`update`, and :meth:`delete` operations in one request to the server (requires Shotgun server to be v1.13.0 or higher). v3.0b8 (2010 Feb 19) @@ -498,7 +498,7 @@ v3.0b7 (2009 Nov 30) v3.0b6 (2009 Oct 20) ==================== -- add support for ``HTTP/1.1 keepalive``, which greatly improves performance for multiple +- add support for ``HTTP/1.1 keepalive``, which greatly improves performance for multiple requests. - add more helpful error if server entered is not ``http`` or ``https`` - add support assigning tags to file uploads (for Shotgun version >= 1.10.6). @@ -522,6 +522,6 @@ v3.0b3 (2009 June 24) - added ``schema_*`` methods for accessing entities and fields. - added support for http proxy servers. - added ``__version__`` string. -- removed ``RECORDS_PER_PAGE`` global (can just set ``records_per_page`` on the Shotgun object +- removed ``RECORDS_PER_PAGE`` global (can just set ``records_per_page`` on the Shotgun object after initializing it). - removed ``api_ver`` from the constructor, as this class is only designed to work with API v3. diff --git a/LICENSE b/LICENSE index a32a5bdcb..716d625d8 100644 --- a/LICENSE +++ b/LICENSE @@ -5,12 +5,12 @@ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - + list of conditions and the following disclaimer. + - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
- + - Neither the name of the Shotgun Software Inc nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. @@ -51,4 +51,4 @@ BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE -OF THIS SOFTWARE. \ No newline at end of file +OF THIS SOFTWARE. diff --git a/SECURITY.md b/SECURITY.md index c32c73245..0cf2a2664 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -32,4 +32,4 @@ configurations, reproduction steps, exploit code, impact, etc. ## Additional Information -Please check out the [Flow Production Tracking Security White Paper](https://help.autodesk.com/view/SGSUB/ENU/?guid=SG_Administrator_ar_general_security_ar_security_white_paper_html). \ No newline at end of file +Please check out the [Flow Production Tracking Security White Paper](https://help.autodesk.com/view/SGSUB/ENU/?guid=SG_Administrator_ar_general_security_ar_security_white_paper_html). diff --git a/azure-pipelines-templates/code_style_validation.yml b/azure-pipelines-templates/code_style_validation.yml new file mode 100644 index 000000000..69e82b7e0 --- /dev/null +++ b/azure-pipelines-templates/code_style_validation.yml @@ -0,0 +1,50 @@ +# Copyright (c) 2024, Shotgun Software Inc. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# - Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# - Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. 
+# +# - Neither the name of the Shotgun Software Inc nor the names of its +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +jobs: +- job: code_style_validation + displayName: Code Style Validation + pool: + vmImage: 'ubuntu-latest' + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: 3.9 + addToPath: True + architecture: 'x64' + + - script: | + pip install --upgrade pip setuptools wheel + pip install --upgrade pre-commit + displayName: Install dependencies + + - bash: pre-commit autoupdate + displayName: Update pre-commit hook versions + + - bash: pre-commit run --all + displayName: Validate code with pre-commit diff --git a/azure-pipelines-templates/run-tests.yml b/azure-pipelines-templates/run-tests.yml index edf5ffe4a..831c276ec 100644 --- a/azure-pipelines-templates/run-tests.yml +++ b/azure-pipelines-templates/run-tests.yml @@ -34,7 +34,7 @@ parameters: jobs: # The job will be named after the OS and Azure will suffix the strategy to make it unique # so we'll have a job name "Windows Python27" for example. What's a strategy? 
Strategies are the - # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python27" and + # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python27" and # " Python37". - job: ${{ parameters.name }} pool: @@ -63,7 +63,7 @@ jobs: # Specifies which version of Python we want to use. That's where the strategy comes in. # Each job will share this set of steps, but each of them will receive a different # $(python.version) - # TODO: We should provide `githubToken` if we want to download a python release. + # TODO: We should provide `githubToken` if we want to download a python release. # Otherwise we may hit the GitHub anonymous download limit. - task: UsePythonVersion@0 inputs: @@ -138,7 +138,7 @@ jobs: Invoke-WebRequest -Uri https://uploader.codecov.io/latest/windows/codecov.exe -Outfile codecov.exe .\codecov.exe -f coverage.xml displayName: Uploading code coverage - - ${{ elseif eq(parameters.name, 'Linux') }}: + - ${{ elseif eq(parameters.name, 'Linux') }}: - script: | curl -Os https://uploader.codecov.io/latest/linux/codecov chmod +x codecov diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 989745755..52e6cfa9c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -51,6 +51,7 @@ pr: # This here is the list of jobs we want to run for our build. # Jobs run in parallel. jobs: +- template: azure-pipelines-templates/code_style_validation.yml # These are jobs templates, they allow to reduce the redundancy between # variations of the same build. We pass in the image name diff --git a/docs/advanced/iron_python.rst b/docs/advanced/iron_python.rst index 6aac0a6a9..62ad6d791 100644 --- a/docs/advanced/iron_python.rst +++ b/docs/advanced/iron_python.rst @@ -34,4 +34,4 @@ v3.0.20 can be used with IronPython with a little bit of added work: lower level SSL library backing python's network infrastructure is attempting to connect to our servers via SSLv3, which is no longer supported. 
You can use the code from this gist to force the SSL connections to use a specific protocol. The forked repo linked above has an example of how to - do that to force the use of TLSv1. \ No newline at end of file + do that to force the use of TLSv1. diff --git a/docs/advanced/packaging.rst b/docs/advanced/packaging.rst index d46426e73..8467db9e2 100644 --- a/docs/advanced/packaging.rst +++ b/docs/advanced/packaging.rst @@ -6,7 +6,7 @@ Packaging an application with py2app (or py2exe) You can create standalone applications with Python scripts by using `py2app `_ on OS X or `py2exe `_ on -Windows. This is often done to more easily distribute applications that have a GUI based on +Windows. This is often done to more easily distribute applications that have a GUI based on toolkits like Tk, Qt or others. There are caveats you need to be aware of when creating such an app. @@ -37,4 +37,4 @@ into the Flow Production Tracking connection's constructor:: sg = shotgun_api3.Shotgun('https://my-site.shotgrid.autodesk.com', 'script_name', 'script_key', ca_certs=ca_certs) -The process for py2exe should be similar. \ No newline at end of file +The process for py2exe should be similar. diff --git a/docs/authentication.rst b/docs/authentication.rst index 0e5fe8572..ea049fbd0 100644 --- a/docs/authentication.rst +++ b/docs/authentication.rst @@ -62,4 +62,3 @@ For Scripts, the default permission role is "API Admin User" which allows full a When using user-based authentication in your script, it will be bound by the permission role assigned to you in Flow Production Tracking. For example, if you don't have access to edit the status field on Shots, your script won't be able to either. Attempting to perform actions that are prohibited by permissions will raise an appropriate exception. .. seealso:: `Permissions Documentation `_ - diff --git a/docs/changelog.rst b/docs/changelog.rst index f07fa1f9c..3b4977908 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -1,3 +1,3 @@ .. 
currentmodule:: shotgun_api3.shotgun.Shotgun -.. include:: ../HISTORY.rst \ No newline at end of file +.. include:: ../HISTORY.rst diff --git a/docs/cookbook.rst b/docs/cookbook.rst index f69334a7b..fe0a5a300 100644 --- a/docs/cookbook.rst +++ b/docs/cookbook.rst @@ -3,7 +3,7 @@ API Cookbook ************ Here we have a collection of useful information you can use for reference when writing your API -scripts. From usage tips and gotchas to deeper examples of working with entities like Tasks and +scripts. From usage tips and gotchas to deeper examples of working with entities like Tasks and Files, there's a lot of example code in here for you to play with. .. rubric:: Usage Tips @@ -28,7 +28,7 @@ and paste any of these into your own scripts. .. rubric:: Working With Files -You'll probably be doing some work with files at your studio. This is a deep dive into some of +You'll probably be doing some work with files at your studio. This is a deep dive into some of the inners of how Flow Production Tracking handles files (also called Attachments) and the different ways to link to them. @@ -51,12 +51,12 @@ need to do. .. rubric:: Smart Cut Fields -Smart Cut Fields are deprecated in favor of the +Smart Cut Fields are deprecated in favor of the `new cut support added in ShotGrid v7.0 `_. This documentation remains only to support studios who may not have upgraded to the new cut support -features. +features. .. toctree:: :maxdepth: 2 - cookbook/smart_cut_fields \ No newline at end of file + cookbook/smart_cut_fields diff --git a/docs/cookbook/examples/ami_handler.rst b/docs/cookbook/examples/ami_handler.rst index f64ccd558..3fb5e3571 100644 --- a/docs/cookbook/examples/ami_handler.rst +++ b/docs/cookbook/examples/ami_handler.rst @@ -4,15 +4,15 @@ Handling Action Menu Item Calls ############################### -This is an example ActionMenu Python class to handle the ``GET`` request sent from an -ActionMenuItem. 
It doesn't manage dispatching custom protocols but rather takes the arguments -from any ``GET`` data and parses them into the easily accessible and correctly typed instance +This is an example ActionMenu Python class to handle the ``GET`` request sent from an +ActionMenuItem. It doesn't manage dispatching custom protocols but rather takes the arguments +from any ``GET`` data and parses them into the easily accessible and correctly typed instance variables for your Python scripts. Available as a Gist at https://gist.github.com/3253287 .. seealso:: - Our `support site has more information about Action Menu Items + Our `support site has more information about Action Menu Items `_. ************ diff --git a/docs/cookbook/examples/ami_version_packager.rst b/docs/cookbook/examples/ami_version_packager.rst index 5d3035014..415075a1d 100644 --- a/docs/cookbook/examples/ami_version_packager.rst +++ b/docs/cookbook/examples/ami_version_packager.rst @@ -4,7 +4,7 @@ Using an ActionMenuItem to Package Versions for a Client ######################################################## -This is an example script to demonstrate how you can use an ActionMenuItem to launch a local +This is an example script to demonstrate how you can use an ActionMenuItem to launch a local script to package up files for a client. It performs the following: - Downloads Attachments from a specified field for all selected entities. 
@@ -37,10 +37,10 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h login who ran the ActionMenuItem ('Demo_Project_2010-04-29-172210_kp.tar.gz'): sa = ShotgunAction(sys.argv[1]) - sg = shotgun_connect() + sg = shotgun_connect() if sa.action == 'package4client': r = packageFilesForClient('sg_qt','/path/where/i/want/to/put/the/archive/') - + """ # --------------------------------------------------------------------------------------------- @@ -61,8 +61,8 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h # --------------------------------------------------------------------------------------------- # Flow Production Tracking server auth info shotgun_conf = { - 'url':'https://my-site.shotgrid.autodesk.com', - 'name':'YOUR_SCRIPT_NAME_HERE', + 'url':'https://my-site.shotgrid.autodesk.com', + 'name':'YOUR_SCRIPT_NAME_HERE', 'key':'YOUR_SCRIPT_KEY_HERE' } @@ -70,9 +70,9 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h logfile = os.path.dirname(sys.argv[0])+"/version_packager.log" # temporary directory to download movie files to and create thumbnail files in - file_dir = os.path.dirname(sys.argv[0])+"/tmp" + file_dir = os.path.dirname(sys.argv[0])+"/tmp" - # compress command + # compress command # tar czf /home/user/backup_www.tar.gz -C / var/www/html compress_cmd = "tar czf %s -C / %s" @@ -89,7 +89,7 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h # ---------------------------------------------- # Set up logging # ---------------------------------------------- - def init_log(filename="version_packager.log"): + def init_log(filename="version_packager.log"): try: logger.basicConfig(level=logger.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s', @@ -98,8 +98,8 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h filemode='w+') except IOError, e: raise ShotgunException ("Unable to open logfile for 
writing: %s" % e) - logger.info("Version Packager logging started.") - return logger + logger.info("Version Packager logging started.") + return logger # ---------------------------------------------- @@ -111,9 +111,9 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h try: attachment_id = int(attachment_id) except: - # not an integer. + # not an integer. return None - # raise ShotgunException("invalid Attachment id returned. Expected an integer: %s "% attachment_id) + # raise ShotgunException("invalid Attachment id returned. Expected an integer: %s "% attachment_id) return attachment_id @@ -126,16 +126,16 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h if type(attachment_id) != int: return None # download the attachment file from Flow Production Tracking and write it to local disk - logger.info("Downloading Attachment #%s" % (attachment_id)) + logger.info("Downloading Attachment #%s" % (attachment_id)) stream = sg.download_attachment(attachment_id) try: file = open(destination_filename, 'w') file.write(stream) file.close() logger.info("Downloaded attachment %s" % (destination_filename)) - return True + return True except e: - raise ShotgunException("unable to write attachment to disk: %s"% e) + raise ShotgunException("unable to write attachment to disk: %s"% e) # ---------------------------------------------- @@ -194,28 +194,28 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h logger.info("copied files to: %s" % destination_directory) return destination_directory - - + + def packageFilesForClient(file_field,destination_dir): - - # get entities matching the selected ids - logger.info("Querying Shotgun for %s %ss" % (len(sa.selected_ids_filter), sa.params['entity_type'])) + + # get entities matching the selected ids + logger.info("Querying Shotgun for %s %ss" % (len(sa.selected_ids_filter), sa.params['entity_type'])) entities = 
sg.find(sa.params['entity_type'],sa.selected_ids_filter,['id','code',file_field],filter_operator='any') - + # download the attachments for each entity, zip them, and copy to destination directory files = [] for e in entities: if not e[file_field]: - logger.info("%s #%s: No file exists. Skippinsa." % (sa.params['entity_type'], e['id'])) + logger.info("%s #%s: No file exists. Skippinsa." % (sa.params['entity_type'], e['id'])) else: - logger.info("%s #%s: %s" % (sa.params['entity_type'], e['id'], e[file_field])) + logger.info("%s #%s: %s" % (sa.params['entity_type'], e['id'], e[file_field])) path_to_file = file_dir+"/"+re.sub(r"\s+", '_', e[file_field]['name']) - result = download_attachment_to_disk(e[file_field], path_to_file ) - + result = download_attachment_to_disk(e[file_field], path_to_file ) + # only include attachments. urls won't return true if result: files.append(path_to_file) - + # compress files # create a nice valid destination filename project_name = '' @@ -223,7 +223,7 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h project_name = re.sub(r"\s+", '_', sa.params['project_name'])+'_' dest_filename = project_name+datetime.today().strftime('%Y-%m-%d-%H%M%S')+"_"+sa.params['user_login'] archive = compress_files(files,file_dir+"/"+dest_filename) - + # now that we have the archive, remove the downloads r = remove_downloaded_files(files) @@ -232,26 +232,25 @@ It is intended to be used in conjunction with the script dicussed in :ref:`ami_h return True - + # ---------------------------------------------- # Main Block # ---------------------------------------------- if __name__ == "__main__": init_log(logfile) - + try: sa = ShotgunAction(sys.argv[1]) logger.info("Firing... 
%s" % (sys.argv[1]) ) except IndexError, e: raise ShotgunException("Missing POST arguments") - - sg = Shotgun(shotgun_conf['url'], shotgun_conf['name'], shotgun_conf['key'],convert_datetimes_to_utc=convert_tz) - + + sg = Shotgun(shotgun_conf['url'], shotgun_conf['name'], shotgun_conf['key'],convert_datetimes_to_utc=convert_tz) + if sa.action == 'package4client': result = packageFilesForClient('sg_qt','/Users/kp/Documents/shotgun/dev/api/files/') else: raise ShotgunException("Unknown action... :%s" % sa.action) - - - print("\nVersion Packager done!") + + print("\nVersion Packager done!") diff --git a/docs/cookbook/examples/basic_create_shot.rst b/docs/cookbook/examples/basic_create_shot.rst index 4a9ece5f6..7513305fa 100644 --- a/docs/cookbook/examples/basic_create_shot.rst +++ b/docs/cookbook/examples/basic_create_shot.rst @@ -7,28 +7,28 @@ Building the data and calling :meth:`~shotgun_api3.Shotgun.create` ------------------------------------------------------------------ To create a Shot, you need to provide the following values: -- ``project`` is a link to the Project the Shot belongs to. It should be a dictionary like +- ``project`` is a link to the Project the Shot belongs to. It should be a dictionary like ``{"type": "Project", "id": 123}`` where ``id`` is the ``id`` of the Project. - ``code`` (this is the field that stores the name Shot) - optionally any other info you want to provide Example:: - data = { + data = { 'project': {"type":"Project","id": 4}, 'code': '100_010', 'description': 'Open on a beautiful field with fuzzy bunnies', - 'sg_status_list': 'ip' + 'sg_status_list': 'ip' } result = sg.create('Shot', data) This will create a new Shot named "100_010" in the Project "Gunslinger" (which has an ``id`` of 4). -- ``data`` is a list of key/value pairs where the key is the column name to update and the value +- ``data`` is a list of key/value pairs where the key is the column name to update and the value is the the value to set. 
- ``sg`` is the Flow Production Tracking API instance you created in :ref:`example_sg_instance`. -- ``create()`` is the :meth:`shotgun_api3.Shotgun.create` API method we are calling. We pass in the +- ``create()`` is the :meth:`shotgun_api3.Shotgun.create` API method we are calling. We pass in the entity type we're searching for and the data we're setting. .. rubric:: Result @@ -67,27 +67,27 @@ The Complete Example # Globals # -------------------------------------- # make sure to change this to match your Flow Production Tracking server and auth credentials. - SERVER_PATH = "https://my-site.shotgrid.autodesk.com" - SCRIPT_NAME = 'my_script' + SERVER_PATH = "https://my-site.shotgrid.autodesk.com" + SCRIPT_NAME = 'my_script' SCRIPT_KEY = '27b65d7063f46b82e670fe807bd2b6f3fd1676c1' # -------------------------------------- - # Main + # Main # -------------------------------------- - if __name__ == '__main__': + if __name__ == '__main__': sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_NAME, SCRIPT_KEY) # -------------------------------------- # Create a Shot with data # -------------------------------------- - data = { + data = { 'project': {"type":"Project","id": 4}, 'code': '100_010', 'description': 'Open on a beautiful field with fuzzy bunnies', - 'sg_status_list': 'ip' + 'sg_status_list': 'ip' } - result = sg.create('Shot', data) + result = sg.create('Shot', data) pprint(result) print("The id of the {} is {}.".format(result['type'], result['id'])) @@ -100,4 +100,3 @@ And here is the output:: 'sg_status_list': 'ip', 'type': 'Shot'} The id of the Shot is 40435. - diff --git a/docs/cookbook/examples/basic_create_shot_task_template.rst b/docs/cookbook/examples/basic_create_shot_task_template.rst index 0fa3b0828..ab6248227 100644 --- a/docs/cookbook/examples/basic_create_shot_task_template.rst +++ b/docs/cookbook/examples/basic_create_shot_task_template.rst @@ -14,7 +14,7 @@ First we need to find the Task Template we're going to apply. 
We'll assume you k The Resulting Task Template --------------------------- -Assuming the task template was found, we will now have something like this in our ``template`` +Assuming the task template was found, we will now have something like this in our ``template`` variable:: {'type': 'TaskTemplate', 'id': 12} @@ -30,16 +30,16 @@ Now we can create the Shot with the link to the ``TaskTemplate`` to apply. 'task_template': template } result = sg.create('Shot', data) -This will create a new Shot named "100_010" linked to the TaskTemplate "3D Shot Template" and +This will create a new Shot named "100_010" linked to the TaskTemplate "3D Shot Template" and Flow Production Tracking will then create the Tasks defined in the template and link them to the Shot you just created. -- ``data`` is a list of key/value pairs where the key is the column name to update and the value is +- ``data`` is a list of key/value pairs where the key is the column name to update and the value is the value. - ``project`` and `code` are required - ``description`` is just a text field that you might want to update as well -- ``task_template`` is another entity column where we set the Task Template which has the Tasks we - wish to create by default on this Shot. We found the specific template we wanted to assign in the +- ``task_template`` is another entity column where we set the Task Template which has the Tasks we + wish to create by default on this Shot. 
We found the specific template we wanted to assign in the previous block by searching Result @@ -59,7 +59,7 @@ The variable ``result`` now contains the dictionary of the new Shot that was cre } -If we now search for the Tasks linked to the Shot, we'll find the Tasks that match our +If we now search for the Tasks linked to the Shot, we'll find the Tasks that match our ``TaskTemplate``:: tasks = sg.find('Task', filters=[['entity', 'is', result]]) diff --git a/docs/cookbook/examples/basic_create_version_link_shot.rst b/docs/cookbook/examples/basic_create_version_link_shot.rst index 71e6f3e1e..3f025eb46 100644 --- a/docs/cookbook/examples/basic_create_version_link_shot.rst +++ b/docs/cookbook/examples/basic_create_version_link_shot.rst @@ -5,7 +5,7 @@ new ``Version`` entity linked to the Shot. Find the Shot ------------- -First we need to find the Shot since we'll need to know know its ``id`` in order to link our Version +First we need to find the Shot since we'll need to know know its ``id`` in order to link our Version to it. :: @@ -16,8 +16,8 @@ to it. Find the Task ------------- -Now we find the Task that the Version relates to, again so we can use the ``id`` to link it to the -Version we're creating. For this search we'll use the Shot ``id`` (which we have now in the ``shot`` +Now we find the Task that the Version relates to, again so we can use the ``id`` to link it to the +Version we're creating. For this search we'll use the Shot ``id`` (which we have now in the ``shot`` variable from the previous search) and the Task Name, which maps to the ``content`` field. :: @@ -27,7 +27,7 @@ variable from the previous search) and the Task Name, which maps to the ``conten task = sg.find_one('Task', filters) .. note:: Linking a Task to the Version is good practice. 
By doing so it is easy for users to see - at what stage a particular Version was created, and opens up other possibilities for tracking + at what stage a particular Version was created, and opens up other possibilities for tracking in Flow Production Tracking. We highly recommend doing this whenever possible. Create the Version @@ -44,22 +44,22 @@ Now we can create the Version with the link to the Shot and the Task:: 'user': {'type': 'HumanUser', 'id': 165} } result = sg.create('Version', data) -This will create a new Version named '100_010_anim_v1' linked to the 'Animation' Task for Shot +This will create a new Version named '100_010_anim_v1' linked to the 'Animation' Task for Shot '100_010' in the Project 'Gunslinger'. -- ``data`` is a list of key/value pairs where the key is the column name to update and the value is +- ``data`` is a list of key/value pairs where the key is the column name to update and the value is the value to set. - ``project`` and ``code`` are required -- ``description`` and ``sg_path_to_frames`` are just text fields that you might want to update as +- ``description`` and ``sg_path_to_frames`` are just text fields that you might want to update as well -- ``sg_status_list`` is the status column for the Version. Here we are setting it to "rev" (Pending +- ``sg_status_list`` is the status column for the Version. Here we are setting it to "rev" (Pending Review) so that it will get reviewed in the next dailies session and people will "ooh" and "aaah". -- ``entity`` is where we link this version to the Shot. Entity columns are always handled with this +- ``entity`` is where we link this version to the Shot. Entity columns are always handled with this format. You must provide the entity ``type`` and its ``id``. -- ``sg_task`` is another entity link field specifically for the Version's Task link. This uses the +- ``sg_task`` is another entity link field specifically for the Version's Task link. 
This uses the same entity format as the Shot link, but pointing to the Task entity this time. -- ``user`` is another entity column where we set the artist responsible for this masterpiece. In - this example, I know the 'id' that corresponds to this user, but if you don't know the id you can +- ``user`` is another entity column where we set the artist responsible for this masterpiece. In + this example, I know the 'id' that corresponds to this user, but if you don't know the id you can look it up by searching on any of the fields, similar to what we did for the Shot above, like:: filters = [['login', 'is', 'jschmoe']] @@ -72,11 +72,11 @@ The ``result`` variable now contains the ``id`` of the new Version that was crea Upload a movie for review in Screening Room ------------------------------------------- -If Screening Room's transcoding feature is enabled on your site (hosted sites have this by -default), then you can use the :meth:`~shotgun_api3.Shotgun.upload` method to upload a QuickTime -movie, PDF, still image, etc. to the ``sg_uploaded_movie`` field on a Version. Once the movie is -uploaded, it will automatically be queued for transcoding. When transcoding is complete, the -Version will be playable in the Screening Room app, or in the Overlay player by clicking on the +If Screening Room's transcoding feature is enabled on your site (hosted sites have this by +default), then you can use the :meth:`~shotgun_api3.Shotgun.upload` method to upload a QuickTime +movie, PDF, still image, etc. to the ``sg_uploaded_movie`` field on a Version. Once the movie is +uploaded, it will automatically be queued for transcoding. When transcoding is complete, the +Version will be playable in the Screening Room app, or in the Overlay player by clicking on the Play button that will appear on the Version's thumbnail. -.. note:: Transcoding also generates a thumbnail and filmstrip thumbnail automatically. \ No newline at end of file +.. 
note:: Transcoding also generates a thumbnail and filmstrip thumbnail automatically. diff --git a/docs/cookbook/examples/basic_delete_shot.rst b/docs/cookbook/examples/basic_delete_shot.rst index 5275735d6..4f2e91018 100644 --- a/docs/cookbook/examples/basic_delete_shot.rst +++ b/docs/cookbook/examples/basic_delete_shot.rst @@ -5,7 +5,7 @@ Calling :meth:`~shotgun_api3.Shotgun.delete` -------------------------------------------- Deleting an entity in Flow Production Tracking is pretty straight-forward. No extraneous steps required.:: - result = sg.delete("Shot", 40435) + result = sg.delete("Shot", 40435) Result ------ @@ -30,23 +30,22 @@ The Complete Example # -------------------------------------- # make sure to change this to match your Flow Production Tracking server and auth credentials. SERVER_PATH = "https://my-site.shotgrid.autodesk.com" - SCRIPT_NAME = 'my_script' + SCRIPT_NAME = 'my_script' SCRIPT_KEY = '27b65d7063f46b82e670fe807bd2b6f3fd1676c1' # -------------------------------------- - # Main + # Main # -------------------------------------- - if __name__ == '__main__': + if __name__ == '__main__': sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_NAME, SCRIPT_KEY) # -------------------------------------- # Delete a Shot by id # -------------------------------------- - result = sg.delete("Shot", 40435) + result = sg.delete("Shot", 40435) pprint(result) And here is the output:: True - diff --git a/docs/cookbook/examples/basic_find_shot.rst b/docs/cookbook/examples/basic_find_shot.rst index 88c8c81e0..945eb1be6 100644 --- a/docs/cookbook/examples/basic_find_shot.rst +++ b/docs/cookbook/examples/basic_find_shot.rst @@ -12,7 +12,7 @@ We are going to assume we know the 'id' of the Shot we're looking for in this ex Pretty simple right? Well here's a little more insight into what's going on. -- ``filters`` is an list of filter conditions. In this example we are filtering for Shots where +- ``filters`` is an list of filter conditions. 
In this example we are filtering for Shots where the ``id`` column is **40435**. - ``sg`` is the Flow Production Tracking API instance. - ``find_one()`` is the :meth:`~shotgun_api3.Shotgun.find_one` API method we are calling. We @@ -25,13 +25,13 @@ So what does this return? The variable result now contains:: {'type': 'Shot','id': 40435} -By default, :meth:`~shotgun_api3.Shotgun.find_one` returns a single dictionary object with +By default, :meth:`~shotgun_api3.Shotgun.find_one` returns a single dictionary object with the ``type`` and ``id`` fields. So in this example, we found a Shot matching that id, and Flow Production Tracking returned it as a dictionary object with ``type`` and ``id`` keys . -How do we know that result contains the Shot dictionary object? You can trust us... but just to be -sure, the :mod:`pprint` (PrettyPrint) module from the Python standard library is a really good tool -to help with debugging. It will print out objects in a nicely formatted way that makes things +How do we know that result contains the Shot dictionary object? You can trust us... but just to be +sure, the :mod:`pprint` (PrettyPrint) module from the Python standard library is a really good tool +to help with debugging. It will print out objects in a nicely formatted way that makes things easier to read. So we'll add that to the import section of our script.:: import shotgun_api3 @@ -54,13 +54,13 @@ The Complete Example # -------------------------------------- # make sure to change this to match your Flow Production Tracking server and auth credentials. 
SERVER_PATH = "https://my-site.shotgrid.autodesk.com" - SCRIPT_NAME = 'my_script' + SCRIPT_NAME = 'my_script' SCRIPT_KEY = '27b65d7063f46b82e670fe807bd2b6f3fd1676c1' # -------------------------------------- - # Main + # Main # -------------------------------------- - if __name__ == '__main__': + if __name__ == '__main__': sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_NAME, SCRIPT_KEY) @@ -68,7 +68,7 @@ The Complete Example # Find a Shot by id # -------------------------------------- filters = [['id', 'is', 40435]] - result = sg.find_one('Shot', filters) + result = sg.find_one('Shot', filters) pprint(result) And here is the output:: diff --git a/docs/cookbook/examples/basic_sg_instance.rst b/docs/cookbook/examples/basic_sg_instance.rst index b39c78432..d17b57de5 100644 --- a/docs/cookbook/examples/basic_sg_instance.rst +++ b/docs/cookbook/examples/basic_sg_instance.rst @@ -13,14 +13,14 @@ authentication. ``sg`` represents your Flow Production Tracking API instance. Be import shotgun_api3 SERVER_PATH = "https://my-site.shotgrid.autodesk.com" - SCRIPT_NAME = 'my_script' + SCRIPT_NAME = 'my_script' SCRIPT_KEY = '27b65d7063f46b82e670fe807bd2b6f3fd1676c1' sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_NAME, SCRIPT_KEY) - # Just for demo purposes, this will print out property and method names available on the + # Just for demo purposes, this will print out property and method names available on the # sg connection object pprint.pprint([symbol for symbol in sorted(dir(sg)) if not symbol.startswith('_')]) For further information on what you can do with this Flow Production Tracking object you can read the -:ref:`API reference `. \ No newline at end of file +:ref:`API reference `. 
diff --git a/docs/cookbook/examples/basic_update_shot.rst b/docs/cookbook/examples/basic_update_shot.rst index 52e57e70e..c2413c3ee 100644 --- a/docs/cookbook/examples/basic_update_shot.rst +++ b/docs/cookbook/examples/basic_update_shot.rst @@ -3,23 +3,23 @@ Update A Shot Building the data and calling :meth:`~shotgun_api3.Shotgun.update` ------------------------------------------------------------------ -To update a Shot, you need to provide the ``id`` of the Shot and a list of fields you want to +To update a Shot, you need to provide the ``id`` of the Shot and a list of fields you want to update.:: - data = { + data = { 'description': 'Open on a beautiful field with fuzzy bunnies', - 'sg_status_list': 'ip' + 'sg_status_list': 'ip' } result = sg.update('Shot', 40435, data) -This will update the ``description`` and the ``sg_status_list`` fields for the Shot with ``id`` of +This will update the ``description`` and the ``sg_status_list`` fields for the Shot with ``id`` of **40435**. - ``data`` is a list of key/value pairs where the key is the field name to update and the value to update it to. - ``sg`` is the Flow Production Tracking API instance. -- ``update()`` is the :meth:`shotgun_api3.Shotgun.update` API method we are calling. We provide it - with the entity type we're updating, the ``id`` of the entity, and the data we're updating it +- ``update()`` is the :meth:`shotgun_api3.Shotgun.update` API method we are calling. We provide it + with the entity type we're updating, the ``id`` of the entity, and the data we're updating it with. Result @@ -34,7 +34,7 @@ The variable ``result`` now contains the Shot object that with the updated value } In addition, Flow Production Tracking has returned the ``id`` for the Shot, as well as a ``type`` value. ``type`` -is provided for convenience simply to help you identify what entity type this dictionary represents. +is provided for convenience simply to help you identify what entity type this dictionary represents. 
It does not correspond to any field in Flow Production Tracking. Flow Production Tracking will *always* return the ``id`` and ``type`` keys in the dictionary when there are results @@ -57,24 +57,24 @@ The Complete Example # -------------------------------------- # make sure to change this to match your Flow Production Tracking server and auth credentials. SERVER_PATH = "https://my-site.shotgrid.autodesk.com" - SCRIPT_NAME = 'my_script' + SCRIPT_NAME = 'my_script' SCRIPT_KEY = '27b65d7063f46b82e670fe807bd2b6f3fd1676c1' # -------------------------------------- - # Main + # Main # -------------------------------------- - if __name__ == '__main__': + if __name__ == '__main__': sg = shotgun_api3.Shotgun(SERVER_PATH, SCRIPT_NAME, SCRIPT_KEY) # -------------------------------------- # Update Shot with data # -------------------------------------- - data = { + data = { 'description': 'Open on a beautiful field with fuzzy bunnies', - 'sg_status_list': 'ip' + 'sg_status_list': 'ip' } - result = sg.update('Shot', 40435, data) + result = sg.update('Shot', 40435, data) pprint(result) And here is the output:: @@ -83,4 +83,3 @@ And here is the output:: 'id': 40435, 'sg_status_list': 'ip', 'type': 'Shot'} - diff --git a/docs/cookbook/examples/basic_upload_thumbnail_version.rst b/docs/cookbook/examples/basic_upload_thumbnail_version.rst index 2ae399d5f..ba7337150 100644 --- a/docs/cookbook/examples/basic_upload_thumbnail_version.rst +++ b/docs/cookbook/examples/basic_upload_thumbnail_version.rst @@ -2,11 +2,11 @@ Upload a Thumbnail for a Version ================================ So you've created a new Version of a Shot, and you've updated Flow Production Tracking, but now you want to upload a -beauty frame to display as the thumbnail for your Version. We'll assume you already have the image -made (located on your machine at ``/v1/gun/s100/010/beauties/anim/100_010_animv1.jpg``) . And since +beauty frame to display as the thumbnail for your Version. 
We'll assume you already have the image +made (located on your machine at ``/v1/gun/s100/010/beauties/anim/100_010_animv1.jpg``) . And since you've just created your Version in Flow Production Tracking, you know its ``id`` is **214**. -.. note:: If you upload a movie file or image to the ``sg_uploaded_movie`` field and you have +.. note:: If you upload a movie file or image to the ``sg_uploaded_movie`` field and you have transcoding enabled on your server (the default for hosted sites), a thumbnail will be generated automatically as well as a filmstrip thumbnail (if possible). This is a basic example of how to manually provide or replace a thumbnail image. @@ -21,6 +21,6 @@ Upload the Image using :meth:`~shotgun_api3.Shotgun.upload_thumbnail` Flow Production Tracking will take care of resizing the thumbnail for you. If something does go wrong, an exception will be thrown and you'll see the error details. -.. note:: The result returned by :meth:`~shotgun_api3.Shotgun.upload_thumbnail` is an integer +.. note:: The result returned by :meth:`~shotgun_api3.Shotgun.upload_thumbnail` is an integer representing the id of a special ``Attachment`` entity in Flow Production Tracking. Working with Attachments - is beyond the scope of this example. :) \ No newline at end of file + is beyond the scope of this example. :) diff --git a/docs/cookbook/examples/svn_integration.rst b/docs/cookbook/examples/svn_integration.rst index 9a877b322..8b0a6ce46 100644 --- a/docs/cookbook/examples/svn_integration.rst +++ b/docs/cookbook/examples/svn_integration.rst @@ -9,26 +9,26 @@ Integrating Flow Production Tracking with Subversion consists of two basic parts - Setup a post-commit hook in Subversion. - Create a Flow Production Tracking API script to create the Revision in Flow Production Tracking. This script will be called by the post-commit hook. 
- + **************** Post-Commit Hook **************** To setup the post-commit hook: -- Locate the ``post-commit.tmpl`` file, which is found inside the ``hooks`` folder in your - repository directory. This is a template script that has lots of useful comments and can serve +- Locate the ``post-commit.tmpl`` file, which is found inside the ``hooks`` folder in your + repository directory. This is a template script that has lots of useful comments and can serve as a starting point for the real thing. -- Create your very own executable script, and save it in the same ``hooks`` folder, name it +- Create your very own executable script, and save it in the same ``hooks`` folder, name it ``post-commit``, and give it executable permission. - In your ``post-commit`` script, invoke your Flow Production Tracking API script. -If this is entirely new to you, we highly suggest reading up on the topic. O'Reilly has `a free -online guide for Subversion 1.5 and 1.6 +If this is entirely new to you, we highly suggest reading up on the topic. O'Reilly has `a free +online guide for Subversion 1.5 and 1.6 `_ -Here's an example of a post-commit hook that we've made for Subversion 1.6 using an executable -Unix shell script. The last line invokes "shotgun_api_script.py" which is our Python script that +Here's an example of a post-commit hook that we've made for Subversion 1.6 using an executable +Unix shell script. The last line invokes "shotgun_api_script.py" which is our Python script that will do all the heavy lifting. Lines 4 thru 8 queue up some objects that we'll use later on. .. code-block:: sh @@ -48,13 +48,13 @@ will do all the heavy lifting. Lines 4 thru 8 queue up some objects that we'll Explanation of selected lines ============================= -- lines ``4-5``: After the commit, Subversion leaves us two string objects in the environment: - ``REPOS`` and ``REV`` (the repository path and the revision number, respectively). 
-- lines ``7-8``: Here we use the shell command ``export`` to create two more string objects in the - environment: ``AUTHOR`` and ``COMMENT``. To get each value, we use the ``svnlook`` command with - our ``REPOS`` and ``REV`` values, first with the ``author``, and then with ``log`` subcommand. - These are actually the first two original lines of code - everything else to this point was - pre-written already in the ``post-commit.tmpl`` file. nice :) +- lines ``4-5``: After the commit, Subversion leaves us two string objects in the environment: + ``REPOS`` and ``REV`` (the repository path and the revision number, respectively). +- lines ``7-8``: Here we use the shell command ``export`` to create two more string objects in the + environment: ``AUTHOR`` and ``COMMENT``. To get each value, we use the ``svnlook`` command with + our ``REPOS`` and ``REV`` values, first with the ``author``, and then with ``log`` subcommand. + These are actually the first two original lines of code - everything else to this point was + pre-written already in the ``post-commit.tmpl`` file. nice :) - line ``10``: This is the absolute path to our Flow Production Tracking API Script. *********************************** @@ -63,7 +63,7 @@ Flow Production Tracking API Script This script will create the Revision and populate it with some metadata using the Flow Production Tracking Python API. It will create our Revision in Flow Production Tracking along with the author, comment, and because we use -Trac (a web-based interface for Subversion), it will also populate a URL field with a clickable +Trac (a web-based interface for Subversion), it will also populate a URL field with a clickable link to the Revision. .. code-block:: python @@ -84,27 +84,27 @@ link to the Revision. 
# Globals - update all of these values to those of your studio # --------------------------------------------------------------------------------------------- SERVER_PATH = 'https ://my-site.shotgrid.autodesk.com' # or http: - SCRIPT_USER = 'script_name' + SCRIPT_USER = 'script_name' SCRIPT_KEY = '3333333333333333333333333333333333333333' REVISIONS_PATH = 'https ://serveraddress/trac/changeset/' # or other web-based UI PROJECT = {'type':'Project', 'id':27} - + # --------------------------------------------------------------------------------------------- # Main # --------------------------------------------------------------------------------------------- if __name__ == '__main__': sg = Shotgun(SERVER_PATH, SCRIPT_USER, SCRIPT_KEY) - + # Set Python variables from the environment objects revision_code = os.environ['REV'] repository = os.environ['REPOS'] description = os.environ['COMMENT'] author = os.environ['AUTHOR'] - + # Set the Trac path for this specific revision revision_url = REVISIONS_PATH + revision_code - + # Validate that author is a valid Flow Production Tracking HumanUser result = sg.find_one("HumanUser", [['login', 'is', author]]) if result: @@ -118,7 +118,7 @@ link to the Revision. } revision = sg.create("Revision", parameters) print("created Revision #"+str(revision_code)) - + # Send error message if valid HumanUser is not found else: print("Unable to find a valid Flow Production Tracking User with login: {}, Revision not created in Flow Production Tracking.".format(author)) @@ -131,16 +131,16 @@ Explanation of selected lines: - line ``14``: This should be the URL to your instance of Flow Production Tracking. - lines ``15-16``: Make sure you get these values from the "Scripts" page in the Admin section of the Flow Production Tracking web application. If you're not sure how to do this, check out :doc:`authentication`. -- line ``17``: This is the address of Trac, our web-based interface that we use with Subversion. 
- You may use a different interface, or none at all, so feel free to adjust this line or ignore it +- line ``17``: This is the address of Trac, our web-based interface that we use with Subversion. + You may use a different interface, or none at all, so feel free to adjust this line or ignore it as your case may be. - line ``18``: Every Revision in Flow Production Tracking must have a Project, which is passed to the API as a - dictionary with two keys, the ``type`` and the ``id``. Of course the ``type`` value will always - remain ``Project`` (case sensitive), but the ``id`` will change by Project. To find out the + dictionary with two keys, the ``type`` and the ``id``. Of course the ``type`` value will always + remain ``Project`` (case sensitive), but the ``id`` will change by Project. To find out the ``id`` of your Project, go to the Projects page in the Flow Production Tracking web application, locate the - Project where you want your Revisions created, and then locate its ``id`` field (which you may - need to display - if you don't see it, right click on any column header then select - "Insert Column" > "Id"). Note that for this example we assume that all Revisions in this + Project where you want your Revisions created, and then locate its ``id`` field (which you may + need to display - if you don't see it, right click on any column header then select + "Insert Column" > "Id"). Note that for this example we assume that all Revisions in this Subversion repository will belong to the same Project. - lines ``28-31``: Grab the values from the objects that were left for us in the environment. - line ``34``: Add the Revision number to complete the path of our Trac url. @@ -148,10 +148,10 @@ Explanation of selected lines: Users' Flow Production Tracking logins match their Subversion names. If the user exists in Flow Production Tracking, that user's ``id`` will be returned as ``result['id']``, which we will need later on in line 46. 
- lines ``40-48``: Use all the meta data we've gathered to create a Revision in Flow Production Tracking. If none - of these lines make any sense, check out more on the :meth:`~shotgun_api3.Shotgun.create` method - here. Line 41 deserves special mention: notice that we define a dictionary called ``url`` that - has three important keys: ``content_type``, ``url``, and ``name``, and we then pass this in as - the value for the ``attachment`` field when we create the Revision. If you're even in doubt, + of these lines make any sense, check out more on the :meth:`~shotgun_api3.Shotgun.create` method + here. Line 41 deserves special mention: notice that we define a dictionary called ``url`` that + has three important keys: ``content_type``, ``url``, and ``name``, and we then pass this in as + the value for the ``attachment`` field when we create the Revision. If you're even in doubt, double check the syntax and requirements for the different field types here. *************** @@ -161,8 +161,8 @@ Troubleshooting My post-commit script is simply not running. I can run it manually, but commits are not triggering it. ====================================================================================================== -Make sure that the script is has explicitly been made executable and that all users who will -invoke it have appropriate permissions for the script and that folders going back to root. +Make sure that the script is has explicitly been made executable and that all users who will +invoke it have appropriate permissions for the script and that folders going back to root. My Flow Production Tracking API script is not getting called by the post-commit hook. 
===================================================================================== diff --git a/docs/cookbook/smart_cut_fields.rst b/docs/cookbook/smart_cut_fields.rst index 0ee74d189..928cf3b53 100644 --- a/docs/cookbook/smart_cut_fields.rst +++ b/docs/cookbook/smart_cut_fields.rst @@ -9,16 +9,16 @@ Smart Cut Fields cut support. `Read the Cut Support Documentation here `_. If you want to work with 'smart' cut fields through the API, your script should use a corresponding -'raw' fields for all updates. The 'smart_cut_fields' are primarily for display in the UI, the real +'raw' fields for all updates. The 'smart_cut_fields' are primarily for display in the UI, the real data is stored in a set of 'raw' fields that have different names. ************ Smart Fields ************ -In the UI these fields attempt to calculate values based on data entered into the various fields. -These fields can be queried via the API using the find() method, but not updated. Note that we are -deprecating this feature and recommend creating your own cut fields from scratch, which will not +In the UI these fields attempt to calculate values based on data entered into the various fields. +These fields can be queried via the API using the find() method, but not updated. Note that we are +deprecating this feature and recommend creating your own cut fields from scratch, which will not contain any calculations which have proven to be too magical at times. - ``smart_cut_duration`` diff --git a/docs/cookbook/tasks.rst b/docs/cookbook/tasks.rst index 0acb5f5e7..dd3a1426d 100644 --- a/docs/cookbook/tasks.rst +++ b/docs/cookbook/tasks.rst @@ -2,12 +2,12 @@ Working With Tasks ################## -Tasks have various special functionality available in the UI that can also be queried and +Tasks have various special functionality available in the UI that can also be queried and manipulated through the API. The sections below cover these topics. .. 
toctree:: :maxdepth: 2 - + tasks/updating_tasks tasks/task_dependencies tasks/split_tasks diff --git a/docs/cookbook/tasks/split_tasks.rst b/docs/cookbook/tasks/split_tasks.rst index 45dfc303f..d16c50e94 100644 --- a/docs/cookbook/tasks/split_tasks.rst +++ b/docs/cookbook/tasks/split_tasks.rst @@ -4,15 +4,15 @@ Split Tasks ########### -Split tasks can be created and edited via the API but must comply to some rules. Before going +Split tasks can be created and edited via the API but must comply to some rules. Before going further, a good understanding of :ref:`how Flow Production Tracking handles task dates is useful `. ******** Overview ******** -The Task entity has a field called ``splits`` which is a list of dictionaries. Each dictionary -in the list has two string keys, ``start`` and ``end``, who's values are strings representing dates +The Task entity has a field called ``splits`` which is a list of dictionaries. Each dictionary +in the list has two string keys, ``start`` and ``end``, who's values are strings representing dates in the ``YYYY-mm-dd`` format. :: @@ -21,11 +21,11 @@ in the ``YYYY-mm-dd`` format. - Splits should be ordered from eldest to newest. - There should be gaps between each split. - - - Gaps are defined as at least one working day. Non-workdays such as weekends and holidays + + - Gaps are defined as at least one working day. Non-workdays such as weekends and holidays are not gaps. -If there are multiple splits but there between two or more splits there is no gap, an error will be +If there are multiple splits but there between two or more splits there is no gap, an error will be raised. For example:: >>> sg.update('Task', 2088, {'splits':[{'start':'2012-12-10', 'end':'2012-12-11'}, {'start':'2012-12-12', 'end':'2012-12-14'}, {'start':'2012-12-19', 'end':'2012-12-20'}]}) @@ -40,7 +40,7 @@ raised. 
For example:: shotgun_api3.shotgun.Fault: API update() CRUD ERROR #5: Update failed for [Task.splits]: (task.rb) The start date in split segment 2 is only one calendar day away from the end date of the previous segment. There must be calendar days between split segments. Alternately, a split value can be set to ``None`` to remove splits (you can also use an empty list). -This will preserve the ``start_date`` and ``due_date`` values but recalculate the ``duration`` value +This will preserve the ``start_date`` and ``due_date`` values but recalculate the ``duration`` value while appropriately considering all workday rules in effect. ******************************************************** @@ -50,16 +50,16 @@ How Do Splits Influence Dates And Dates Influence Splits - If splits are specified the supplied ``start_date``, ``due_date`` and ``duration`` fields will be ignored. - The ``start_date`` will be inferred from the earliest split. - The ``due_date`` will be inferred from the last split. -- If the ``start_date`` is changed on a task that has splits the first split will be moved to start - on the new ``start_date`` and all further splits will be moved while maintaining gap lengths +- If the ``start_date`` is changed on a task that has splits the first split will be moved to start + on the new ``start_date`` and all further splits will be moved while maintaining gap lengths between splits and respecting workday rules. -- If the ``due_date`` is changed on a task that has splits the last split will be moved to end on - the new ``due_date`` and all prior splits will be moved while maintaining gap lengths between +- If the ``due_date`` is changed on a task that has splits the last split will be moved to end on + the new ``due_date`` and all prior splits will be moved while maintaining gap lengths between splits and respecting workday rules. 
- If the ``duration`` is changed two scenarios are possible - + - In the case of a longer duration, additional days will be added to the end of the last split - - In the case of a shorter duration splits, starting with the latest ones, will be either + - In the case of a shorter duration splits, starting with the latest ones, will be either removed or shortened until the new duration is met. Examples @@ -216,7 +216,7 @@ Result: Setting the due_date in a gap ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -When a due date is set in a gap later splits are removed and the day of the due date is considered +When a due date is set in a gap later splits are removed and the day of the due date is considered a day when work will be done. For this example let's assume as a starting point the result of the 5th example: @@ -242,16 +242,3 @@ For this example let's assume as a starting point the result of the 5th example: Result: .. image:: /images/split_tasks_9.png - - - - - - - - - - - - - diff --git a/docs/cookbook/tasks/task_dependencies.rst b/docs/cookbook/tasks/task_dependencies.rst index a5cfc20be..bf6f5e610 100644 --- a/docs/cookbook/tasks/task_dependencies.rst +++ b/docs/cookbook/tasks/task_dependencies.rst @@ -4,16 +4,16 @@ Task Dependencies ################# -Task dependencies work the same way in the API as they do in the UI. You can filter and sort on +Task dependencies work the same way in the API as they do in the UI. You can filter and sort on any of the fields. For information about Task Dependencies in Flow Production Tracking, check out the `main -documentation page on our support site +documentation page on our support site `_ ************ Create Tasks ************ -Let's create a couple of Tasks and create dependencies between them. First we'll create a "Layout" +Let's create a couple of Tasks and create dependencies between them. 
First we'll create a "Layout" Task for our Shot:: data = { @@ -22,7 +22,7 @@ Task for our Shot:: 'start_date': '2010-04-28', 'due_date': '2010-05-05', 'entity': {'type':'Shot', 'id':860} - } + } result = sg.create(Task, data) @@ -45,7 +45,7 @@ Now let's create an "Anm" Task for our Shot:: 'start_date': '2010-05-06', 'due_date': '2010-05-12', 'entity': {'type':'Shot', 'id':860} - } + } result = sg.create(Task, data) Returns:: @@ -63,11 +63,11 @@ Returns:: Create A Dependency ******************* -Tasks each have an ``upstream_tasks`` field and a ``downstream_tasks`` field. Each field is a -list ``[]`` type and can contain zero, one, or multiple Task entity dictionaries representing the +Tasks each have an ``upstream_tasks`` field and a ``downstream_tasks`` field. Each field is a +list ``[]`` type and can contain zero, one, or multiple Task entity dictionaries representing the dependent Tasks. There are four dependency types from which you can choose: ``finish-to-start-next-day``, ``start-to-finish-next-day``, ``start-to-start``, ``finish-to-finish``. -If no dependency type is provided the default ``finish-to-start-next-day`` will be used. +If no dependency type is provided the default ``finish-to-start-next-day`` will be used. Here is how to create a dependency between our "Layout" and "Anm" Tasks:: # make 'Layout' an upstream Task to 'Anm'. (aka, make 'Anm' dependent on 'Layout') with finish-to-start-next-day dependency type @@ -85,7 +85,7 @@ Returns:: This will also automatically update the `downstream_tasks` field on 'Layout' to include the 'Anm' Task. *********************** -Query Task Dependencies +Query Task Dependencies *********************** Task Dependencies each have a ``dependent_task_id`` and a ``task_id`` fields. 
@@ -127,7 +127,7 @@ So now lets look at the Tasks we've created and their dependency-related fields: 'due_date', 'upstream_tasks', 'downstream_tasks', - 'dependency_violation', + 'dependency_violation', 'pinned' ] result = sg.find("Task", filters, fields) @@ -151,17 +151,17 @@ Returns:: 'pinned': False, 'start_date': '2010-05-06', 'type': 'Task', - 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, + ... -*Note that we have also created additional Tasks for this Shot but we're going to focus on these +*Note that we have also created additional Tasks for this Shot but we're going to focus on these first two for simplicity.* ***************************************************************** Updating the End Date on a Task with Downstream Task Dependencies ***************************************************************** -If we update the ``due_date`` field on our "Layout" Task, we'll see that the "Anm" Task dates +If we update the ``due_date`` field on our "Layout" Task, we'll see that the "Anm" Task dates will automatically get pushed back to keep the dependency satisfied:: result = sg.update('Task', 556, {'due_date': '2010-05-07'}) @@ -189,20 +189,20 @@ Our Tasks now look like this (notice the new dates on the "Anm" Task):: 'pinned': False, 'start_date': '2010-05-10', 'type': 'Task', - 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, + ... ********************************************************** Creating a Dependency Violation by pushing up a Start Date ********************************************************** -Task Dependencies can work nicely if you are pushing out an end date for a Task as it will just -recalculate the dates for all of the dependent Tasks. 
But what if we push up the Start Date of our +Task Dependencies can work nicely if you are pushing out an end date for a Task as it will just +recalculate the dates for all of the dependent Tasks. But what if we push up the Start Date of our "Anm" Task to start before our "Layout" Task is scheduled to end? :: - + result = sg.update('Task', 557, {'start_date': '2010-05-06'}) Returns:: @@ -229,21 +229,21 @@ Our Tasks now look like this:: 'start_date': '2010-05-06', 'type': 'Task', 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + ... -Because the "Anm" Task ``start_date`` depends on the ``due_date`` of the "Layout" Task, this +Because the "Anm" Task ``start_date`` depends on the ``due_date`` of the "Layout" Task, this change creates a dependency violation. The update succeeds, but Flow Production Tracking has also set the -``dependency_violation`` field to ``True`` and has also updated the ``pinned`` field to ``True``. +``dependency_violation`` field to ``True`` and has also updated the ``pinned`` field to ``True``. -The ``pinned`` field simply means that if the upstream Task(s) are moved, the "Anm" Task will no -longer get moved with it. The dependency is still there (in ``upstream_tasks``) but the Task is +The ``pinned`` field simply means that if the upstream Task(s) are moved, the "Anm" Task will no +longer get moved with it. The dependency is still there (in ``upstream_tasks``) but the Task is now "pinned" to those dates until the Dependency Violation is resolved. *********************************************************** -Resolving a Dependency Violation by updating the Start Date +Resolving a Dependency Violation by updating the Start Date *********************************************************** -We don't want that violation there. Let's revert that change so the Start Date for "Anm" is after +We don't want that violation there. 
Let's revert that change so the Start Date for "Anm" is after the End Date of "Layout":: result = sg.update('Task', 557, {'start_date': '2010-05-10'}) @@ -272,10 +272,10 @@ Our Tasks now look like this:: 'start_date': '2010-05-10', 'type': 'Task', 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + ... -The ``dependency_violation`` field has now been set back to ``False`` since there is no longer -a violation. But notice that the ``pinned`` field is still ``True``. We will have to manually +The ``dependency_violation`` field has now been set back to ``False`` since there is no longer +a violation. But notice that the ``pinned`` field is still ``True``. We will have to manually update that if we want the Task to travel with its dependencies again:: result = sg.update('Task', 557, {'pinned': False}) @@ -304,19 +304,19 @@ Our Tasks now look like this:: 'start_date': '2010-05-10', 'type': 'Task', 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + ... -Looks great. But that's an annoying manual process. What if we want to just reset a Task so that +Looks great. But that's an annoying manual process. What if we want to just reset a Task so that it automatically gets updated so that the Start Date is after its dependent Tasks? ******************************************************************* Updating the ``pinned`` field on a Task with a Dependency Violation ******************************************************************* -Let's go back a couple of steps to where our "Anm" Task had a Dependency Violation because we had -moved the Start Date up before the "Layout" Task End Date. Remember that the ``pinned`` field +Let's go back a couple of steps to where our "Anm" Task had a Dependency Violation because we had +moved the Start Date up before the "Layout" Task End Date. Remember that the ``pinned`` field was also ``True``. 
If we simply update the ``pinned`` field to be ``False``, Flow Production Tracking will also -automatically update the Task dates to satisfy the upstream dependencies and reset the +automatically update the Task dates to satisfy the upstream dependencies and reset the ``dependency_violation`` value to ``False``:: result = sg.update('Task', 557, {'pinned': False}) @@ -345,19 +345,19 @@ Our Tasks now look like this:: 'pinned': False, 'start_date': '2010-05-10', 'type': 'Task', - 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, - ... + 'upstream_tasks': [{'type': 'Task', 'name': 'Layout', 'id': 556}]}, + ... Notice by updating ``pinned`` to ``False``, Flow Production Tracking also updated the ``start_date`` and -``due_date`` fields of our "Anm" Task so it will satisfy the upstream Task dependencies. And since +``due_date`` fields of our "Anm" Task so it will satisfy the upstream Task dependencies. And since that succeeded, the ``dependency_violation`` field has also been set to ``False`` ******************************************* ``dependency_violation`` field is read-only ******************************************* -The ``dependency_violation`` field is the only dependency-related field that is read-only. Trying +The ``dependency_violation`` field is the only dependency-related field that is read-only. 
Trying to modify it will generate a Fault:: result = sg.update('Task', 557, {'dependency_violation': False}) diff --git a/docs/cookbook/tasks/updating_tasks.rst b/docs/cookbook/tasks/updating_tasks.rst index 97eb8c7b7..c7c216e3f 100644 --- a/docs/cookbook/tasks/updating_tasks.rst +++ b/docs/cookbook/tasks/updating_tasks.rst @@ -4,9 +4,9 @@ Updating Task Dates: How Flow Production Tracking Thinks ######################################################## -When updating Task dates in an API update() request, there is no specified order to the values that +When updating Task dates in an API update() request, there is no specified order to the values that are passed in. Flow Production Tracking also does automatic calculation of the``start_date``,``due_date``, and ``duration`` fields for convenience. In order to clarify how updates are handled by Flow Production Tracking we are -providing some general rules below and examples of what will happen when you make updates to your +providing some general rules below and examples of what will happen when you make updates to your Tasks. ************** @@ -17,31 +17,31 @@ General Rules - Updating the ``due_date`` automatically updates the ``duration`` (``start_date`` remains constant) - Updating the ``duration`` automatically updates the ``due_date`` (``start_date`` remains constant) - When updating Task values, Flow Production Tracking sets schedule fields (``milestone``, ``duration``, - ``start_date``, ``due_date``) after all other fields, because the Project and Task Assignees + ``start_date``, ``due_date``) after all other fields, because the Project and Task Assignees affect schedule calculations. -- If ``start_date`` and ``due_date`` are both set, ``duration`` is ignored (``duration`` can often +- If ``start_date`` and ``due_date`` are both set, ``duration`` is ignored (``duration`` can often be wrong since it's easy to calculate scheduling incorrectly). 
- If both ``start_date`` and ``due_date`` are provided, Flow Production Tracking sets ``start_date`` before ``due_date``. -- Set ``milestone`` before other schedule fields (because ``start_date``, ``due_date``, and +- Set ``milestone`` before other schedule fields (because ``start_date``, ``due_date``, and ``duration`` get lost if ``milestone`` is not set to ``False`` first) - If ``milestone`` is being set to ``True``, ``duration`` is ignored. -- If ``milestone`` is set to ``True`` and ``start_date`` and ``due_date`` are also being set to +- If ``milestone`` is set to ``True`` and ``start_date`` and ``due_date`` are also being set to conflicting values, an Exception is raised. -- If ``due_date`` and ``duration`` are set together (without ``start_date``), ``duration`` is set - first, then ``due_date`` (otherwise setting ``duration`` will change ``due_date`` after it is +- If ``due_date`` and ``duration`` are set together (without ``start_date``), ``duration`` is set + first, then ``due_date`` (otherwise setting ``duration`` will change ``due_date`` after it is set). ******** Examples ******** -The following examples show what the resulting Task object will look like after being run on the +The following examples show what the resulting Task object will look like after being run on the initial Task object listed under the header of each section. The ``duration`` values in the following examples assume your Flow Production Tracking instance is set to -10-hour work days. If your server is configured with a different setting, the ``duration`` values -will vary. +10-hour work days. If your server is configured with a different setting, the ``duration`` values +will vary. .. note:: The ``duration`` field stores ``duration`` values in minutes @@ -56,7 +56,7 @@ Regardless of current values on the Task, this behavior rules:: **Update start_date and due_date** -``duration`` is ignored if also provided. 
It is instead set automatically as (``due_date`` - +``duration`` is ignored if also provided. It is instead set automatically as (``due_date`` - ``start_date``) :: @@ -66,7 +66,7 @@ Regardless of current values on the Task, this behavior rules:: - ``start_date`` is updated. - ``due_date`` is updated. -- ``duration`` is calculated as (``due_date`` - ``start_date``) +- ``duration`` is calculated as (``due_date`` - ``start_date``) .. note:: The value provided in the update() is ignored (and in this case was also incorrect). @@ -90,7 +90,7 @@ Regardless of current values on the Task, this behavior rules:: - ``duration`` is updated. - ``due_date`` is updated. -- ``duration`` is calculated as (``due_date`` - ``start_date``) +- ``duration`` is calculated as (``due_date`` - ``start_date``) .. note:: This means the ``duration`` provided is overwritten. @@ -226,7 +226,7 @@ If the Task has ``start_date`` and ``due_date`` values but has no ``duration``, will behave. :: - + # Task = {'start_date': '2011-05-20', 'due_date': '2011-05-25', 'duration': None, 'id':123} **Update start_date** @@ -310,7 +310,7 @@ If the Task has ``due_date`` and ``duration`` values but has no ``start_date``, will behave. :: - + # Task = {'start_date': None, 'due_date': '2011-05-25', 'duration': 2400, 'id':123} **Update start_date** @@ -383,4 +383,4 @@ will behave. # Task = {'start_date': '2011-05-20', 'due_date': '2011-05-27', 'duration': 3600, 'id':123} - ``duration`` is updated. 
-- ``due_date`` is updated to (``start_date`` + ``duration``) \ No newline at end of file +- ``due_date`` is updated to (``start_date`` + ``duration``) diff --git a/docs/cookbook/tutorials.rst b/docs/cookbook/tutorials.rst index 99f56da02..2adbd4fbf 100644 --- a/docs/cookbook/tutorials.rst +++ b/docs/cookbook/tutorials.rst @@ -2,7 +2,7 @@ Examples ######## -Here's a list of various simple tutorials to walk through that should provide you with a good base +Here's a list of various simple tutorials to walk through that should provide you with a good base understanding of how to use the Flow Production Tracking API and what you can do with it. ***** diff --git a/docs/cookbook/usage_tips.rst b/docs/cookbook/usage_tips.rst index 91cd6e8cb..5d1a7bc1f 100644 --- a/docs/cookbook/usage_tips.rst +++ b/docs/cookbook/usage_tips.rst @@ -3,9 +3,9 @@ API Usage Tips ############## Below is a list of helpful tips when using the Flow Production Tracking API3. We have tried to make the API very -simple to use with predictable results while remaining a powerful tool to integrate with your -pipeline. However, there's always a couple of things that crop up that our users might not be -aware of. Those are the types of things you'll find below. We'll be adding to this document over +simple to use with predictable results while remaining a powerful tool to integrate with your +pipeline. However, there's always a couple of things that crop up that our users might not be +aware of. Those are the types of things you'll find below. We'll be adding to this document over time as new questions come up from our users that exhibit these types of cases. ********* @@ -43,13 +43,13 @@ the entities are returned in a standard dictionary:: {'type': 'Asset', 'name': 'redBall', 'id': 1} -For each entity returned, you will get a ``type``, ``name``, and ``id`` key. This does not mean -there are fields named ``type`` and ``name`` on the Asset. 
These are only used to provide a +For each entity returned, you will get a ``type``, ``name``, and ``id`` key. This does not mean +there are fields named ``type`` and ``name`` on the Asset. These are only used to provide a consistent way to represent entities returned via the API. - ``type``: the entity type (CamelCase) - ``name``: the display name of the entity. For most entity types this is the value of the ``code`` - field but not always. For example, on the Ticket and Delivery entities the ``name`` key would + field but not always. For example, on the Ticket and Delivery entities the ``name`` key would contain the value of the ``title`` field. .. _custom_entities: @@ -100,14 +100,14 @@ Connection entities exist behind the scenes for any many-to-many relationship. M you won't need to pay any attention to them. But in some cases, you may need to track information on the instance of one entity's relationship to another. -For example, when viewing a list of Versions on a Playlist, the Sort Order (``sg_sort_order``) field is an +For example, when viewing a list of Versions on a Playlist, the Sort Order (``sg_sort_order``) field is an example of a field that resides on the connection entity between Playlists and Versions. This -connection entity is appropriately called `PlaylistVersionConnection`. Because any Version can -exist in multiple Playlists, the sort order isn't specific to the Version, it's specific to -each _instance_ of the Version in a Playlist. These instances are tracked using connection +connection entity is appropriately called `PlaylistVersionConnection`. Because any Version can +exist in multiple Playlists, the sort order isn't specific to the Version, it's specific to +each _instance_ of the Version in a Playlist. These instances are tracked using connection entities in Shtogun and are accessible just like any other entity type in Flow Production Tracking. 
-To find information about your Versions in the Playlist "Director Review" (let's say it has an +To find information about your Versions in the Playlist "Director Review" (let's say it has an ``id`` of 4). We'd run a query like so:: filters = [['playlist', 'is', {'type':'Playlist', 'id':4}]] @@ -169,9 +169,9 @@ Which returns the following:: - ``playlist`` is the Playlist record for this connection instance. - ``sg_sort_order`` is the sort order field on the connection instance. -We can pull in field values from the linked Playlist and Version entities using dot notation like -``version.Version.code``. The syntax is ``fieldname.EntityType.fieldname``. In this example, -``PlaylistVersionConnection`` has a field named ``version``. That field contains a ``Version`` +We can pull in field values from the linked Playlist and Version entities using dot notation like +``version.Version.code``. The syntax is ``fieldname.EntityType.fieldname``. In this example, +``PlaylistVersionConnection`` has a field named ``version``. That field contains a ``Version`` entity. The field we are interested on the Version is ``code``. Put those together with our f riend the dot and we have ``version.Version.code``. @@ -179,20 +179,20 @@ riend the dot and we have ``version.Version.code``. Flow Production Tracking UI fields not available via the API ************************************************************ -Summary type fields like Query Fields and Pipeline Step summary fields are currently only available -via the UI. Some other fields may not work as expected through the API because they are "display +Summary type fields like Query Fields and Pipeline Step summary fields are currently only available +via the UI. Some other fields may not work as expected through the API because they are "display only" fields made available for convenience and are only available in the browser UI. 
HumanUser ========= -- ``name``: This is a UI-only field that is a combination of the ``firstname`` + ``' '`` + +- ``name``: This is a UI-only field that is a combination of the ``firstname`` + ``' '`` + ``lastname``. Shot ==== -**Smart Cut Fields**: These fields are available only in the browser UI. You can read more about +**Smart Cut Fields**: These fields are available only in the browser UI. You can read more about smart cut fields and the API in the :ref:`Smart Cut Fields doc `:: smart_cut_in @@ -212,25 +212,25 @@ smart cut fields and the API in the :ref:`Smart Cut Fields doc `_. This allows you to write plug-ins that watch for certain types of events and then run code when they occur. - + Structure of Event Types ======================== @@ -872,57 +872,57 @@ The basic structure of event types is broken into 3 parts: - ``Application``: Is always "Shotgun" for events automatically created by the Flow Production Tracking server. Other Flow Production Tracking products may use their name in here, for example, Toolkit has its own events - that it logs and the application portion is identified by "Toolkit". If you decide to use the + that it logs and the application portion is identified by "Toolkit". If you decide to use the EventLogEntry entity to log events for your scripts or tools, you would use your tool name here. - ``EntityType``: This is the entity type in Flow Production Tracking that was acted upon (eg. Shot, Asset, etc.) -- ``Action``: The general action that was taken. (eg. New, Change, Retirement, Revival) - +- ``Action``: The general action that was taken. (eg. New, Change, Retirement, Revival) + Standard Event Types ==================== -Each entity type has a standard set of events associated with it when it's created, updated, +Each entity type has a standard set of events associated with it when it's created, updated, deleted, and revived. They follow this pattern: - ``Shotgun_EntityType_New``: a new entity was created. 
Example: ``Shotgun_Task_New`` - ``Shotgun_EntityType_Change``: an entity was modified. Example: ``Shotgun_HumanUser_Change`` - ``Shotgun_EntityType_Retirement``: an entity was deleted. Example: ``Shotgun_Ticket_Retirement`` -- ``Shotgun_EntityType_Revival``: an entity was revived. Example: ``Shotgun_CustomEntity03_Revival`` +- ``Shotgun_EntityType_Revival``: an entity was revived. Example: ``Shotgun_CustomEntity03_Revival`` Additional Event Types ====================== These are _some_ of the additional event types that are logged by Flow Production Tracking: - + - ``Shotgun_Attachment_View``: an Attachment (file) was viewed by a user. -- ``Shotgun_Reading_Change``: a threaded entity has been marked read or unread. For example, a - Note was read by a user. The readings are unique to the entity<->user connection so when a +- ``Shotgun_Reading_Change``: a threaded entity has been marked read or unread. For example, a + Note was read by a user. The readings are unique to the entity<->user connection so when a Note is read by user "joe" it may still be unread by user "jane". - ``Shotgun_User_Login``: a user logged in to Flow Production Tracking. - ``Shotgun_User_Logout``: a user logged out of Flow Production Tracking. - + Custom Event Types ================== -Since ``EventLogEntries`` are entities themselves, you can create them using the API just like any -other entity type. As mentioned previously, if you'd like to have your scripts or tools log to +Since ``EventLogEntries`` are entities themselves, you can create them using the API just like any +other entity type. As mentioned previously, if you'd like to have your scripts or tools log to the Flow Production Tracking event log, simply devise a thoughtful naming structure for your event types and create the EventLogEntry as needed following the usual methods for creating entities via the API. Again, other Flow Production Tracking products like Toolkit use event logs this way. -.. 
note:: - EventLogEntries cannot be updated or deleted (that would defeat the purpose of course). - +.. note:: + EventLogEntries cannot be updated or deleted (that would defeat the purpose of course). + Performance =========== Event log database tables can get large very quickly. While Flow Production Tracking does very well with event logs -that get into the millions of records, there's an inevitable degradation of performance for pages -that display them in the web application as well as any API queries for events when they get too -big. This volume of events is not the norm, but can be reached if your server expereinces high -usage. +that get into the millions of records, there's an inevitable degradation of performance for pages +that display them in the web application as well as any API queries for events when they get too +big. This volume of events is not the norm, but can be reached if your server expereinces high +usage. This **does not** mean your Flow Production Tracking server performance will suffer in general, just any pages that are specifically displaying EventLogEntries in the web application, or API queries on the event @@ -976,7 +976,7 @@ Will internally be transformed as if you invoked something like this: .. code-block:: python - sg.find('Asset', [['project', 'is', {'id': 999, 'type': 'Project'}]]) + sg.find('Asset', [['project', 'is', {'id': 999, 'type': 'Project'}]]) ************ diff --git a/nose.cfg b/nose.cfg index 59c3f0974..22c0e11cd 100644 --- a/nose.cfg +++ b/nose.cfg @@ -9,4 +9,4 @@ # not expressly granted therein are reserved by Shotgun Software Inc. [nosetests] -exclude-dir=shotgun_api3/lib \ No newline at end of file +exclude-dir=shotgun_api3/lib diff --git a/run-tests b/run-tests index dbe93f8b2..61b0f82c9 100755 --- a/run-tests +++ b/run-tests @@ -1,3 +1,5 @@ +#!/usr/bin/env bash + # Copyright (c) 2019 Shotgun Software Inc. 
# # CONFIDENTIAL AND PROPRIETARY diff --git a/setup.py b/setup.py index 337e3b13b..f92018fe1 100644 --- a/setup.py +++ b/setup.py @@ -12,25 +12,25 @@ import sys from setuptools import setup, find_packages -f = open('README.md') +f = open("README.md") readme = f.read().strip() -f = open('LICENSE') +f = open("LICENSE") license = f.read().strip() setup( - name='shotgun_api3', - version='3.8.0', - description='Flow Production Tracking Python API', + name="shotgun_api3", + version="3.8.0", + description="Flow Production Tracking Python API", long_description=readme, - author='Autodesk', - author_email='https://www.autodesk.com/support/contact-support', - url='https://github.com/shotgunsoftware/python-api', + author="Autodesk", + author_email="https://www.autodesk.com/support/contact-support", + url="https://github.com/shotgunsoftware/python-api", license=license, - packages=find_packages(exclude=('tests',)), + packages=find_packages(exclude=("tests",)), script_args=sys.argv[1:], include_package_data=True, - package_data={'': ['cacerts.txt', 'cacert.pem']}, + package_data={"": ["cacerts.txt", "cacert.pem"]}, zip_safe=False, python_requires=">=3.7.0", classifiers=[ diff --git a/shotgun_api3/__init__.py b/shotgun_api3/__init__.py index 49e96db7a..d296aa97a 100644 --- a/shotgun_api3/__init__.py +++ b/shotgun_api3/__init__.py @@ -8,9 +8,18 @@ # agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. 
-from .shotgun import (Shotgun, ShotgunError, ShotgunFileDownloadError, # noqa unused imports - ShotgunThumbnailNotReady, Fault, - AuthenticationFault, MissingTwoFactorAuthenticationFault, - UserCredentialsNotAllowedForSSOAuthenticationFault, - ProtocolError, ResponseError, Error, __version__) -from .shotgun import SG_TIMEZONE as sg_timezone # noqa unused imports +from .shotgun import ( + Shotgun, + ShotgunError, + ShotgunFileDownloadError, # noqa unused imports + ShotgunThumbnailNotReady, + Fault, + AuthenticationFault, + MissingTwoFactorAuthenticationFault, + UserCredentialsNotAllowedForSSOAuthenticationFault, + ProtocolError, + ResponseError, + Error, + __version__, +) +from .shotgun import SG_TIMEZONE as sg_timezone # noqa unused imports diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index f0d4faf48..a805fa5f4 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -1,56 +1,56 @@ #!/usr/bin/env python """ - ----------------------------------------------------------------------------- - Copyright (c) 2009-2019, Shotgun Software Inc. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - - Neither the name of the Shotgun Software Inc nor the names of its - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. 
- - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +----------------------------------------------------------------------------- +Copyright (c) 2009-2019, Shotgun Software Inc. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + - Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + - Neither the name of the Shotgun Software Inc nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ # Python 2/3 compatibility from .lib import six from .lib import sgsix from .lib import sgutils -from .lib.six import BytesIO # used for attachment upload +from .lib.six import BytesIO # used for attachment upload from .lib.six.moves import map from .lib.six.moves import http_cookiejar # used for attachment upload import datetime import logging -import uuid # used for attachment upload +import uuid # used for attachment upload import os import re import copy import ssl -import stat # used for attachment upload +import stat # used for attachment upload import sys import time import json from .lib.six.moves import urllib -import shutil # used for attachment download -from .lib.six.moves import http_client # Used for secure file upload. +import shutil # used for attachment download +from .lib.six.moves import http_client # Used for secure file upload. 
from .lib.httplib2 import Http, ProxyInfo, socks, ssl_error_classes from .lib.sgtimezone import SgTimezone @@ -88,9 +88,13 @@ def _is_mimetypes_broken(): # http://bugs.python.org/issue9291 <- Fixed in 2.7.7 # http://bugs.python.org/issue21652 <- Fixed in 2.7.8 # http://bugs.python.org/issue22028 <- Fixed in 2.7.10 - return (sys.platform == "win32" and - sys.version_info[0] == 2 and sys.version_info[1] == 7 and - sys.version_info[2] >= 0 and sys.version_info[2] <= 9) + return ( + sys.platform == "win32" + and sys.version_info[0] == 2 + and sys.version_info[1] == 7 + and sys.version_info[2] >= 0 + and sys.version_info[2] <= 9 + ) if _is_mimetypes_broken(): @@ -101,7 +105,7 @@ def _is_mimetypes_broken(): # mimetypes imported in version specific imports mimetypes.add_type("video/webm", ".webm") # webm and mp4 seem to be missing -mimetypes.add_type("video/mp4", ".mp4") # from some OS/distros +mimetypes.add_type("video/mp4", ".mp4") # from some OS/distros SG_TIMEZONE = SgTimezone() @@ -128,6 +132,7 @@ class ShotgunError(Exception): """ Base for all Shotgun API Errors. """ + pass @@ -135,6 +140,7 @@ class ShotgunFileDownloadError(ShotgunError): """ Exception for file download-related errors. """ + pass @@ -142,6 +148,7 @@ class ShotgunThumbnailNotReady(ShotgunError): """ Exception for when trying to use a 'pending thumbnail' (aka transient thumbnail) in an operation """ + pass @@ -149,6 +156,7 @@ class Fault(ShotgunError): """ Exception when server-side exception detected. """ + pass @@ -156,6 +164,7 @@ class AuthenticationFault(Fault): """ Exception when the server side reports an error related to authentication. """ + pass @@ -164,6 +173,7 @@ class MissingTwoFactorAuthenticationFault(Fault): Exception when the server side reports an error related to missing two-factor authentication credentials. """ + pass @@ -172,6 +182,7 @@ class UserCredentialsNotAllowedForSSOAuthenticationFault(Fault): Exception when the server is configured to use SSO. 
It is not possible to use a username/password pair to authenticate on such server. """ + pass @@ -180,8 +191,10 @@ class UserCredentialsNotAllowedForOxygenAuthenticationFault(Fault): Exception when the server is configured to use Oxygen. It is not possible to use a username/password pair to authenticate on such server. """ + pass + # ---------------------------------------------------------------------------- # API @@ -221,10 +234,12 @@ def __init__(self, host, meta): except AttributeError: self.version = None if not self.version: - raise ShotgunError("The Flow Production Tracking Server didn't respond with a version number. " - "This may be because you are running an older version of " - "Flow Production Tracking against a more recent version of the Flow Production Tracking API. " - "For more information, please contact the Autodesk support.") + raise ShotgunError( + "The Flow Production Tracking Server didn't respond with a version number. " + "This may be because you are running an older version of " + "Flow Production Tracking against a more recent version of the Flow Production Tracking API. " + "For more information, please contact the Autodesk support." + ) if len(self.version) > 3 and self.version[3] == "Dev": self.is_dev = True @@ -258,7 +273,12 @@ def _ensure_support(self, feature, raise_hell=True): if raise_hell: raise ShotgunError( "%s requires server version %s or higher, " - "server is %s" % (feature["label"], _version_str(feature["version"]), _version_str(self.version)) + "server is %s" + % ( + feature["label"], + _version_str(feature["version"]), + _version_str(self.version), + ) ) return False else: @@ -268,68 +288,62 @@ def _ensure_json_supported(self): """ Ensures server has support for JSON API endpoint added in v2.4.0. 
""" - self._ensure_support({ - "version": (2, 4, 0), - "label": "JSON API" - }) + self._ensure_support({"version": (2, 4, 0), "label": "JSON API"}) def ensure_include_archived_projects(self): """ Ensures server has support for archived Projects feature added in v5.3.14. """ - self._ensure_support({ - "version": (5, 3, 14), - "label": "include_archived_projects parameter" - }) + self._ensure_support( + {"version": (5, 3, 14), "label": "include_archived_projects parameter"} + ) def ensure_per_project_customization(self): """ Ensures server has support for per-project customization feature added in v5.4.4. """ - return self._ensure_support({ - "version": (5, 4, 4), - "label": "project parameter" - }, True) + return self._ensure_support( + {"version": (5, 4, 4), "label": "project parameter"}, True + ) def ensure_support_for_additional_filter_presets(self): """ Ensures server has support for additional filter presets feature added in v7.0.0. """ - return self._ensure_support({ - "version": (7, 0, 0), - "label": "additional_filter_presets parameter" - }, True) + return self._ensure_support( + {"version": (7, 0, 0), "label": "additional_filter_presets parameter"}, True + ) def ensure_user_following_support(self): """ Ensures server has support for listing items a user is following, added in v7.0.12. """ - return self._ensure_support({ - "version": (7, 0, 12), - "label": "user_following parameter" - }, True) + return self._ensure_support( + {"version": (7, 0, 12), "label": "user_following parameter"}, True + ) def ensure_paging_info_without_counts_support(self): """ Ensures server has support for optimized pagination, added in v7.4.0. 
""" - return self._ensure_support({ - "version": (7, 4, 0), - "label": "optimized pagination" - }, False) + return self._ensure_support( + {"version": (7, 4, 0), "label": "optimized pagination"}, False + ) def ensure_return_image_urls_support(self): """ Ensures server has support for returning thumbnail URLs without additional round-trips, added in v3.3.0. """ - return self._ensure_support({ - "version": (3, 3, 0), - "label": "return thumbnail URLs" - }, False) + return self._ensure_support( + {"version": (3, 3, 0), "label": "return thumbnail URLs"}, False + ) def __str__(self): - return "ServerCapabilities: host %s, version %s, is_dev %s"\ - % (self.host, self.version, self.is_dev) + return "ServerCapabilities: host %s, version %s, is_dev %s" % ( + self.host, + self.version, + self.is_dev, + ) class ClientCapabilities(object): @@ -379,9 +393,11 @@ def __init__(self): pass def __str__(self): - return "ClientCapabilities: platform %s, local_path_field %s, "\ - "py_verison %s, ssl version %s" % (self.platform, self.local_path_field, - self.py_version, self.ssl_version) + return ( + "ClientCapabilities: platform %s, local_path_field %s, " + "py_verison %s, ssl version %s" + % (self.platform, self.local_path_field, self.py_version, self.ssl_version) + ) class _Config(object): @@ -459,14 +475,11 @@ def set_server_params(self, base_url): :raises ValueError: Raised if protocol is not http or https. 
""" - self.scheme, self.server, api_base, _, _ = \ - urllib.parse.urlsplit(base_url) + self.scheme, self.server, api_base, _, _ = urllib.parse.urlsplit(base_url) if self.scheme not in ("http", "https"): - raise ValueError( - "base_url must use http or https got '%s'" % base_url - ) - self.api_path = urllib.parse.urljoin(urllib.parse.urljoin( - api_base or "/", self.api_ver + "/"), "json" + raise ValueError("base_url must use http or https got '%s'" % base_url) + self.api_path = urllib.parse.urljoin( + urllib.parse.urljoin(api_base or "/", self.api_ver + "/"), "json" ) @property @@ -477,7 +490,9 @@ def records_per_page(self): if self._records_per_page is None: # Check for api_max_entities_per_page in the server info and change the record per page # value if it is supplied. - self._records_per_page = self._sg.server_info.get("api_max_entities_per_page") or 500 + self._records_per_page = ( + self._sg.server_info.get("api_max_entities_per_page") or 500 + ) return self._records_per_page @@ -489,30 +504,32 @@ class Shotgun(object): # reg ex from # http://underground.infovark.com/2008/07/22/iso-date-validation-regex/ # Note a length check is done before checking the reg ex - _DATE_PATTERN = re.compile( - r"^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])$") + _DATE_PATTERN = re.compile(r"^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])$") _DATE_TIME_PATTERN = re.compile( r"^(\d{4})\D?(0[1-9]|1[0-2])\D?([12]\d|0[1-9]|3[01])" - r"(\D?([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?)?$") + r"(\D?([01]\d|2[0-3])\D?([0-5]\d)\D?([0-5]\d)?\D?(\d{3})?)?$" + ) _MULTIPART_UPLOAD_CHUNK_SIZE = 20000000 - MAX_ATTEMPTS = 3 # Retries on failure - BACKOFF = 0.75 # Seconds to wait before retry, times the attempt number - - def __init__(self, - base_url, - script_name=None, - api_key=None, - convert_datetimes_to_utc=True, - http_proxy=None, - ensure_ascii=True, - connect=True, - ca_certs=None, - login=None, - password=None, - sudo_as_login=None, - session_token=None, - 
auth_token=None): + MAX_ATTEMPTS = 3 # Retries on failure + BACKOFF = 0.75 # Seconds to wait before retry, times the attempt number + + def __init__( + self, + base_url, + script_name=None, + api_key=None, + convert_datetimes_to_utc=True, + http_proxy=None, + ensure_ascii=True, + connect=True, + ca_certs=None, + login=None, + password=None, + sudo_as_login=None, + session_token=None, + auth_token=None, + ): """ Initializes a new instance of the Shotgun client. @@ -597,16 +614,19 @@ def __init__(self, # verify authentication arguments if session_token is not None: if script_name is not None or api_key is not None: - raise ValueError("cannot provide both session_token " - "and script_name/api_key") + raise ValueError( + "cannot provide both session_token " "and script_name/api_key" + ) if login is not None or password is not None: - raise ValueError("cannot provide both session_token " - "and login/password") + raise ValueError( + "cannot provide both session_token " "and login/password" + ) if login is not None or password is not None: if script_name is not None or api_key is not None: - raise ValueError("cannot provide both login/password " - "and script_name/api_key") + raise ValueError( + "cannot provide both login/password " "and script_name/api_key" + ) if login is None: raise ValueError("password provided without login") if password is None: @@ -620,15 +640,24 @@ def __init__(self, if auth_token is not None: if login is None or password is None: - raise ValueError("must provide a user login and password with an auth_token") + raise ValueError( + "must provide a user login and password with an auth_token" + ) if script_name is not None or api_key is not None: raise ValueError("cannot provide an auth_code with script_name/api_key") # Can't use 'all' with python 2.4 - if len([x for x in [session_token, script_name, api_key, login, password] if x]) == 0: + if ( + len( + [x for x in [session_token, script_name, api_key, login, password] if x] + ) + == 0 + ): if 
connect: - raise ValueError("must provide login/password, session_token or script_name/api_key") + raise ValueError( + "must provide login/password, session_token or script_name/api_key" + ) self.config = _Config(self) self.config.api_key = api_key @@ -643,19 +672,30 @@ def __init__(self, self.config.raw_http_proxy = http_proxy try: - self.config.rpc_attempt_interval = int(os.environ.get("SHOTGUN_API_RETRY_INTERVAL", 3000)) + self.config.rpc_attempt_interval = int( + os.environ.get("SHOTGUN_API_RETRY_INTERVAL", 3000) + ) except ValueError: retry_interval = os.environ.get("SHOTGUN_API_RETRY_INTERVAL", 3000) - raise ValueError("Invalid value '%s' found in environment variable " - "SHOTGUN_API_RETRY_INTERVAL, must be int." % retry_interval) + raise ValueError( + "Invalid value '%s' found in environment variable " + "SHOTGUN_API_RETRY_INTERVAL, must be int." % retry_interval + ) if self.config.rpc_attempt_interval < 0: - raise ValueError("Value of SHOTGUN_API_RETRY_INTERVAL must be positive, " - "got '%s'." % self.config.rpc_attempt_interval) - + raise ValueError( + "Value of SHOTGUN_API_RETRY_INTERVAL must be positive, " + "got '%s'." % self.config.rpc_attempt_interval + ) + global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION - if os.environ.get("SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", "0").strip().lower() == "1": + if ( + os.environ.get("SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", "0") + .strip() + .lower() + == "1" + ): SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION = True - + self._connection = None self.__ca_certs = self._get_certs_file(ca_certs) @@ -672,8 +712,9 @@ def __init__(self, # the lowercase version of the credentials. 
auth, self.config.server = self._split_url(base_url) if auth: - auth = base64encode(sgutils.ensure_binary( - urllib.parse.unquote(auth))).decode("utf-8") + auth = base64encode( + sgutils.ensure_binary(urllib.parse.unquote(auth)) + ).decode("utf-8") self.config.authorization = "Basic " + auth.strip() # foo:bar@123.456.789.012:3456 @@ -682,8 +723,7 @@ def __init__(self, # there might be @ in the user's password. p = http_proxy.rsplit("@", 1) if len(p) > 1: - self.config.proxy_user, self.config.proxy_pass = \ - p[0].split(":", 1) + self.config.proxy_user, self.config.proxy_pass = p[0].split(":", 1) proxy_server = p[1] else: proxy_server = http_proxy @@ -693,18 +733,29 @@ def __init__(self, try: self.config.proxy_port = int(proxy_netloc_list[1]) except ValueError: - raise ValueError("Invalid http_proxy address '%s'. Valid " - "format is '123.456.789.012' or '123.456.789.012:3456'" - ". If no port is specified, a default of %d will be " - "used." % (http_proxy, self.config.proxy_port)) + raise ValueError( + "Invalid http_proxy address '%s'. Valid " + "format is '123.456.789.012' or '123.456.789.012:3456'" + ". If no port is specified, a default of %d will be " + "used." 
% (http_proxy, self.config.proxy_port) + ) # now populate self.config.proxy_handler if self.config.proxy_user and self.config.proxy_pass: - auth_string = "%s:%s@" % (self.config.proxy_user, self.config.proxy_pass) + auth_string = "%s:%s@" % ( + self.config.proxy_user, + self.config.proxy_pass, + ) else: auth_string = "" - proxy_addr = "http://%s%s:%d" % (auth_string, self.config.proxy_server, self.config.proxy_port) - self.config.proxy_handler = urllib.request.ProxyHandler({self.config.scheme: proxy_addr}) + proxy_addr = "http://%s%s:%d" % ( + auth_string, + self.config.proxy_server, + self.config.proxy_port, + ) + self.config.proxy_handler = urllib.request.ProxyHandler( + {self.config.scheme: proxy_addr} + ) if ensure_ascii: self._json_loads = self._json_loads_ascii @@ -750,7 +801,8 @@ def _split_url(self, base_url): else: auth, server = urllib.parse.splituser( - urllib.parse.urlsplit(base_url).netloc) + urllib.parse.urlsplit(base_url).netloc + ) return auth, server @@ -842,8 +894,17 @@ def info(self): """ return self._call_rpc("info", None, include_auth_params=False) - def find_one(self, entity_type, filters, fields=None, order=None, filter_operator=None, retired_only=False, - include_archived_projects=True, additional_filter_presets=None): + def find_one( + self, + entity_type, + filters, + fields=None, + order=None, + filter_operator=None, + retired_only=False, + include_archived_projects=True, + additional_filter_presets=None, + ): """ Shortcut for :meth:`~shotgun_api3.Shotgun.find` with ``limit=1`` so it returns a single result. 
@@ -897,16 +958,35 @@ def find_one(self, entity_type, filters, fields=None, order=None, filter_operato :rtype: dict """ - results = self.find(entity_type, filters, fields, order, filter_operator, 1, retired_only, - include_archived_projects=include_archived_projects, - additional_filter_presets=additional_filter_presets) + results = self.find( + entity_type, + filters, + fields, + order, + filter_operator, + 1, + retired_only, + include_archived_projects=include_archived_projects, + additional_filter_presets=additional_filter_presets, + ) if results: return results[0] return None - def find(self, entity_type, filters, fields=None, order=None, filter_operator=None, limit=0, - retired_only=False, page=0, include_archived_projects=True, additional_filter_presets=None): + def find( + self, + entity_type, + filters, + fields=None, + order=None, + filter_operator=None, + limit=0, + retired_only=False, + page=0, + include_archived_projects=True, + additional_filter_presets=None, + ): """ Find entities matching the given filters. @@ -959,7 +1039,7 @@ def find(self, entity_type, filters, fields=None, order=None, filter_operator=No Defaults to ``["id"]``. .. seealso:: :ref:`combining-related-queries` - + :param list order: Optional list of dictionaries defining how to order the results of the query. Each dictionary contains the ``field_name`` to order by and the ``direction`` to sort:: @@ -1016,8 +1096,10 @@ def find(self, entity_type, filters, fields=None, order=None, filter_operator=No filters = _translate_filters(filters, filter_operator) elif filter_operator: # TODO: Not sure if this test is correct, replicated from prev api - raise ShotgunError("Deprecated: Use of filter_operator for find() is not valid any more." - " See the documentation on find()") + raise ShotgunError( + "Deprecated: Use of filter_operator for find() is not valid any more." 
+ " See the documentation on find()" + ) if not include_archived_projects: # This defaults to True on the server (no argument is sent) @@ -1027,13 +1109,15 @@ def find(self, entity_type, filters, fields=None, order=None, filter_operator=No if additional_filter_presets: self.server_caps.ensure_support_for_additional_filter_presets() - params = self._construct_read_parameters(entity_type, - fields, - filters, - retired_only, - order, - include_archived_projects, - additional_filter_presets) + params = self._construct_read_parameters( + entity_type, + fields, + filters, + retired_only, + order, + include_archived_projects, + additional_filter_presets, + ) if self.server_caps.ensure_return_image_urls_support(): params["api_return_image_urls"] = True @@ -1089,21 +1173,25 @@ def find(self, entity_type, filters, fields=None, order=None, filter_operator=No return self._parse_records(records) - def _construct_read_parameters(self, - entity_type, - fields, - filters, - retired_only, - order, - include_archived_projects, - additional_filter_presets): + def _construct_read_parameters( + self, + entity_type, + fields, + filters, + retired_only, + order, + include_archived_projects, + additional_filter_presets, + ): params = {} params["type"] = entity_type params["return_fields"] = fields or ["id"] params["filters"] = filters params["return_only"] = (retired_only and "retired") or "active" - params["paging"] = {"entities_per_page": self.config.records_per_page, - "current_page": 1} + params["paging"] = { + "entities_per_page": self.config.records_per_page, + "current_page": 1, + } if additional_filter_presets: params["additional_filter_presets"] = additional_filter_presets @@ -1119,10 +1207,9 @@ def _construct_read_parameters(self, # TODO: warn about deprecation of 'column' param name sort["field_name"] = sort["column"] sort.setdefault("direction", "asc") - sort_list.append({ - "field_name": sort["field_name"], - "direction": sort["direction"] - }) + sort_list.append( + 
{"field_name": sort["field_name"], "direction": sort["direction"]} + ) params["sorts"] = sort_list return params @@ -1132,7 +1219,7 @@ def _add_project_param(self, params, project_entity): params["project"] = project_entity return params - + def _translate_update_params( self, entity_type, entity_id, data, multi_entity_update_modes ): @@ -1155,13 +1242,15 @@ def optimize_field(field_dict): "fields": [optimize_field(field_dict) for field_dict in full_fields], } - def summarize(self, - entity_type, - filters, - summary_fields, - filter_operator=None, - grouping=None, - include_archived_projects=True): + def summarize( + self, + entity_type, + filters, + summary_fields, + filter_operator=None, + grouping=None, + include_archived_projects=True, + ): """ Summarize field data returned by a query. @@ -1349,9 +1438,7 @@ def summarize(self, # So we only need to check the server version if it is False self.server_caps.ensure_include_archived_projects() - params = {"type": entity_type, - "summaries": summary_fields, - "filters": filters} + params = {"type": entity_type, "summaries": summary_fields, "filters": filters} if include_archived_projects is False: # Defaults to True on the server, so only pass it if it's False @@ -1411,14 +1498,16 @@ def create(self, entity_type, data, return_fields=None): upload_filmstrip_image = None if "filmstrip_image" in data: if not self.server_caps.version or self.server_caps.version < (3, 1, 0): - raise ShotgunError("Filmstrip thumbnail support requires server version 3.1 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Filmstrip thumbnail support requires server version 3.1 or " + "higher, server is %s" % (self.server_caps.version,) + ) upload_filmstrip_image = data.pop("filmstrip_image") params = { "type": entity_type, "fields": self._dict_to_list(data), - "return_fields": return_fields + "return_fields": return_fields, } record = self._call_rpc("create", params, first=True) @@ -1426,12 +1515,20 @@ def 
create(self, entity_type, data, return_fields=None): if upload_image: self.upload_thumbnail(entity_type, result["id"], upload_image) - image = self.find_one(entity_type, [["id", "is", result.get("id")]], fields=["image"]) + image = self.find_one( + entity_type, [["id", "is", result.get("id")]], fields=["image"] + ) result["image"] = image.get("image") if upload_filmstrip_image: - self.upload_filmstrip_thumbnail(entity_type, result["id"], upload_filmstrip_image) - filmstrip = self.find_one(entity_type, [["id", "is", result.get("id")]], fields=["filmstrip_image"]) + self.upload_filmstrip_thumbnail( + entity_type, result["id"], upload_filmstrip_image + ) + filmstrip = self.find_one( + entity_type, + [["id", "is", result.get("id")]], + fields=["filmstrip_image"], + ) result["filmstrip_image"] = filmstrip.get("filmstrip_image") return result @@ -1480,12 +1577,16 @@ def update(self, entity_type, entity_id, data, multi_entity_update_modes=None): upload_filmstrip_image = None if "filmstrip_image" in data: if not self.server_caps.version or self.server_caps.version < (3, 1, 0): - raise ShotgunError("Filmstrip thumbnail support requires server version 3.1 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Filmstrip thumbnail support requires server version 3.1 or " + "higher, server is %s" % (self.server_caps.version,) + ) upload_filmstrip_image = data.pop("filmstrip_image") if data: - params = self._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) + params = self._translate_update_params( + entity_type, entity_id, data, multi_entity_update_modes + ) record = self._call_rpc("update", params) result = self._parse_records(record)[0] else: @@ -1493,12 +1594,20 @@ def update(self, entity_type, entity_id, data, multi_entity_update_modes=None): if upload_image: self.upload_thumbnail(entity_type, entity_id, upload_image) - image = self.find_one(entity_type, [["id", "is", result.get("id")]], fields=["image"]) + image 
= self.find_one( + entity_type, [["id", "is", result.get("id")]], fields=["image"] + ) result["image"] = image.get("image") if upload_filmstrip_image: - self.upload_filmstrip_thumbnail(entity_type, result["id"], upload_filmstrip_image) - filmstrip = self.find_one(entity_type, [["id", "is", result.get("id")]], fields=["filmstrip_image"]) + self.upload_filmstrip_thumbnail( + entity_type, result["id"], upload_filmstrip_image + ) + filmstrip = self.find_one( + entity_type, + [["id", "is", result.get("id")]], + fields=["filmstrip_image"], + ) result["filmstrip_image"] = filmstrip.get("filmstrip_image") return result @@ -1521,12 +1630,9 @@ def delete(self, entity_type, entity_id): entity was already deleted). :rtype: bool :raises: :class:`Fault` if entity does not exist (deleted or not). - """ + """ - params = { - "type": entity_type, - "id": entity_id - } + params = {"type": entity_type, "id": entity_id} return self._call_rpc("delete", params) @@ -1544,10 +1650,7 @@ def revive(self, entity_type, entity_id): :rtype: bool """ - params = { - "type": entity_type, - "id": entity_id - } + params = {"type": entity_type, "id": entity_id} return self._call_rpc("revive", params) @@ -1612,7 +1715,9 @@ def batch(self, requests): """ if not isinstance(requests, list): - raise ShotgunError("batch() expects a list. Instead was sent a %s" % type(requests)) + raise ShotgunError( + "batch() expects a list. Instead was sent a %s" % type(requests) + ) # If we have no requests, just return an empty list immediately. # Nothing to process means nothing to get results of. @@ -1624,39 +1729,42 @@ def batch(self, requests): def _required_keys(message, required_keys, data): missing = set(required_keys) - set(data.keys()) if missing: - raise ShotgunError("%s missing required key: %s. " - "Value was: %s." % (message, ", ".join(missing), data)) + raise ShotgunError( + "%s missing required key: %s. " + "Value was: %s." 
% (message, ", ".join(missing), data) + ) for req in requests: - _required_keys("Batched request", - ["request_type", "entity_type"], - req) - request_params = {"request_type": req["request_type"], "type": req["entity_type"]} + _required_keys("Batched request", ["request_type", "entity_type"], req) + request_params = { + "request_type": req["request_type"], + "type": req["entity_type"], + } if req["request_type"] == "create": _required_keys("Batched create request", ["data"], req) request_params["fields"] = self._dict_to_list(req["data"]) - request_params["return_fields"] = req.get("return_fields") or["id"] + request_params["return_fields"] = req.get("return_fields") or ["id"] elif req["request_type"] == "update": - _required_keys("Batched update request", - ["entity_id", "data"], - req) + _required_keys("Batched update request", ["entity_id", "data"], req) request_params["id"] = req["entity_id"] request_params["fields"] = self._dict_to_list( req["data"], extra_data=self._dict_to_extra_data( - req.get("multi_entity_update_modes"), - "multi_entity_update_mode" - ) + req.get("multi_entity_update_modes"), "multi_entity_update_mode" + ), ) if "multi_entity_update_mode" in req: - request_params["multi_entity_update_mode"] = req["multi_entity_update_mode"] + request_params["multi_entity_update_mode"] = req[ + "multi_entity_update_mode" + ] elif req["request_type"] == "delete": _required_keys("Batched delete request", ["entity_id"], req) request_params["id"] = req["entity_id"] else: - raise ShotgunError("Invalid request_type '%s' for batch" % ( - req["request_type"])) + raise ShotgunError( + "Invalid request_type '%s' for batch" % (req["request_type"]) + ) calls.append(request_params) records = self._call_rpc("batch", calls) return self._parse_records(records) @@ -1714,23 +1822,31 @@ def work_schedule_read(self, start_date, end_date, project=None, user=None): """ if not self.server_caps.version or self.server_caps.version < (3, 2, 0): - raise ShotgunError("Work schedule 
support requires server version 3.2 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Work schedule support requires server version 3.2 or " + "higher, server is %s" % (self.server_caps.version,) + ) if not isinstance(start_date, str) or not isinstance(end_date, str): - raise ShotgunError("The start_date and end_date arguments must be strings in YYYY-MM-DD format") + raise ShotgunError( + "The start_date and end_date arguments must be strings in YYYY-MM-DD format" + ) params = dict( - start_date=start_date, - end_date=end_date, - project=project, - user=user + start_date=start_date, end_date=end_date, project=project, user=user ) return self._call_rpc("work_schedule_read", params) - def work_schedule_update(self, date, working, description=None, project=None, user=None, - recalculate_field=None): + def work_schedule_update( + self, + date, + working, + description=None, + project=None, + user=None, + recalculate_field=None, + ): """ Update the work schedule for a given date. 
@@ -1765,8 +1881,10 @@ def work_schedule_update(self, date, working, description=None, project=None, us """ if not self.server_caps.version or self.server_caps.version < (3, 2, 0): - raise ShotgunError("Work schedule support requires server version 3.2 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Work schedule support requires server version 3.2 or " + "higher, server is %s" % (self.server_caps.version,) + ) if not isinstance(date, str): raise ShotgunError("The date argument must be string in YYYY-MM-DD format") @@ -1777,7 +1895,7 @@ def work_schedule_update(self, date, working, description=None, project=None, us description=description, project=project, user=user, - recalculate_field=recalculate_field + recalculate_field=recalculate_field, ) return self._call_rpc("work_schedule_update", params) @@ -1801,13 +1919,12 @@ def follow(self, user, entity): """ if not self.server_caps.version or self.server_caps.version < (5, 1, 22): - raise ShotgunError("Follow support requires server version 5.2 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Follow support requires server version 5.2 or " + "higher, server is %s" % (self.server_caps.version,) + ) - params = dict( - user=user, - entity=entity - ) + params = dict(user=user, entity=entity) return self._call_rpc("follow", params) @@ -1829,13 +1946,12 @@ def unfollow(self, user, entity): """ if not self.server_caps.version or self.server_caps.version < (5, 1, 22): - raise ShotgunError("Follow support requires server version 5.2 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Follow support requires server version 5.2 or " + "higher, server is %s" % (self.server_caps.version,) + ) - params = dict( - user=user, - entity=entity - ) + params = dict(user=user, entity=entity) return self._call_rpc("unfollow", params) @@ -1858,12 +1974,12 @@ def followers(self, entity): """ if not self.server_caps.version or 
self.server_caps.version < (5, 1, 22): - raise ShotgunError("Follow support requires server version 5.2 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Follow support requires server version 5.2 or " + "higher, server is %s" % (self.server_caps.version,) + ) - params = dict( - entity=entity - ) + params = dict(entity=entity) return self._call_rpc("followers", params) @@ -1890,9 +2006,7 @@ def following(self, user, project=None, entity_type=None): self.server_caps.ensure_user_following_support() - params = { - "user": user - } + params = {"user": user} if project: params["project"] = project if entity_type: @@ -2080,7 +2194,9 @@ def schema_field_read(self, entity_type, field_name=None, project_entity=None): return self._call_rpc("schema_field_read", params) - def schema_field_create(self, entity_type, data_type, display_name, properties=None): + def schema_field_create( + self, entity_type, data_type, display_name, properties=None + ): """ Create a field for the specified entity type. @@ -2109,15 +2225,17 @@ def schema_field_create(self, entity_type, data_type, display_name, properties=N params = { "type": entity_type, "data_type": data_type, - "properties": [ - {"property_name": "name", "value": display_name} - ] + "properties": [{"property_name": "name", "value": display_name}], } - params["properties"].extend(self._dict_to_list(properties, key_name="property_name", value_name="value")) + params["properties"].extend( + self._dict_to_list(properties, key_name="property_name", value_name="value") + ) return self._call_rpc("schema_field_create", params) - def schema_field_update(self, entity_type, field_name, properties, project_entity=None): + def schema_field_update( + self, entity_type, field_name, properties, project_entity=None + ): """ Update the properties for the specified field on an entity. 
@@ -2154,7 +2272,7 @@ def schema_field_update(self, entity_type, field_name, properties, project_entit "properties": [ {"property_name": k, "value": v} for k, v in six.iteritems((properties or {})) - ] + ], } params = self._add_project_param(params, project_entity) return self._call_rpc("schema_field_update", params) @@ -2172,10 +2290,7 @@ def schema_field_delete(self, entity_type, field_name): :rtype: bool """ - params = { - "type": entity_type, - "field_name": field_name - } + params = {"type": entity_type, "field_name": field_name} return self._call_rpc("schema_field_delete", params) @@ -2209,9 +2324,11 @@ def reset_user_agent(self): if self.config.no_ssl_validation: validation_str = "no-validate" - self._user_agents = ["shotgun-json (%s)" % __version__, - "Python %s (%s)" % (self.client_caps.py_version, ua_platform), - "ssl %s (%s)" % (self.client_caps.ssl_version, validation_str)] + self._user_agents = [ + "shotgun-json (%s)" % __version__, + "Python %s (%s)" % (self.client_caps.py_version, ua_platform), + "ssl %s (%s)" % (self.client_caps.ssl_version, validation_str), + ] def set_session_uuid(self, session_uuid): """ @@ -2229,8 +2346,14 @@ def set_session_uuid(self, session_uuid): self.config.session_uuid = session_uuid return - def share_thumbnail(self, entities, thumbnail_path=None, source_entity=None, - filmstrip_thumbnail=False, **kwargs): + def share_thumbnail( + self, + entities, + thumbnail_path=None, + source_entity=None, + filmstrip_thumbnail=False, + **kwargs, + ): """ Associate a thumbnail with more than one Shotgun entity. @@ -2246,7 +2369,7 @@ def share_thumbnail(self, entities, thumbnail_path=None, source_entity=None, .. note:: When sharing a filmstrip thumbnail, it is required to have a static thumbnail in place before the filmstrip will be displayed in the Shotgun web UI. - If the :ref:`thumbnail is still processing and is using a placeholder + If the :ref:`thumbnail is still processing and is using a placeholder `, this method will error. 
Simple use case: @@ -2273,48 +2396,58 @@ def share_thumbnail(self, entities, thumbnail_path=None, source_entity=None, share the static thumbnail. Defaults to ``False``. :returns: ``id`` of the Attachment entity representing the source thumbnail that is shared. :rtype: int - :raises: :class:`ShotgunError` if not supported by server version or improperly called, + :raises: :class:`ShotgunError` if not supported by server version or improperly called, or :class:`ShotgunThumbnailNotReady` if thumbnail is still pending. """ if not self.server_caps.version or self.server_caps.version < (4, 0, 0): - raise ShotgunError("Thumbnail sharing support requires server " - "version 4.0 or higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Thumbnail sharing support requires server " + "version 4.0 or higher, server is %s" % (self.server_caps.version,) + ) if not isinstance(entities, list) or len(entities) == 0: - raise ShotgunError("'entities' parameter must be a list of entity " - "hashes and may not be empty") + raise ShotgunError( + "'entities' parameter must be a list of entity " + "hashes and may not be empty" + ) for e in entities: if not isinstance(e, dict) or "id" not in e or "type" not in e: - raise ShotgunError("'entities' parameter must be a list of " - "entity hashes with at least 'type' and 'id' keys.\nInvalid " - "entity: %s" % e) + raise ShotgunError( + "'entities' parameter must be a list of " + "entity hashes with at least 'type' and 'id' keys.\nInvalid " + "entity: %s" % e + ) - if (not thumbnail_path and not source_entity) or (thumbnail_path and source_entity): - raise ShotgunError("You must supply either thumbnail_path OR source_entity.") + if (not thumbnail_path and not source_entity) or ( + thumbnail_path and source_entity + ): + raise ShotgunError( + "You must supply either thumbnail_path OR source_entity." 
+ ) # upload thumbnail if thumbnail_path: source_entity = entities.pop(0) if filmstrip_thumbnail: thumb_id = self.upload_filmstrip_thumbnail( - source_entity["type"], - source_entity["id"], - thumbnail_path, - **kwargs + source_entity["type"], source_entity["id"], thumbnail_path, **kwargs ) else: thumb_id = self.upload_thumbnail( - source_entity["type"], - source_entity["id"], - thumbnail_path, - **kwargs + source_entity["type"], source_entity["id"], thumbnail_path, **kwargs ) else: - if not isinstance(source_entity, dict) or "id" not in source_entity or "type" not in source_entity: - raise ShotgunError("'source_entity' parameter must be a dict " - "with at least 'type' and 'id' keys.\nGot: %s (%s)" - % (source_entity, type(source_entity))) + if ( + not isinstance(source_entity, dict) + or "id" not in source_entity + or "type" not in source_entity + ): + raise ShotgunError( + "'source_entity' parameter must be a dict " + "with at least 'type' and 'id' keys.\nGot: %s (%s)" + % (source_entity, type(source_entity)) + ) # only 1 entity in list and we already uploaded the thumbnail to it if len(entities) == 0: @@ -2333,8 +2466,16 @@ def share_thumbnail(self, entities, thumbnail_path=None, source_entity=None, "filmstrip_thumbnail": filmstrip_thumbnail, } - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/share_thumbnail", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/share_thumbnail", + None, + None, + None, + ) + ) result = self._send_form(url, params) @@ -2377,7 +2518,9 @@ def upload_thumbnail(self, entity_type, entity_id, path, **kwargs): :param str path: Full path to the thumbnail file on disk. 
:returns: Id of the new attachment """ - return self.upload(entity_type, entity_id, path, field_name="thumb_image", **kwargs) + return self.upload( + entity_type, entity_id, path, field_name="thumb_image", **kwargs + ) def upload_filmstrip_thumbnail(self, entity_type, entity_id, path, **kwargs): """ @@ -2419,13 +2562,24 @@ def upload_filmstrip_thumbnail(self, entity_type, entity_id, path, **kwargs): :rtype: int """ if not self.server_caps.version or self.server_caps.version < (3, 1, 0): - raise ShotgunError("Filmstrip thumbnail support requires server version 3.1 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Filmstrip thumbnail support requires server version 3.1 or " + "higher, server is %s" % (self.server_caps.version,) + ) - return self.upload(entity_type, entity_id, path, field_name="filmstrip_thumb_image", **kwargs) + return self.upload( + entity_type, entity_id, path, field_name="filmstrip_thumb_image", **kwargs + ) - def upload(self, entity_type, entity_id, path, field_name=None, display_name=None, - tag_list=None): + def upload( + self, + entity_type, + entity_id, + path, + field_name=None, + display_name=None, + tag_list=None, + ): """ Upload a file to the specified entity. @@ -2434,7 +2588,7 @@ def upload(self, entity_type, entity_id, path, field_name=None, display_name=Non assign tags to the Attachment. .. note:: - Make sure to have retries for file uploads. Failures when uploading will occasionally happen. + Make sure to have retries for file uploads. Failures when uploading will occasionally happen. 
When it does, immediately retrying to upload usually works >>> mov_file = '/data/show/ne2/100_110/anim/01.mlk-02b.mov' @@ -2477,19 +2631,45 @@ def upload(self, entity_type, entity_id, path, field_name=None, display_name=Non if os.path.getsize(path) == 0: raise ShotgunError("Path cannot be an empty file: '%s'" % path) - is_thumbnail = (field_name in ["thumb_image", "filmstrip_thumb_image", "image", - "filmstrip_image"]) + is_thumbnail = field_name in [ + "thumb_image", + "filmstrip_thumb_image", + "image", + "filmstrip_image", + ] # Supported types can be directly uploaded to Cloud storage if self._requires_direct_s3_upload(entity_type, field_name): - return self._upload_to_storage(entity_type, entity_id, path, field_name, display_name, - tag_list, is_thumbnail) + return self._upload_to_storage( + entity_type, + entity_id, + path, + field_name, + display_name, + tag_list, + is_thumbnail, + ) else: - return self._upload_to_sg(entity_type, entity_id, path, field_name, display_name, - tag_list, is_thumbnail) + return self._upload_to_sg( + entity_type, + entity_id, + path, + field_name, + display_name, + tag_list, + is_thumbnail, + ) - def _upload_to_storage(self, entity_type, entity_id, path, field_name, display_name, - tag_list, is_thumbnail): + def _upload_to_storage( + self, + entity_type, + entity_id, + path, + field_name, + display_name, + tag_list, + is_thumbnail, + ): """ Internal function to upload a file to the Cloud storage and link it to the specified entity. 
@@ -2509,9 +2689,11 @@ def _upload_to_storage(self, entity_type, entity_id, path, field_name, display_n # Step 1: get the upload url - is_multipart_upload = (os.path.getsize(path) > self._MULTIPART_UPLOAD_CHUNK_SIZE) + is_multipart_upload = os.path.getsize(path) > self._MULTIPART_UPLOAD_CHUNK_SIZE - upload_info = self._get_attachment_upload_info(is_thumbnail, filename, is_multipart_upload) + upload_info = self._get_attachment_upload_info( + is_thumbnail, filename, is_multipart_upload + ) # Step 2: upload the file # We upload large files in multiple parts because it is more robust @@ -2523,13 +2705,21 @@ def _upload_to_storage(self, entity_type, entity_id, path, field_name, display_n # Step 3: create the attachment - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/api_link_file", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/api_link_file", + None, + None, + None, + ) + ) params = { "entity_type": entity_type, "entity_id": entity_id, - "upload_link_info": upload_info["upload_info"] + "upload_link_info": upload_info["upload_info"], } params.update(self._auth_params()) @@ -2550,17 +2740,26 @@ def _upload_to_storage(self, entity_type, entity_id, path, field_name, display_n result = self._send_form(url, params) if not result.startswith("1"): - raise ShotgunError("Could not upload file successfully, but " - "not sure why.\nPath: %s\nUrl: %s\nError: %s" - % (path, url, result)) + raise ShotgunError( + "Could not upload file successfully, but " + "not sure why.\nPath: %s\nUrl: %s\nError: %s" % (path, url, result) + ) LOG.debug("Attachment linked to content on Cloud storage") attachment_id = int(result.split(":", 2)[1].split("\n", 1)[0]) return attachment_id - def _upload_to_sg(self, entity_type, entity_id, path, field_name, display_name, - tag_list, is_thumbnail): + def _upload_to_sg( + self, + entity_type, + entity_id, + path, + field_name, + display_name, + tag_list, + 
is_thumbnail, + ): """ Internal function to upload a file to Shotgun and link it to the specified entity. @@ -2585,14 +2784,30 @@ def _upload_to_sg(self, entity_type, entity_id, path, field_name, display_name, params.update(self._auth_params()) if is_thumbnail: - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/publish_thumbnail", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/publish_thumbnail", + None, + None, + None, + ) + ) params["thumb_image"] = open(path, "rb") if field_name == "filmstrip_thumb_image" or field_name == "filmstrip_image": params["filmstrip"] = True else: - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/upload_file", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/upload_file", + None, + None, + None, + ) + ) if display_name is None: display_name = os.path.basename(path) # we allow linking to nothing for generic reference use cases @@ -2608,9 +2823,10 @@ def _upload_to_sg(self, entity_type, entity_id, path, field_name, display_name, result = self._send_form(url, params) if not result.startswith("1"): - raise ShotgunError("Could not upload file successfully, but " - "not sure why.\nPath: %s\nUrl: %s\nError: %s" - % (path, url, result)) + raise ShotgunError( + "Could not upload file successfully, but " + "not sure why.\nPath: %s\nUrl: %s\nError: %s" % (path, url, result) + ) attachment_id = int(result.split(":", 2)[1].split("\n", 1)[0]) return attachment_id @@ -2633,21 +2849,22 @@ def _get_attachment_upload_info(self, is_thumbnail, filename, is_multipart_uploa else: upload_type = "Attachment" - params = { - "upload_type": upload_type, - "filename": filename - } + params = {"upload_type": upload_type, "filename": filename} params["multipart_upload"] = is_multipart_upload upload_url = "/upload/api_get_upload_link_info" - url = 
urllib.parse.urlunparse((self.config.scheme, self.config.server, upload_url, None, None, None)) + url = urllib.parse.urlunparse( + (self.config.scheme, self.config.server, upload_url, None, None, None) + ) upload_info = self._send_form(url, params) if not upload_info.startswith("1"): - raise ShotgunError("Could not get upload_url but " - "not sure why.\nPath: %s\nUrl: %s\nError: %s" - % (filename, url, upload_info)) + raise ShotgunError( + "Could not get upload_url but " + "not sure why.\nPath: %s\nUrl: %s\nError: %s" + % (filename, url, upload_info) + ) LOG.debug("Completed rpc call to %s" % (upload_url)) @@ -2658,7 +2875,7 @@ def _get_attachment_upload_info(self, is_thumbnail, filename, is_multipart_uploa "timestamp": upload_info_parts[2], "upload_type": upload_info_parts[3], "upload_id": upload_info_parts[4], - "upload_info": upload_info + "upload_info": upload_info, } def download_attachment(self, attachment=False, file_path=None, attachment_id=None): @@ -2702,16 +2919,19 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No if type(attachment_id) == int: attachment = attachment_id else: - raise TypeError("Missing parameter 'attachment'. Expected a " - "dict, int, NoneType value or" - "an int for parameter attachment_id") + raise TypeError( + "Missing parameter 'attachment'. Expected a " + "dict, int, NoneType value or" + "an int for parameter attachment_id" + ) # write to disk if file_path: try: fp = open(file_path, "wb") except IOError as e: - raise IOError("Unable to write Attachment to disk using " - "file_path. %s" % e) + raise IOError( + "Unable to write Attachment to disk using " "file_path. %s" % e + ) url = self.get_attachment_download_url(attachment) if url is None: @@ -2742,7 +2962,10 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No err += "\nAttachment may not exist or is a local file?" elif e.code == 403: # Only parse the body if it is an Amazon S3 url. 
- if url.find("s3.amazonaws.com") != -1 and e.headers["content-type"] == "application/xml": + if ( + url.find("s3.amazonaws.com") != -1 + and e.headers["content-type"] == "application/xml" + ): body = [sgutils.ensure_text(line) for line in e.readlines()] if body: xml = "".join(body) @@ -2778,8 +3001,24 @@ def get_auth_cookie_handler(self): """ sid = self.get_session_token() cj = http_cookiejar.LWPCookieJar() - c = http_cookiejar.Cookie("0", "_session_id", sid, None, False, self.config.server, False, - False, "/", True, False, None, True, None, None, {}) + c = http_cookiejar.Cookie( + "0", + "_session_id", + sid, + None, + False, + self.config.server, + False, + False, + "/", + True, + False, + None, + True, + None, + None, + {}, + ) cj.set_cookie(c) return urllib.request.HTTPCookieProcessor(cj) @@ -2811,20 +3050,34 @@ def get_attachment_download_url(self, attachment): try: url = attachment["url"] except KeyError: - if ("id" in attachment and "type" in attachment and attachment["type"] == "Attachment"): + if ( + "id" in attachment + and "type" in attachment + and attachment["type"] == "Attachment" + ): attachment_id = attachment["id"] else: raise ValueError("Missing 'url' key in Attachment dict") elif attachment is None: url = None else: - raise TypeError("Unable to determine download url. Expected " - "dict, int, or NoneType. Instead got %s" % type(attachment)) + raise TypeError( + "Unable to determine download url. Expected " + "dict, int, or NoneType. 
Instead got %s" % type(attachment) + ) if attachment_id: - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/file_serve/attachment/%s" % urllib.parse.quote(str(attachment_id)), - None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/file_serve/attachment/%s" + % urllib.parse.quote(str(attachment_id)), + None, + None, + None, + ) + ) return url def authenticate_human_user(self, user_login, user_password, auth_token=None): @@ -2862,9 +3115,13 @@ def authenticate_human_user(self, user_login, user_password, auth_token=None): self.config.auth_token = auth_token try: - data = self.find_one("HumanUser", [["sg_status_list", "is", "act"], - ["login", "is", user_login]], - ["id", "login"], "", "all") + data = self.find_one( + "HumanUser", + [["sg_status_list", "is", "act"], ["login", "is", user_login]], + ["id", "login"], + "", + "all", + ) # Set back to default - There finally and except cannot be used together in python2.4 self.config.user_login = original_login self.config.user_password = original_password @@ -2902,18 +3159,26 @@ def update_project_last_accessed(self, project, user=None): value from the current instance will be used instead. 
""" if self.server_caps.version and self.server_caps.version < (5, 3, 20): - raise ShotgunError("update_project_last_accessed requires server version 5.3.20 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "update_project_last_accessed requires server version 5.3.20 or " + "higher, server is %s" % (self.server_caps.version,) + ) if not user: # Try to use sudo as user if present if self.config.sudo_as_login: - user = self.find_one("HumanUser", [["login", "is", self.config.sudo_as_login]]) + user = self.find_one( + "HumanUser", [["login", "is", self.config.sudo_as_login]] + ) # Try to use login if present if self.config.user_login: - user = self.find_one("HumanUser", [["login", "is", self.config.user_login]]) + user = self.find_one( + "HumanUser", [["login", "is", self.config.user_login]] + ) - params = {"project_id": project["id"], } + params = { + "project_id": project["id"], + } if user: params["user_id"] = user["id"] @@ -2979,8 +3244,10 @@ def note_thread_read(self, note_id, entity_fields=None): """ if self.server_caps.version and self.server_caps.version < (6, 2, 0): - raise ShotgunError("note_thread requires server version 6.2.0 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "note_thread requires server version 6.2.0 or " + "higher, server is %s" % (self.server_caps.version,) + ) entity_fields = entity_fields or {} @@ -3050,8 +3317,10 @@ def text_search(self, text, entity_types, project_ids=None, limit=None): :rtype: dict """ if self.server_caps.version and self.server_caps.version < (6, 2, 0): - raise ShotgunError("auto_complete requires server version 6.2.0 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "auto_complete requires server version 6.2.0 or " + "higher, server is %s" % (self.server_caps.version,) + ) # convert entity_types structure into the form # that the API endpoint expects @@ -3059,28 +3328,39 @@ def text_search(self, text, 
entity_types, project_ids=None, limit=None): raise ValueError("entity_types parameter must be a dictionary") api_entity_types = {} - for (entity_type, filter_list) in six.iteritems(entity_types): + for entity_type, filter_list in six.iteritems(entity_types): if isinstance(filter_list, (list, tuple)): resolved_filters = _translate_filters(filter_list, filter_operator=None) api_entity_types[entity_type] = resolved_filters else: - raise ValueError("value of entity_types['%s'] must " - "be a list or tuple." % entity_type) + raise ValueError( + "value of entity_types['%s'] must " + "be a list or tuple." % entity_type + ) project_ids = project_ids or [] - params = {"text": text, - "entity_types": api_entity_types, - "project_ids": project_ids, - "max_results": limit} + params = { + "text": text, + "entity_types": api_entity_types, + "project_ids": project_ids, + "max_results": limit, + } record = self._call_rpc("query_display_name_cache", params) result = self._parse_records(record)[0] return result - def activity_stream_read(self, entity_type, entity_id, entity_fields=None, min_id=None, - max_id=None, limit=None): + def activity_stream_read( + self, + entity_type, + entity_id, + entity_fields=None, + min_id=None, + max_id=None, + limit=None, + ): """ Retrieve activity stream data from Shotgun. @@ -3146,8 +3426,10 @@ def activity_stream_read(self, entity_type, entity_id, entity_fields=None, min_i :rtype: dict """ if self.server_caps.version and self.server_caps.version < (6, 2, 0): - raise ShotgunError("activity_stream requires server version 6.2.0 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "activity_stream requires server version 6.2.0 or " + "higher, server is %s" % (self.server_caps.version,) + ) # set up parameters to send to server. 
entity_fields = entity_fields or {} @@ -3155,12 +3437,14 @@ def activity_stream_read(self, entity_type, entity_id, entity_fields=None, min_i if not isinstance(entity_fields, dict): raise ValueError("entity_fields parameter must be a dictionary") - params = {"type": entity_type, - "id": entity_id, - "max_id": max_id, - "min_id": min_id, - "limit": limit, - "entity_fields": entity_fields} + params = { + "type": entity_type, + "id": entity_id, + "max_id": max_id, + "min_id": min_id, + "limit": limit, + "entity_fields": entity_fields, + } record = self._call_rpc("activity_stream", params) result = self._parse_records(record)[0] @@ -3182,8 +3466,8 @@ def nav_expand(self, path, seed_entity_field=None, entity_fields=None): { "path": path, "seed_entity_field": seed_entity_field, - "entity_fields": entity_fields - } + "entity_fields": entity_fields, + }, ) def nav_search_string(self, root_path, search_string, seed_entity_field=None): @@ -3201,8 +3485,8 @@ def nav_search_string(self, root_path, search_string, seed_entity_field=None): { "root_path": root_path, "seed_entity_field": seed_entity_field, - "search_criteria": {"search_string": search_string} - } + "search_criteria": {"search_string": search_string}, + }, ) def nav_search_entity(self, root_path, entity, seed_entity_field=None): @@ -3221,8 +3505,8 @@ def nav_search_entity(self, root_path, entity, seed_entity_field=None): { "root_path": root_path, "seed_entity_field": seed_entity_field, - "search_criteria": {"entity": entity} - } + "search_criteria": {"entity": entity}, + }, ) def get_session_token(self): @@ -3263,8 +3547,10 @@ def preferences_read(self, prefs=None): :rtype: dict """ if self.server_caps.version and self.server_caps.version < (7, 10, 0): - raise ShotgunError("preferences_read requires server version 7.10.0 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "preferences_read requires server version 7.10.0 or " + "higher, server is %s" % (self.server_caps.version,) + ) 
prefs = prefs or [] @@ -3297,10 +3583,7 @@ def user_subscriptions_create(self, users): :rtype: bool """ - response = self._call_rpc( - "user_subscriptions_create", - {"users": users} - ) + response = self._call_rpc("user_subscriptions_create", {"users": users}) if not isinstance(response, dict): return False @@ -3390,9 +3673,14 @@ def _turn_off_ssl_validation(self): self.config.no_ssl_validation = True NO_SSL_VALIDATION = True # reset ssl-validation in user-agents - self._user_agents = ["ssl %s (no-validate)" % self.client_caps.ssl_version - if ua.startswith("ssl ") else ua - for ua in self._user_agents] + self._user_agents = [ + ( + "ssl %s (no-validate)" % self.client_caps.ssl_version + if ua.startswith("ssl ") + else ua + ) + for ua in self._user_agents + ] # Deprecated methods from old wrapper def schema(self, entity_type): @@ -3400,7 +3688,9 @@ def schema(self, entity_type): .. deprecated:: 3.0.0 Use :meth:`~shotgun_api3.Shotgun.schema_field_read` instead. """ - raise ShotgunError("Deprecated: use schema_field_read('%s') instead" % entity_type) + raise ShotgunError( + "Deprecated: use schema_field_read('%s') instead" % entity_type + ) def entity_types(self): """ @@ -3408,6 +3698,7 @@ def entity_types(self): Use :meth:`~shotgun_api3.Shotgun.schema_entity_read` instead. """ raise ShotgunError("Deprecated: use schema_entity_read() instead") + # ======================================================================== # RPC Functions @@ -3416,16 +3707,17 @@ def _call_rpc(self, method, params, include_auth_params=True, first=False): Call the specified method on the Shotgun Server sending the supplied payload. 
""" - LOG.debug("Starting rpc call to %s with params %s" % ( - method, params)) + LOG.debug("Starting rpc call to %s with params %s" % (method, params)) params = self._transform_outbound(params) - payload = self._build_payload(method, params, include_auth_params=include_auth_params) + payload = self._build_payload( + method, params, include_auth_params=include_auth_params + ) encoded_payload = self._encode_payload(payload) req_headers = { "content-type": "application/json; charset=utf-8", - "connection": "keep-alive" + "connection": "keep-alive", } if self.config.localized is True: @@ -3497,8 +3789,10 @@ def _auth_params(self): # Authenticate using session_id elif self.config.session_token: if self.server_caps.version and self.server_caps.version < (5, 3, 0): - raise ShotgunError("Session token based authentication requires server version " - "5.3.0 or higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Session token based authentication requires server version " + "5.3.0 or higher, server is %s" % (self.server_caps.version,) + ) auth_params = {"session_token": str(self.config.session_token)} @@ -3516,8 +3810,10 @@ def _auth_params(self): # Make sure sudo_as_login is supported by server version if self.config.sudo_as_login: if self.server_caps.version and self.server_caps.version < (5, 3, 12): - raise ShotgunError("Option 'sudo_as_login' requires server version 5.3.12 or " - "higher, server is %s" % (self.server_caps.version,)) + raise ShotgunError( + "Option 'sudo_as_login' requires server version 5.3.12 or " + "higher, server is %s" % (self.server_caps.version,) + ) auth_params["sudo_as_login"] = self.config.sudo_as_login if self.config.extra_auth_params: @@ -3552,10 +3848,7 @@ def _build_payload(self, method, params, include_auth_params=True): if params: call_params.append(params) - return { - "method_name": method, - "params": call_params - } + return {"method_name": method, "params": call_params} def _encode_payload(self, payload): 
""" @@ -3588,7 +3881,7 @@ def _make_call(self, verb, path, body, headers): max_rpc_attempts = self.config.max_rpc_attempts rpc_attempt_interval = self.config.rpc_attempt_interval / 1000.0 - while (attempt < max_rpc_attempts): + while attempt < max_rpc_attempts: attempt += 1 try: return self._http_request(verb, path, body, req_headers) @@ -3624,15 +3917,19 @@ def _make_call(self, verb, path, body, headers): # unknown message digest algorithm # # Any other exceptions simply get raised. - if "unknown message digest algorithm" not in str(e) or \ - "SHOTGUN_FORCE_CERTIFICATE_VALIDATION" in os.environ: + if ( + "unknown message digest algorithm" not in str(e) + or "SHOTGUN_FORCE_CERTIFICATE_VALIDATION" in os.environ + ): raise if self.config.no_ssl_validation is False: - LOG.warning("SSL Error: this Python installation is incompatible with " - "certificates signed with SHA-2. Disabling certificate validation. " - "For more information, see https://www.shotgridsoftware.com/blog/" - "important-ssl-certificate-renewal-and-sha-2/") + LOG.warning( + "SSL Error: this Python installation is incompatible with " + "certificates signed with SHA-2. Disabling certificate validation. " + "For more information, see https://www.shotgridsoftware.com/blog/" + "important-ssl-certificate-renewal-and-sha-2/" + ) self._turn_off_ssl_validation() # reload user agent to reflect that we have turned off ssl validation req_headers["user-agent"] = "; ".join(self._user_agents) @@ -3648,8 +3945,8 @@ def _make_call(self, verb, path, body, headers): raise LOG.debug( - "Request failed, attempt %d of %d. Retrying in %.2f seconds..." % - (attempt, max_rpc_attempts, rpc_attempt_interval) + "Request failed, attempt %d of %d. Retrying in %.2f seconds..." + % (attempt, max_rpc_attempts, rpc_attempt_interval) ) time.sleep(rpc_attempt_interval) @@ -3657,7 +3954,9 @@ def _http_request(self, verb, path, body, headers): """ Make the actual HTTP request. 
""" - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, path, None, None, None)) + url = urllib.parse.urlunparse( + (self.config.scheme, self.config.server, path, None, None, None) + ) LOG.debug("Request is %s:%s" % (verb, url)) LOG.debug("Request headers are %s" % headers) LOG.debug("Request body is %s" % body) @@ -3666,10 +3965,7 @@ def _http_request(self, verb, path, body, headers): resp, content = conn.request(url, method=verb, body=body, headers=headers) # http response code is handled else where http_status = (resp.status, resp.reason) - resp_headers = dict( - (k.lower(), v) - for k, v in six.iteritems(resp) - ) + resp_headers = dict((k.lower(), v) for k, v in six.iteritems(resp)) resp_body = content LOG.debug("Response status is %s %s" % http_status) @@ -3704,10 +4000,7 @@ def _parse_http_status(self, status): headers = "HTTP error from server" if status[0] == 503: errmsg = "Flow Production Tracking is currently down for maintenance or too busy to reply. Please try again later." 
- raise ProtocolError(self.config.server, - error_code, - errmsg, - headers) + raise ProtocolError(self.config.server, error_code, errmsg, headers) return @@ -3739,6 +4032,7 @@ def _json_loads_ascii(self, body): """ See http://stackoverflow.com/questions/956867 """ + def _decode_list(lst): newlist = [] for i in lst: @@ -3760,6 +4054,7 @@ def _decode_dict(dct): v = _decode_list(v) newdict[k] = v return newdict + return json.loads(body, object_hook=_decode_dict) def _response_errors(self, sg_response): @@ -3780,21 +4075,28 @@ def _response_errors(self, sg_response): if isinstance(sg_response, dict) and sg_response.get("exception"): if sg_response.get("error_code") == ERR_AUTH: - raise AuthenticationFault(sg_response.get("message", "Unknown Authentication Error")) + raise AuthenticationFault( + sg_response.get("message", "Unknown Authentication Error") + ) elif sg_response.get("error_code") == ERR_2FA: raise MissingTwoFactorAuthenticationFault( sg_response.get("message", "Unknown 2FA Authentication Error") ) elif sg_response.get("error_code") == ERR_SSO: raise UserCredentialsNotAllowedForSSOAuthenticationFault( - sg_response.get("message", - "Authentication using username/password is not " - "allowed for an SSO-enabled Flow Production Tracking site") + sg_response.get( + "message", + "Authentication using username/password is not " + "allowed for an SSO-enabled Flow Production Tracking site", + ) ) elif sg_response.get("error_code") == ERR_OXYG: raise UserCredentialsNotAllowedForOxygenAuthenticationFault( - sg_response.get("message", "Authentication using username/password is not " - "allowed for an Autodesk Identity enabled Flow Production Tracking site") + sg_response.get( + "message", + "Authentication using username/password is not " + "allowed for an Autodesk Identity enabled Flow Production Tracking site", + ) ) else: # raise general Fault @@ -3817,10 +4119,7 @@ def _visit_data(self, data, visitor): return tuple(recursive(i, visitor) for i in data) if 
isinstance(data, dict): - return dict( - (k, recursive(v, visitor)) - for k, v in six.iteritems(data) - ) + return dict((k, recursive(v, visitor)) for k, v in six.iteritems(data)) return visitor(data) @@ -3833,10 +4132,12 @@ def _transform_outbound(self, data): """ if self.config.convert_datetimes_to_utc: + def _change_tz(value): if value.tzinfo is None: value = value.replace(tzinfo=SG_TIMEZONE.local) return value.astimezone(SG_TIMEZONE.utc) + else: _change_tz = None @@ -3859,7 +4160,7 @@ def _outbound_visitor(value): hour=value.hour, minute=value.minute, second=value.second, - microsecond=value.microsecond + microsecond=value.microsecond, ) if _change_tz: value = _change_tz(value) @@ -3881,8 +4182,10 @@ def _transform_inbound(self, data): # to the local time, otherwise it will fail to compare to datetimes # that do not have a time zone. if self.config.convert_datetimes_to_utc: + def _change_tz(x): return x.replace(tzinfo=SG_TIMEZONE.utc).astimezone(SG_TIMEZONE.local) + else: _change_tz = None @@ -3892,7 +4195,8 @@ def _inbound_visitor(value): try: # strptime was not on datetime in python2.4 value = datetime.datetime( - *time.strptime(value, "%Y-%m-%dT%H:%M:%SZ")[:6]) + *time.strptime(value, "%Y-%m-%dT%H:%M:%SZ")[:6] + ) except ValueError: return value if _change_tz: @@ -3914,14 +4218,26 @@ def _get_connection(self): return self._connection if self.config.proxy_server: - pi = ProxyInfo(socks.PROXY_TYPE_HTTP, self.config.proxy_server, - self.config.proxy_port, proxy_user=self.config.proxy_user, - proxy_pass=self.config.proxy_pass) - self._connection = Http(timeout=self.config.timeout_secs, ca_certs=self.__ca_certs, - proxy_info=pi, disable_ssl_certificate_validation=self.config.no_ssl_validation) + pi = ProxyInfo( + socks.PROXY_TYPE_HTTP, + self.config.proxy_server, + self.config.proxy_port, + proxy_user=self.config.proxy_user, + proxy_pass=self.config.proxy_pass, + ) + self._connection = Http( + timeout=self.config.timeout_secs, + ca_certs=self.__ca_certs, + 
proxy_info=pi, + disable_ssl_certificate_validation=self.config.no_ssl_validation, + ) else: - self._connection = Http(timeout=self.config.timeout_secs, ca_certs=self.__ca_certs, - proxy_info=None, disable_ssl_certificate_validation=self.config.no_ssl_validation) + self._connection = Http( + timeout=self.config.timeout_secs, + ca_certs=self.__ca_certs, + proxy_info=None, + disable_ssl_certificate_validation=self.config.no_ssl_validation, + ) return self._connection @@ -3940,6 +4256,7 @@ def _close_connection(self): self._connection.connections.clear() self._connection = None return + # ======================================================================== # Utility @@ -3961,7 +4278,9 @@ def _parse_records(self, records): return [] if not isinstance(records, (list, tuple)): - records = [records, ] + records = [ + records, + ] for rec in records: # skip results that aren't entity dictionaries @@ -3978,11 +4297,19 @@ def _parse_records(self, records): rec[k] = rec[k].replace("<", "<") # check for thumbnail for older version (<3.3.0) of shotgun - if k == "image" and self.server_caps.version and self.server_caps.version < (3, 3, 0): + if ( + k == "image" + and self.server_caps.version + and self.server_caps.version < (3, 3, 0) + ): rec["image"] = self._build_thumb_url(rec["type"], rec["id"]) continue - if isinstance(v, dict) and v.get("link_type") == "local" and self.client_caps.local_path_field in v: + if ( + isinstance(v, dict) + and v.get("link_type") == "local" + and self.client_caps.local_path_field in v + ): local_path = v[self.client_caps.local_path_field] v["local_path"] = local_path v["url"] = "file://%s" % (local_path or "",) @@ -4003,10 +4330,14 @@ def _build_thumb_url(self, entity_type, entity_id): # curl "https://foo.com/upload/get_thumbnail_url?entity_type=Version&entity_id=1" # 1 # /files/0000/0000/0012/232/shot_thumb.jpg.jpg - entity_info = {"e_type": urllib.parse.quote(entity_type), - "e_id": urllib.parse.quote(str(entity_id))} - url = 
("/upload/get_thumbnail_url?" + - "entity_type=%(e_type)s&entity_id=%(e_id)s" % entity_info) + entity_info = { + "e_type": urllib.parse.quote(entity_type), + "e_id": urllib.parse.quote(str(entity_id)), + } + url = ( + "/upload/get_thumbnail_url?" + + "entity_type=%(e_type)s&entity_id=%(e_id)s" % entity_info + ) body = self._make_call("GET", url, None, None)[2] @@ -4018,15 +4349,23 @@ def _build_thumb_url(self, entity_type, entity_id): raise ShotgunError(thumb_url) if code == 1: - return urllib.parse.urlunparse((self.config.scheme, - self.config.server, - thumb_url.strip(), - None, None, None)) + return urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + thumb_url.strip(), + None, + None, + None, + ) + ) # Comments in prev version said we can get this sometimes. raise RuntimeError("Unknown code %s %s" % (code, thumb_url)) - def _dict_to_list(self, d, key_name="field_name", value_name="value", extra_data=None): + def _dict_to_list( + self, d, key_name="field_name", value_name="value", extra_data=None + ): """ Utility function to convert a dict into a list dicts using the key_name and value_name keys. @@ -4098,8 +4437,14 @@ def _multipart_upload_file_to_storage(self, path, upload_info): # encoded. 
data = BytesIO(data) bytes_read += data_size - part_url = self._get_upload_part_link(upload_info, filename, part_number) - etags.append(self._upload_data_to_storage(data, content_type, data_size, part_url)) + part_url = self._get_upload_part_link( + upload_info, filename, part_number + ) + etags.append( + self._upload_data_to_storage( + data, content_type, data_size, part_url + ) + ) part_number += 1 self._complete_multipart_upload(upload_info, filename, etags) @@ -4124,11 +4469,19 @@ def _get_upload_part_link(self, upload_info, filename, part_number): "filename": filename, "timestamp": upload_info["timestamp"], "upload_id": upload_info["upload_id"], - "part_number": part_number + "part_number": part_number, } - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/api_get_upload_link_for_part", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/api_get_upload_link_for_part", + None, + None, + None, + ) + ) result = self._send_form(url, params) # Response is of the form: 1\n (for success) or 0\n (for failure). @@ -4172,9 +4525,15 @@ def _upload_data_to_storage(self, data, content_type, size, storage_url): attempt += 1 continue elif e.code in [500, 503]: - raise ShotgunError("Got a %s response when uploading to %s: %s" % (e.code, storage_url, e)) + raise ShotgunError( + "Got a %s response when uploading to %s: %s" + % (e.code, storage_url, e) + ) else: - raise ShotgunError("Unanticipated error occurred uploading to %s: %s" % (storage_url, e)) + raise ShotgunError( + "Unanticipated error occurred uploading to %s: %s" + % (storage_url, e) + ) except urllib.error.URLError as e: LOG.debug("Got a '%s' response. Waiting and retrying..." 
% e) time.sleep(float(attempt) * self.BACKOFF) @@ -4203,11 +4562,19 @@ def _complete_multipart_upload(self, upload_info, filename, etags): "filename": filename, "timestamp": upload_info["timestamp"], "upload_id": upload_info["upload_id"], - "etags": ",".join(etags) + "etags": ",".join(etags), } - url = urllib.parse.urlunparse((self.config.scheme, self.config.server, - "/upload/api_complete_multipart_upload", None, None, None)) + url = urllib.parse.urlunparse( + ( + self.config.scheme, + self.config.server, + "/upload/api_complete_multipart_upload", + None, + None, + None, + ) + ) result = self._send_form(url, params) # Response is of the form: 1\n or 0\n to indicate success or failure of the call. @@ -4283,8 +4650,11 @@ def _send_form(self, url, params): continue except urllib.error.HTTPError as e: if e.code == 500: - raise ShotgunError("Server encountered an internal error. " - "\n%s\n(%s)\n%s\n\n" % (url, self._sanitize_auth_params(params), e)) + raise ShotgunError( + "Server encountered an internal error. " + "\n%s\n(%s)\n%s\n\n" + % (url, self._sanitize_auth_params(params), e) + ) else: raise ShotgunError("Unanticipated error occurred %s" % (e)) @@ -4294,7 +4664,7 @@ def _send_form(self, url, params): class CACertsHTTPSConnection(http_client.HTTPConnection): - """" + """ " This class allows to create an HTTPS connection that uses the custom certificates passed in. 
""" @@ -4324,9 +4694,7 @@ def connect(self): self.sock = context.wrap_socket(self.sock) else: self.sock = ssl.wrap_socket( - self.sock, - ca_certs=self.__ca_certs, - cert_reqs=ssl.CERT_REQUIRED + self.sock, ca_certs=self.__ca_certs, cert_reqs=ssl.CERT_REQUIRED ) @@ -4352,6 +4720,7 @@ class FormPostHandler(urllib.request.BaseHandler): """ Handler for multipart form data """ + handler_order = urllib.request.HTTPHandler.handler_order - 10 # needs to run first def http_request(self, request): @@ -4370,7 +4739,9 @@ def http_request(self, request): else: params.append((key, value)) if not files: - data = sgutils.ensure_binary(urllib.parse.urlencode(params, True)) # sequencing on + data = sgutils.ensure_binary( + urllib.parse.urlencode(params, True) + ) # sequencing on else: boundary, data = self.encode(params, files) content_type = "multipart/form-data; boundary=%s" % boundary @@ -4392,7 +4763,7 @@ def encode(self, params, files, boundary=None, buffer=None): boundary = uuid.uuid4() if buffer is None: buffer = BytesIO() - for (key, value) in params: + for key, value in params: if not isinstance(value, str): # If value is not a string (e.g. int) cast to text value = str(value) @@ -4400,9 +4771,11 @@ def encode(self, params, files, boundary=None, buffer=None): key = sgutils.ensure_text(key) buffer.write(sgutils.ensure_binary("--%s\r\n" % boundary)) - buffer.write(sgutils.ensure_binary("Content-Disposition: form-data; name=\"%s\"" % key)) + buffer.write( + sgutils.ensure_binary('Content-Disposition: form-data; name="%s"' % key) + ) buffer.write(sgutils.ensure_binary("\r\n\r\n%s\r\n" % value)) - for (key, fd) in files: + for key, fd in files: # On Windows, it's possible that we were forced to open a file # with non-ascii characters as unicode. In that case, we need to # encode it as a utf-8 string to remove unicode from the equation. 
@@ -4416,7 +4789,7 @@ def encode(self, params, files, boundary=None, buffer=None): content_type = content_type or "application/octet-stream" file_size = os.fstat(fd.fileno())[stat.ST_SIZE] buffer.write(sgutils.ensure_binary("--%s\r\n" % boundary)) - c_dis = "Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"%s" + c_dis = 'Content-Disposition: form-data; name="%s"; filename="%s"%s' content_disposition = c_dis % (key, filename, "\r\n") buffer.write(sgutils.ensure_binary(content_disposition)) buffer.write(sgutils.ensure_binary("Content-Type: %s\r\n" % content_type)) @@ -4438,10 +4811,7 @@ def _translate_filters(filters, filter_operator): """ Translate filters params into data structure expected by rpc call. """ - wrapped_filters = { - "filter_operator": filter_operator or "all", - "filters": filters - } + wrapped_filters = {"filter_operator": filter_operator or "all", "filters": filters} return _translate_filters_dict(wrapped_filters) @@ -4458,8 +4828,9 @@ def _translate_filters_dict(sg_filter): raise ShotgunError("Invalid filter_operator %s" % filter_operator) if not isinstance(sg_filter["filters"], (list, tuple)): - raise ShotgunError("Invalid filters, expected a list or a tuple, got %s" - % sg_filter["filters"]) + raise ShotgunError( + "Invalid filters, expected a list or a tuple, got %s" % sg_filter["filters"] + ) new_filters["conditions"] = _translate_filters_list(sg_filter["filters"]) @@ -4475,17 +4846,15 @@ def _translate_filters_list(filters): elif isinstance(sg_filter, dict): conditions.append(_translate_filters_dict(sg_filter)) else: - raise ShotgunError("Invalid filters, expected a list, tuple or dict, got %s" - % sg_filter) + raise ShotgunError( + "Invalid filters, expected a list, tuple or dict, got %s" % sg_filter + ) return conditions def _translate_filters_simple(sg_filter): - condition = { - "path": sg_filter[0], - "relation": sg_filter[1] - } + condition = {"path": sg_filter[0], "relation": sg_filter[1]} values = sg_filter[2:] if 
len(values) == 1 and isinstance(values[0], (list, tuple)): @@ -4523,7 +4892,7 @@ def _get_type_and_id_from_value(value): if isinstance(value, dict): return {"type": value["type"], "id": value["id"]} elif isinstance(value, list): - return [{"type": v["type"], "id": v["id"]} for v in value] + return [{"type": v["type"], "id": v["id"]} for v in value] except (KeyError, TypeError): LOG.debug(f"Could not optimize entity value {value}") diff --git a/tests/base.py b/tests/base.py index 4eaafb867..2820d495d 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,4 +1,5 @@ """Base class for Flow Production Tracking API tests.""" + import contextlib import os import random @@ -33,9 +34,9 @@ def skip(f): class TestBase(unittest.TestCase): - '''Base class for tests. + """Base class for tests. - Sets up mocking and database test data.''' + Sets up mocking and database test data.""" human_user = None project = None @@ -74,7 +75,7 @@ def setUpClass(cls): script_name=cls.config.script_name, api_key=cls.config.api_key ) - def setUp(self, auth_mode='ApiUser'): + def setUp(self, auth_mode="ApiUser"): # When running the tests from a pull request from a client, the Shotgun # site URL won't be set, so do not attempt to run the test. 
if not self.config.server_url: @@ -88,30 +89,38 @@ def setUp(self, auth_mode='ApiUser'): self.http_proxy = self.config.http_proxy self.session_uuid = self.config.session_uuid - if auth_mode == 'ApiUser': - self.sg = api.Shotgun(self.config.server_url, - self.config.script_name, - self.config.api_key, - http_proxy=self.config.http_proxy, - connect=self.connect) - elif auth_mode == 'HumanUser': - self.sg = api.Shotgun(self.config.server_url, - login=self.human_login, - password=self.human_password, - http_proxy=self.config.http_proxy, - connect=self.connect) - elif auth_mode == 'SessionToken': + if auth_mode == "ApiUser": + self.sg = api.Shotgun( + self.config.server_url, + self.config.script_name, + self.config.api_key, + http_proxy=self.config.http_proxy, + connect=self.connect, + ) + elif auth_mode == "HumanUser": + self.sg = api.Shotgun( + self.config.server_url, + login=self.human_login, + password=self.human_password, + http_proxy=self.config.http_proxy, + connect=self.connect, + ) + elif auth_mode == "SessionToken": # first make an instance based on script key/name so # we can generate a session token - sg = api.Shotgun(self.config.server_url, - http_proxy=self.config.http_proxy, - **self.auth_args) + sg = api.Shotgun( + self.config.server_url, + http_proxy=self.config.http_proxy, + **self.auth_args, + ) self.session_token = sg.get_session_token() # now log in using session token - self.sg = api.Shotgun(self.config.server_url, - session_token=self.session_token, - http_proxy=self.config.http_proxy, - connect=self.connect) + self.sg = api.Shotgun( + self.config.server_url, + session_token=self.session_token, + http_proxy=self.config.http_proxy, + connect=self.connect, + ) else: raise ValueError("Unknown value for auth_mode: %s" % auth_mode) @@ -123,7 +132,7 @@ def tearDown(self): class MockTestBase(TestBase): - '''Test base for tests mocking server interactions.''' + """Test base for tests mocking server interactions.""" def setUp(self): super(MockTestBase, 
self).setUp() @@ -132,23 +141,25 @@ def setUp(self): self._setup_mock_data() def _setup_mock(self, s3_status_code_error=503): - """Setup mocking on the ShotgunClient to stop it calling a live server - """ + """Setup mocking on the ShotgunClient to stop it calling a live server""" # Replace the function used to make the final call to the server # eaiser than mocking the http connection + response - self.sg._http_request = mock.Mock(spec=api.Shotgun._http_request, - return_value=((200, "OK"), {}, None)) + self.sg._http_request = mock.Mock( + spec=api.Shotgun._http_request, return_value=((200, "OK"), {}, None) + ) # Replace the function used to make the final call to the S3 server, and simulate # the exception HTTPError raised with 503 status errors - self.sg._make_upload_request = mock.Mock(spec=api.Shotgun._make_upload_request, - side_effect = urllib.error.HTTPError( - "url", - s3_status_code_error, - "The server is currently down or to busy to reply." - "Please try again later.", - {}, - None - )) + self.sg._make_upload_request = mock.Mock( + spec=api.Shotgun._make_upload_request, + side_effect=urllib.error.HTTPError( + "url", + s3_status_code_error, + "The server is currently down or to busy to reply." + "Please try again later.", + {}, + None, + ), + ) # also replace the function that is called to get the http connection # to avoid calling the server. 
OK to return a mock as we will not use # it @@ -160,8 +171,9 @@ def _setup_mock(self, s3_status_code_error=503): self.sg._get_connection = mock.Mock(return_value=self.mock_conn) # create the server caps directly to say we have the correct version - self.sg._server_caps = ServerCapabilities(self.sg.config.server, - {"version": [2, 4, 0]}) + self.sg._server_caps = ServerCapabilities( + self.sg.config.server, {"version": [2, 4, 0]} + ) # prevent waiting for backoff self.sg.BACKOFF = 0 @@ -177,24 +189,22 @@ def _mock_http(self, data, headers=None, status=None): if not isinstance(data, str): if six.PY2: - data = json.dumps( - data, - ensure_ascii=False, - encoding="utf-8" - ) + data = json.dumps(data, ensure_ascii=False, encoding="utf-8") else: data = json.dumps( data, ensure_ascii=False, ) - resp_headers = {'cache-control': 'no-cache', - 'connection': 'close', - 'content-length': (data and str(len(data))) or 0, - 'content-type': 'application/json; charset=utf-8', - 'date': 'Wed, 13 Apr 2011 04:18:58 GMT', - 'server': 'Apache/2.2.3 (CentOS)', - 'status': '200 OK'} + resp_headers = { + "cache-control": "no-cache", + "connection": "close", + "content-length": (data and str(len(data))) or 0, + "content-type": "application/json; charset=utf-8", + "date": "Wed, 13 Apr 2011 04:18:58 GMT", + "server": "Apache/2.2.3 (CentOS)", + "status": "200 OK", + } if headers: resp_headers.update(headers) @@ -220,42 +230,40 @@ def _assert_http_method(self, method, params, check_auth=True): self.assertEqual(self.api_key, auth["script_key"]) if params: - rpc_args = arg_params[len(arg_params)-1] + rpc_args = arg_params[len(arg_params) - 1] self.assertEqual(params, rpc_args) def _setup_mock_data(self): - self.human_user = {'id': 1, - 'login': self.config.human_login, - 'type': 'HumanUser'} - self.project = {'id': 2, - 'name': self.config.project_name, - 'type': 'Project'} - self.shot = {'id': 3, - 'code': self.config.shot_code, - 'type': 'Shot'} - self.asset = {'id': 4, - 'code': 
self.config.asset_code, - 'type': 'Asset'} - self.version = {'id': 5, - 'code': self.config.version_code, - 'type': 'Version'} - self.playlist = {'id': 7, - 'code': self.config.playlist_code, - 'type': 'Playlist'} + self.human_user = { + "id": 1, + "login": self.config.human_login, + "type": "HumanUser", + } + self.project = {"id": 2, "name": self.config.project_name, "type": "Project"} + self.shot = {"id": 3, "code": self.config.shot_code, "type": "Shot"} + self.asset = {"id": 4, "code": self.config.asset_code, "type": "Asset"} + self.version = {"id": 5, "code": self.config.version_code, "type": "Version"} + self.playlist = {"id": 7, "code": self.config.playlist_code, "type": "Playlist"} class LiveTestBase(TestBase): - '''Test base for tests relying on connection to server.''' + """Test base for tests relying on connection to server.""" def setUp(self, auth_mode=None): if not auth_mode: - auth_mode = 'HumanUser' if self.config.jenkins else 'ApiUser' + auth_mode = "HumanUser" if self.config.jenkins else "ApiUser" super(LiveTestBase, self).setUp(auth_mode) - if self.sg.server_caps.version and \ - self.sg.server_caps.version >= (3, 3, 0) and \ - (self.sg.server_caps.host.startswith('0.0.0.0') or - self.sg.server_caps.host.startswith('127.0.0.1')): - self.server_address = re.sub('^0.0.0.0|127.0.0.1', 'localhost', self.sg.server_caps.host) + if ( + self.sg.server_caps.version + and self.sg.server_caps.version >= (3, 3, 0) + and ( + self.sg.server_caps.host.startswith("0.0.0.0") + or self.sg.server_caps.host.startswith("127.0.0.1") + ) + ): + self.server_address = re.sub( + "^0.0.0.0|127.0.0.1", "localhost", self.sg.server_caps.host + ) else: self.server_address = self.sg.server_caps.host @@ -279,73 +287,75 @@ def setUpClass(cls): cls.config.server_url, **cls.auth_args, ) - cls.sg_version = tuple(sg.info()['version'][:3]) + cls.sg_version = tuple(sg.info()["version"][:3]) cls._setup_db(cls.config, sg) @classmethod def _setup_db(cls, config, sg): - data = {'name': 
cls.config.project_name} - cls.project = _find_or_create_entity(sg, 'Project', data) - - data = {'name': cls.config.human_name, - 'login': cls.config.human_login, - 'password_proxy': cls.config.human_password} + data = {"name": cls.config.project_name} + cls.project = _find_or_create_entity(sg, "Project", data) + + data = { + "name": cls.config.human_name, + "login": cls.config.human_login, + "password_proxy": cls.config.human_password, + } if cls.sg_version >= (3, 0, 0): - data['locked_until'] = None - - cls.human_user = _find_or_create_entity(sg, 'HumanUser', data) - - data = {'code': cls.config.asset_code, - 'project': cls.project} - keys = ['code'] - cls.asset = _find_or_create_entity(sg, 'Asset', data, keys) - - data = {'project': cls.project, - 'code': cls.config.version_code, - 'entity': cls.asset, - 'user': cls.human_user, - 'sg_frames_aspect_ratio': 13.3, - 'frame_count': 33} - keys = ['code', 'project'] - cls.version = _find_or_create_entity(sg, 'Version', data, keys) - - keys = ['code', 'project'] - data = {'code': cls.config.shot_code, - 'project': cls.project} - cls.shot = _find_or_create_entity(sg, 'Shot', data, keys) - - keys = ['project', 'user'] - data = {'project': cls.project, - 'user': cls.human_user, - 'content': 'anything'} - cls.note = _find_or_create_entity(sg, 'Note', data, keys) - - keys = ['code', 'project'] - data = {'project': cls.project, - 'code': cls.config.playlist_code} - cls.playlist = _find_or_create_entity(sg, 'Playlist', data, keys) - - keys = ['code', 'entity_type'] - data = {'code': 'wrapper test step', - 'entity_type': 'Shot'} - cls.step = _find_or_create_entity(sg, 'Step', data, keys) - - keys = ['project', 'entity', 'content'] - data = {'project': cls.project, - 'entity': cls.asset, - 'content': cls.config.task_content, - 'color': 'Black', - 'due_date': '1968-10-13', - 'task_assignees': [cls.human_user], - 'sg_status_list': 'ip'} - cls.task = _find_or_create_entity(sg, 'Task', data, keys) - - keys = ['code'] - data = 
{'code': 'api wrapper test storage', - 'mac_path': 'nowhere', - 'windows_path': 'nowhere', - 'linux_path': 'nowhere'} - cls.local_storage = _find_or_create_entity(sg, 'LocalStorage', data, keys) + data["locked_until"] = None + + cls.human_user = _find_or_create_entity(sg, "HumanUser", data) + + data = {"code": cls.config.asset_code, "project": cls.project} + keys = ["code"] + cls.asset = _find_or_create_entity(sg, "Asset", data, keys) + + data = { + "project": cls.project, + "code": cls.config.version_code, + "entity": cls.asset, + "user": cls.human_user, + "sg_frames_aspect_ratio": 13.3, + "frame_count": 33, + } + keys = ["code", "project"] + cls.version = _find_or_create_entity(sg, "Version", data, keys) + + keys = ["code", "project"] + data = {"code": cls.config.shot_code, "project": cls.project} + cls.shot = _find_or_create_entity(sg, "Shot", data, keys) + + keys = ["project", "user"] + data = {"project": cls.project, "user": cls.human_user, "content": "anything"} + cls.note = _find_or_create_entity(sg, "Note", data, keys) + + keys = ["code", "project"] + data = {"project": cls.project, "code": cls.config.playlist_code} + cls.playlist = _find_or_create_entity(sg, "Playlist", data, keys) + + keys = ["code", "entity_type"] + data = {"code": "wrapper test step", "entity_type": "Shot"} + cls.step = _find_or_create_entity(sg, "Step", data, keys) + + keys = ["project", "entity", "content"] + data = { + "project": cls.project, + "entity": cls.asset, + "content": cls.config.task_content, + "color": "Black", + "due_date": "1968-10-13", + "task_assignees": [cls.human_user], + "sg_status_list": "ip", + } + cls.task = _find_or_create_entity(sg, "Task", data, keys) + + keys = ["code"] + data = { + "code": "api wrapper test storage", + "mac_path": "nowhere", + "windows_path": "nowhere", + "linux_path": "nowhere", + } + cls.local_storage = _find_or_create_entity(sg, "LocalStorage", data, keys) @contextlib.contextmanager def gen_entity(self, entity_type, **kwargs): @@ -360,7 
+370,7 @@ def gen_entity(self, entity_type, **kwargs): if "password_proxy" not in kwargs: kwargs["password_proxy"] = self.config.human_password - item_rnd = random.randrange(100,999) + item_rnd = random.randrange(100, 999) for k in kwargs: if isinstance(kwargs[k], str): kwargs[k] = kwargs[k].format(rnd=item_rnd) @@ -372,13 +382,20 @@ def gen_entity(self, entity_type, **kwargs): rv = self.sg.delete(entity_type, entity["id"]) assert rv == True - def find_one_await_thumbnail(self, entity_type, filters, fields=["image"], thumbnail_field_name="image", **kwargs): + def find_one_await_thumbnail( + self, + entity_type, + filters, + fields=["image"], + thumbnail_field_name="image", + **kwargs + ): attempts = 0 while attempts < THUMBNAIL_MAX_ATTEMPTS: result = self.sg.find_one(entity_type, filters, fields=fields, **kwargs) if TRANSIENT_IMAGE_PATH in result.get(thumbnail_field_name, ""): return result - + time.sleep(THUMBNAIL_RETRY_INTERVAL) attempts += 1 else: @@ -387,43 +404,55 @@ def find_one_await_thumbnail(self, entity_type, filters, fields=["image"], thumb class HumanUserAuthLiveTestBase(LiveTestBase): - ''' + """ Test base for relying on a Shotgun connection authenticate through the configured login/password pair. - ''' + """ def setUp(self): - super(HumanUserAuthLiveTestBase, self).setUp('HumanUser') + super(HumanUserAuthLiveTestBase, self).setUp("HumanUser") class SessionTokenAuthLiveTestBase(LiveTestBase): - ''' + """ Test base for relying on a Shotgun connection authenticate through the configured session_token parameter. - ''' + """ def setUp(self): - super(SessionTokenAuthLiveTestBase, self).setUp('SessionToken') + super(SessionTokenAuthLiveTestBase, self).setUp("SessionToken") class SgTestConfig(object): - '''Reads test config and holds values''' + """Reads test config and holds values""" def __init__(self): for key in self.config_keys(): # Look for any environment variables that match our test # configuration naming of "SG_{KEY}". Default is None. 
- value = os.environ.get('SG_%s' % (str(key).upper())) - if key in ['mock']: - value = (value is None) or (str(value).lower() in ['true', '1']) + value = os.environ.get("SG_%s" % (str(key).upper())) + if key in ["mock"]: + value = (value is None) or (str(value).lower() in ["true", "1"]) setattr(self, key, value) def config_keys(self): return [ - 'api_key', 'asset_code', 'http_proxy', 'human_login', 'human_name', - 'human_password', 'mock', 'project_name', 'script_name', - 'server_url', 'session_uuid', 'shot_code', 'task_content', - 'version_code', 'playlist_code', 'jenkins' + "api_key", + "asset_code", + "http_proxy", + "human_login", + "human_name", + "human_password", + "mock", + "project_name", + "script_name", + "server_url", + "session_uuid", + "shot_code", + "task_content", + "version_code", + "playlist_code", + "jenkins", ] def read_config(self, config_path): @@ -439,7 +468,7 @@ def read_config(self, config_path): def _find_or_create_entity(sg, entity_type, data, identifyiers=None): - '''Finds or creates entities. + """Finds or creates entities. 
@params: sg - shogun_json.Shotgun instance entity_type - entity type @@ -447,11 +476,11 @@ def _find_or_create_entity(sg, entity_type, data, identifyiers=None): identifyiers -list of subset of keys from data which should be used to uniquely identity the entity @returns dicitonary of the entity values - ''' - identifyiers = identifyiers or ['name'] + """ + identifyiers = identifyiers or ["name"] fields = list(data.keys()) - filters = [[key, 'is', data[key]] for key in identifyiers] + filters = [[key, "is", data[key]] for key in identifyiers] entity = sg.find_one(entity_type, filters, fields=fields) entity = entity or sg.create(entity_type, data, return_fields=fields) - assert(entity) + assert entity return entity diff --git a/tests/ci_requirements.txt b/tests/ci_requirements.txt index 92189202a..5c2074965 100644 --- a/tests/ci_requirements.txt +++ b/tests/ci_requirements.txt @@ -8,14 +8,14 @@ # agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. +coverage coveralls==1.1 -nose==1.3.7 -nose-exclude==0.5.0 # Don't restrict flake8 version, since we install this in CI against Python 2.6, # where flake8 has discontinued support for newer releases. 
On Python 2.7 and # Python 3.7, linting has been performed with flake8 3.7.8 flake8 +nose==1.3.7 +nose-exclude==0.5.0 pytest pytest-azurepipelines -coverage pytest-coverage diff --git a/tests/mock.py b/tests/mock.py index 456c02594..736571c64 100644 --- a/tests/mock.py +++ b/tests/mock.py @@ -14,16 +14,16 @@ __all__ = ( - 'Mock', - 'MagicMock', - 'mocksignature', - 'patch', - 'patch_object', - 'sentinel', - 'DEFAULT' + "Mock", + "MagicMock", + "mocksignature", + "patch", + "patch_object", + "sentinel", + "DEFAULT", ) -__version__ = '0.7.0' +__version__ = "0.7.0" __unittest = True @@ -48,8 +48,10 @@ def inner(f): f.__doc__ = original.__doc__ f.__module__ = original.__module__ return f + return inner + try: unicode except NameError: @@ -65,18 +67,19 @@ def inner(f): inPy3k = sys.version_info[0] == 3 if inPy3k: - self = '__self__' + self = "__self__" else: - self = 'im_self' + self = "im_self" # getsignature and mocksignature heavily "inspired" by # the decorator module: http://pypi.python.org/pypi/decorator/ # by Michele Simionato + def _getsignature(func, skipfirst): if inspect is None: - raise ImportError('inspect module not available') + raise ImportError("inspect module not available") if inspect.isclass(func): func = func.__init__ @@ -92,15 +95,16 @@ def _getsignature(func, skipfirst): regargs = regargs[1:] _msg = "_mock_ is a reserved argument name, can't mock signatures using _mock_" - assert '_mock_' not in regargs, _msg + assert "_mock_" not in regargs, _msg if varargs is not None: - assert '_mock_' not in varargs, _msg + assert "_mock_" not in varargs, _msg if varkwargs is not None: - assert '_mock_' not in varkwargs, _msg + assert "_mock_" not in varkwargs, _msg if skipfirst: regargs = regargs[1:] - signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults, - formatvalue=lambda value: "") + signature = inspect.formatargspec( + regargs, varargs, varkwargs, defaults, formatvalue=lambda value: "" + ) return signature[1:-1], func @@ -138,9 +142,7 
@@ def mocksignature(func, mock=None, skipfirst=False): if mock is None: mock = Mock() signature, func = _getsignature(func, skipfirst) - src = "lambda %(signature)s: _mock_(%(signature)s)" % { - 'signature': signature - } + src = "lambda %(signature)s: _mock_(%(signature)s)" % {"signature": signature} funcopy = eval(src, dict(_mock_=mock)) _copy_func_details(func, funcopy) @@ -149,11 +151,12 @@ def mocksignature(func, mock=None, skipfirst=False): def _is_magic(name): - return '__%s__' % name[2:-2] == name + return "__%s__" % name[2:-2] == name class SentinelObject(object): "A unique, named, sentinel object." + def __init__(self, name): self.name = name @@ -163,11 +166,12 @@ def __repr__(self): class Sentinel(object): """Access attributes to return a named object, usable as a sentinel.""" + def __init__(self): self._sentinels = {} def __getattr__(self, name): - if name == '__bases__': + if name == "__bases__": # Without this help(mock) raises an exception raise AttributeError return self._sentinels.setdefault(name, SentinelObject(name)) @@ -180,6 +184,8 @@ def __getattr__(self, name): class OldStyleClass: pass + + ClassType = type(OldStyleClass) @@ -241,16 +247,24 @@ class or instance) that acts as the specification for the mock object. If mock. This can be useful for debugging. The name is propagated to child mocks. 
""" + def __new__(cls, *args, **kw): # every instance has its own class # so we can create magic methods on the # class without stomping on other mocks - new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__}) + new = type(cls.__name__, (cls,), {"__doc__": cls.__doc__}) return object.__new__(new) - - def __init__(self, spec=None, side_effect=None, return_value=DEFAULT, - wraps=None, name=None, spec_set=None, parent=None): + def __init__( + self, + spec=None, + side_effect=None, + return_value=DEFAULT, + wraps=None, + name=None, + spec_set=None, + parent=None, + ): self._parent = parent self._name = name _spec_class = None @@ -275,14 +289,12 @@ def __init__(self, spec=None, side_effect=None, return_value=DEFAULT, self.reset_mock() - @property def __class__(self): if self._spec_class is None: return type(self) return self._spec_class - def reset_mock(self): "Restore the mock object to its initial state." self.called = False @@ -296,7 +308,6 @@ def reset_mock(self): if not self._return_value is self: self._return_value.reset_mock() - def __get_return_value(self): if self._return_value is DEFAULT: self._return_value = self._get_child_mock() @@ -306,9 +317,7 @@ def __set_return_value(self, value): self._return_value = value __return_value_doc = "The value to be returned when the mock is called." - return_value = property(__get_return_value, __set_return_value, - __return_value_doc) - + return_value = property(__get_return_value, __set_return_value, __return_value_doc) def __call__(self, *args, **kwargs): self.called = True @@ -322,14 +331,16 @@ def __call__(self, *args, **kwargs): parent.method_calls.append(callargs((name, args, kwargs))) if parent._parent is None: break - name = parent._name + '.' + name + name = parent._name + "." 
+ name parent = parent._parent ret_val = DEFAULT if self.side_effect is not None: - if (isinstance(self.side_effect, BaseException) or - isinstance(self.side_effect, class_types) and - issubclass(self.side_effect, BaseException)): + if ( + isinstance(self.side_effect, BaseException) + or isinstance(self.side_effect, class_types) + and issubclass(self.side_effect, BaseException) + ): raise self.side_effect ret_val = self.side_effect(*args, **kwargs) @@ -342,9 +353,8 @@ def __call__(self, *args, **kwargs): ret_val = self.return_value return ret_val - def __getattr__(self, name): - if name == '_methods': + if name == "_methods": raise AttributeError(name) elif self._methods is not None: if name not in self._methods or name in _all_magics: @@ -356,49 +366,57 @@ def __getattr__(self, name): wraps = None if self._wraps is not None: wraps = getattr(self._wraps, name) - self._children[name] = self._get_child_mock(parent=self, name=name, wraps=wraps) + self._children[name] = self._get_child_mock( + parent=self, name=name, wraps=wraps + ) return self._children[name] - def __repr__(self): if self._name is None and self._spec_class is None: return object.__repr__(self) - name_string = '' - spec_string = '' + name_string = "" + spec_string = "" if self._name is not None: + def get_name(name): if name is None: - return 'mock' + return "mock" return name + parent = self._parent name = self._name while parent is not None: - name = get_name(parent._name) + '.' + name + name = get_name(parent._name) + "." 
+ name parent = parent._parent - name_string = ' name=%r' % name + name_string = " name=%r" % name if self._spec_class is not None: - spec_string = ' spec=%r' + spec_string = " spec=%r" if self._spec_set: - spec_string = ' spec_set=%r' + spec_string = " spec_set=%r" spec_string = spec_string % self._spec_class.__name__ - return "<%s%s%s id='%s'>" % (type(self).__name__, - name_string, - spec_string, - id(self)) - + return "<%s%s%s id='%s'>" % ( + type(self).__name__, + name_string, + spec_string, + id(self), + ) def __setattr__(self, name, value): - if not 'method_calls' in self.__dict__: + if not "method_calls" in self.__dict__: # allow all attribute setting until initialisation is complete return object.__setattr__(self, name, value) - if (self._spec_set and self._methods is not None and name not in - self._methods and name not in self.__dict__ and - name != 'return_value'): + if ( + self._spec_set + and self._methods is not None + and name not in self._methods + and name not in self.__dict__ + and name != "return_value" + ): raise AttributeError("Mock object has no attribute '%s'" % name) if name in _unsupported_magics: - msg = 'Attempting to set unsupported magic method %r.' % name + msg = "Attempting to set unsupported magic method %r." % name raise AttributeError(msg) elif name in _all_magics: if self._methods is not None and name not in self._methods: @@ -413,13 +431,11 @@ def __setattr__(self, name, value): setattr(type(self), name, value) return object.__setattr__(self, name, value) - def __delattr__(self, name): if name in _all_magics and name in type(self).__dict__: delattr(type(self), name) return object.__delattr__(self, name) - def assert_called_with(self, *args, **kwargs): """ assert that the mock was called with the specified arguments. @@ -428,31 +444,27 @@ def assert_called_with(self, *args, **kwargs): different to the last call to the mock. 
""" if self.call_args is None: - raise AssertionError('Expected: %s\nNot called' % ((args, kwargs),)) + raise AssertionError("Expected: %s\nNot called" % ((args, kwargs),)) if not self.call_args == (args, kwargs): raise AssertionError( - 'Expected: %s\nCalled with: %s' % ((args, kwargs), self.call_args) + "Expected: %s\nCalled with: %s" % ((args, kwargs), self.call_args) ) - def assert_called_once_with(self, *args, **kwargs): """ assert that the mock was called exactly once and with the specified arguments. """ if not self.call_count == 1: - msg = ("Expected to be called once. Called %s times." % - self.call_count) + msg = "Expected to be called once. Called %s times." % self.call_count raise AssertionError(msg) return self.assert_called_with(*args, **kwargs) - def _get_child_mock(self, **kw): klass = type(self).__mro__[1] return klass(**kw) - class callargs(tuple): """ A tuple for holding the results of a call to a mock, either in the form @@ -465,6 +477,7 @@ class callargs(tuple): callargs('name', (1,), {}) == ('name', (1,)) callargs((), {'a': 'b'}) == ({'a': 'b'},) """ + def __eq__(self, other): if len(self) == 3: if other[0] != self[0]: @@ -499,7 +512,7 @@ def _dot_lookup(thing, comp, import_path): def _importer(target): - components = target.split('.') + components = target.split(".") import_path = components.pop(0) thing = __import__(import_path) @@ -510,8 +523,7 @@ def _importer(target): class _patch(object): - def __init__(self, target, attribute, new, spec, create, - mocksignature, spec_set): + def __init__(self, target, attribute, new, spec, create, mocksignature, spec_set): self.target = target self.attribute = attribute self.new = new @@ -521,11 +533,16 @@ def __init__(self, target, attribute, new, spec, create, self.mocksignature = mocksignature self.spec_set = spec_set - def copy(self): - return _patch(self.target, self.attribute, self.new, self.spec, - self.create, self.mocksignature, self.spec_set) - + return _patch( + self.target, + self.attribute, 
+ self.new, + self.spec, + self.create, + self.mocksignature, + self.spec_set, + ) def __call__(self, func): if isinstance(func, class_types): @@ -533,7 +550,6 @@ def __call__(self, func): else: return self.decorate_callable(func) - def decorate_class(self, klass): for attr in dir(klass): attr_value = getattr(klass, attr) @@ -541,9 +557,8 @@ def decorate_class(self, klass): setattr(klass, attr, self.copy()(attr_value)) return klass - def decorate_callable(self, func): - if hasattr(func, 'patchings'): + if hasattr(func, "patchings"): func.patchings.append(self) return func @@ -559,17 +574,17 @@ def patched(*args, **keywargs): try: return func(*args, **keywargs) finally: - for patching in reversed(getattr(patched, 'patchings', [])): + for patching in reversed(getattr(patched, "patchings", [])): patching.__exit__() patched.patchings = [self] - if hasattr(func, 'func_code'): + if hasattr(func, "func_code"): # not in Python 3 - patched.compat_co_firstlineno = getattr(func, "compat_co_firstlineno", - func.func_code.co_firstlineno) + patched.compat_co_firstlineno = getattr( + func, "compat_co_firstlineno", func.func_code.co_firstlineno + ) return patched - def get_original(self): target = self.target name = self.attribute @@ -588,7 +603,6 @@ def get_original(self): raise AttributeError("%s does not have the attribute %r" % (target, name)) return original, local - def __enter__(self): """Perform the patch.""" new, spec, spec_set = self.new, self.spec, self.spec_set @@ -617,7 +631,6 @@ def __enter__(self): setattr(self.target, self.attribute, new_attr) return new - def __exit__(self, *_): """Undo the patch.""" if self.is_local and self.temp_original is not DEFAULT: @@ -635,8 +648,15 @@ def __exit__(self, *_): stop = __exit__ -def _patch_object(target, attribute, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None): +def _patch_object( + target, + attribute, + new=DEFAULT, + spec=None, + create=False, + mocksignature=False, + spec_set=None, +): """ 
patch.object(target, attribute, new=DEFAULT, spec=None, create=False, mocksignature=False, spec_set=None) @@ -647,18 +667,18 @@ def _patch_object(target, attribute, new=DEFAULT, spec=None, create=False, Arguments new, spec, create, mocksignature and spec_set have the same meaning as for patch. """ - return _patch(target, attribute, new, spec, create, mocksignature, - spec_set) + return _patch(target, attribute, new, spec, create, mocksignature, spec_set) def patch_object(*args, **kwargs): "A deprecated form of patch.object(...)" - warnings.warn(('Please use patch.object instead.'), DeprecationWarning, 2) + warnings.warn(("Please use patch.object instead."), DeprecationWarning, 2) return _patch_object(*args, **kwargs) -def patch(target, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None): +def patch( + target, new=DEFAULT, spec=None, create=False, mocksignature=False, spec_set=None +): """ ``patch`` acts as a function decorator, class decorator or a context manager. Inside the body of the function or with statement, the ``target`` @@ -707,10 +727,9 @@ def patch(target, new=DEFAULT, spec=None, create=False, use-cases. """ try: - target, attribute = target.rsplit('.', 1) + target, attribute = target.rsplit(".", 1) except (TypeError, ValueError): - raise TypeError("Need a valid target to patch. You supplied: %r" % - (target,)) + raise TypeError("Need a valid target to patch. 
You supplied: %r" % (target,)) target = _importer(target) return _patch(target, attribute, new, spec, create, mocksignature, spec_set) @@ -743,10 +762,10 @@ def __init__(self, in_dict, values=(), clear=False): self.clear = clear self._original = None - def __call__(self, f): if isinstance(f, class_types): return self.decorate_class(f) + @wraps(f) def _inner(*args, **kw): self._patch_dict() @@ -757,7 +776,6 @@ def _inner(*args, **kw): return _inner - def decorate_class(self, klass): for attr in dir(klass): attr_value = getattr(klass, attr) @@ -767,12 +785,10 @@ def decorate_class(self, klass): setattr(klass, attr, decorated) return klass - def __enter__(self): """Patch the dict.""" self._patch_dict() - def _patch_dict(self): """Unpatch the dict.""" values = self.values @@ -799,7 +815,6 @@ def _patch_dict(self): for key in values: in_dict[key] = values[key] - def _unpatch_dict(self): in_dict = self.in_dict original = self._original @@ -812,7 +827,6 @@ def _unpatch_dict(self): for key in original: in_dict[key] = original[key] - def __exit__(self, *args): self._unpatch_dict() return False @@ -846,71 +860,97 @@ def _clear_dict(in_dict): ) numerics = "add sub mul div truediv floordiv mod lshift rshift and xor or pow " -inplace = ' '.join('i%s' % n for n in numerics.split()) -right = ' '.join('r%s' % n for n in numerics.split()) -extra = '' +inplace = " ".join("i%s" % n for n in numerics.split()) +right = " ".join("r%s" % n for n in numerics.split()) +extra = "" if inPy3k: - extra = 'bool next ' + extra = "bool next " else: - extra = 'unicode long nonzero oct hex ' + extra = "unicode long nonzero oct hex " # __truediv__ and __rtruediv__ not available in Python 3 either # not including __prepare__, __instancecheck__, __subclasscheck__ # (as they are metaclass methods) # __del__ is not supported at all as it causes problems if it exists -_non_defaults = set('__%s__' % method for method in [ - 'cmp', 'getslice', 'setslice', 'coerce', 'subclasses', - 'dir', 'format', 'get', 
'set', 'delete', 'reversed', - 'missing', 'reduce', 'reduce_ex', 'getinitargs', - 'getnewargs', 'getstate', 'setstate', 'getformat', - 'setformat', 'repr' -]) +_non_defaults = set( + "__%s__" % method + for method in [ + "cmp", + "getslice", + "setslice", + "coerce", + "subclasses", + "dir", + "format", + "get", + "set", + "delete", + "reversed", + "missing", + "reduce", + "reduce_ex", + "getinitargs", + "getnewargs", + "getstate", + "setstate", + "getformat", + "setformat", + "repr", + ] +) def _get_method(name, func): "Turns a callable object (like a mock) into a real function" + def method(self, *args, **kw): return func(self, *args, **kw) + method.__name__ = name return method _magics = set( - '__%s__' % method for method in - ' '.join([magic_methods, numerics, inplace, right, extra]).split() + "__%s__" % method + for method in " ".join([magic_methods, numerics, inplace, right, extra]).split() ) _all_magics = _magics | _non_defaults -_unsupported_magics = set([ - '__getattr__', '__setattr__', - '__init__', '__new__', '__prepare__' - '__instancecheck__', '__subclasscheck__', - '__del__' -]) +_unsupported_magics = set( + [ + "__getattr__", + "__setattr__", + "__init__", + "__new__", + "__prepare__" "__instancecheck__", + "__subclasscheck__", + "__del__", + ] +) _calculate_return_value = { - '__hash__': lambda self: object.__hash__(self), - '__str__': lambda self: object.__str__(self), - '__sizeof__': lambda self: object.__sizeof__(self), - '__unicode__': lambda self: unicode(object.__str__(self)), + "__hash__": lambda self: object.__hash__(self), + "__str__": lambda self: object.__str__(self), + "__sizeof__": lambda self: object.__sizeof__(self), + "__unicode__": lambda self: unicode(object.__str__(self)), } _return_values = { - '__int__': 1, - '__contains__': False, - '__len__': 0, - '__iter__': iter([]), - '__exit__': False, - '__complex__': 1j, - '__float__': 1.0, - '__bool__': True, - '__nonzero__': True, - '__oct__': '1', - '__hex__': '0x1', - '__long__': 
long(1), - '__index__': 1, + "__int__": 1, + "__contains__": False, + "__len__": 0, + "__iter__": iter([]), + "__exit__": False, + "__complex__": 1j, + "__float__": 1.0, + "__bool__": True, + "__nonzero__": True, + "__oct__": "1", + "__hex__": "0x1", + "__long__": long(1), + "__index__": 1, } @@ -938,6 +978,7 @@ class MagicMock(Mock): Attributes and the return value of a `MagicMock` will also be `MagicMocks`. """ + def __init__(self, *args, **kw): Mock.__init__(self, *args, **kw) diff --git a/tests/test_api.py b/tests/test_api.py index 9fbc7a678..0e611316a 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -45,7 +45,7 @@ class TestShotgunApi(base.LiveTestBase): def setUp(self): super(TestShotgunApi, self).setUp() # give note unicode content - self.sg.update('Note', self.note['id'], {'content': u'La Pe\xf1a'}) + self.sg.update("Note", self.note["id"], {"content": "La Pe\xf1a"}) def test_info(self): """Called info""" @@ -55,9 +55,8 @@ def test_info(self): def test_server_dates(self): """Pass datetimes to the server""" # TODO check results - t = {'project': self.project, - 'start_date': datetime.date.today()} - self.sg.create('Task', t, ['content', 'sg_status_list']) + t = {"project": self.project, "start_date": datetime.date.today()} + self.sg.create("Task", t, ["content", "sg_status_list"]) def test_batch(self): """Batched create, update, delete""" @@ -66,39 +65,30 @@ def test_batch(self): { "request_type": "create", "entity_type": "Shot", - "data": { - "code": "New Shot 5", - "project": self.project - } + "data": {"code": "New Shot 5", "project": self.project}, }, { "request_type": "update", "entity_type": "Shot", - "entity_id": self.shot['id'], - "data": { - "code": "Changed 1" - } - } + "entity_id": self.shot["id"], + "data": {"code": "Changed 1"}, + }, ] new_shot, updated_shot = self.sg.batch(requests) - self.assertEqual(self.shot['id'], updated_shot["id"]) + self.assertEqual(self.shot["id"], updated_shot["id"]) self.assertTrue(new_shot.get("id")) 
new_shot_id = new_shot["id"] requests = [ - { - "request_type": "delete", - "entity_type": "Shot", - "entity_id": new_shot_id - }, + {"request_type": "delete", "entity_type": "Shot", "entity_id": new_shot_id}, { "request_type": "update", "entity_type": "Shot", - "entity_id": self.shot['id'], - "data": {"code": self.shot['code']} - } + "entity_id": self.shot["id"], + "data": {"code": self.shot["code"]}, + }, ] result = self.sg.batch(requests)[0] @@ -112,12 +102,12 @@ def test_empty_batch(self): def test_create_update_delete(self): """Called create, update, delete, revive""" data = { - 'project': self.project, - 'code': 'JohnnyApple_Design01_FaceFinal', - 'description': 'fixed rig per director final notes', - 'sg_status_list': 'rev', - 'entity': self.asset, - 'user': self.human_user + "project": self.project, + "code": "JohnnyApple_Design01_FaceFinal", + "description": "fixed rig per director final notes", + "sg_status_list": "rev", + "entity": self.asset, + "user": self.human_user, } version = self.sg.create("Version", data, return_fields=["id"]) @@ -126,9 +116,7 @@ def test_create_update_delete(self): # TODO check results more thoroughly # TODO: test returned fields are requested fields - data = data = { - "description": "updated test" - } + data = data = {"description": "updated test"} version = self.sg.update("Version", version["id"], data) self.assertTrue(isinstance(version, dict)) self.assertTrue("id" in version) @@ -144,9 +132,9 @@ def test_create_update_delete(self): self.assertEqual(False, rv) def test_last_accessed(self): - page = self.sg.find('Page', [], fields=['last_accessed'], limit=1) - self.assertEqual("Page", page[0]['type']) - self.assertEqual(datetime.datetime, type(page[0]['last_accessed'])) + page = self.sg.find("Page", [], fields=["last_accessed"], limit=1) + self.assertEqual("Page", page[0]["type"]) + self.assertEqual(datetime.datetime, type(page[0]["last_accessed"])) def test_get_session_token(self): """Got session UUID""" @@ -158,18 +146,23 
@@ def test_upload_download(self): """Upload and download an attachment tests""" # upload / download only works against a live server because it does # not use the standard http interface - if 'localhost' in self.server_url: + if "localhost" in self.server_url: print("upload / down tests skipped for localhost") return this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) size = os.stat(path).st_size - attach_id = self.sg.upload("Version", - self.version['id'], path, 'sg_uploaded_movie', - tag_list="monkeys, everywhere, send, help") + attach_id = self.sg.upload( + "Version", + self.version["id"], + path, + "sg_uploaded_movie", + tag_list="monkeys, everywhere, send, help", + ) # test download with attachment_id attach_file = self.sg.download_attachment(attach_id) @@ -186,23 +179,30 @@ def test_upload_download(self): self.assertEqual(orig_file, attach_file) # test download with attachment_id (write to disk) - file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "sg_logo_download.jpg") + file_path = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "sg_logo_download.jpg" + ) result = self.sg.download_attachment(attach_id, file_path=file_path) self.assertEqual(result, file_path) # On windows read may not read to end of file unless opened 'rb' - fp = open(file_path, 'rb') + fp = open(file_path, "rb") attach_file = fp.read() fp.close() self.assertEqual(size, len(attach_file)) self.assertEqual(orig_file, attach_file) # test download with attachment hash - version = self.sg.find_one('Version', [['id', 'is', self.version['id']]], - ['sg_uploaded_movie']) + version = self.sg.find_one( + "Version", [["id", "is", self.version["id"]]], ["sg_uploaded_movie"] + ) # Look for the attachment we just uploaded, the attachments are not returned from latest # to earliest. 
- attachment = [v for k, v in version["sg_uploaded_movie"].items() if (k, v) == ("id", attach_id)] + attachment = [ + v + for k, v in version["sg_uploaded_movie"].items() + if (k, v) == ("id", attach_id) + ] self.assertEqual(len(attachment), 1) attachment = attachment[0] @@ -213,10 +213,9 @@ def test_upload_download(self): self.assertEqual(orig_file, attach_file) # test download with attachment hash (write to disk) - result = self.sg.download_attachment(attachment, - file_path=file_path) + result = self.sg.download_attachment(attachment, file_path=file_path) self.assertEqual(result, file_path) - fp = open(file_path, 'rb') + fp = open(file_path, "rb") attach_file = fp.read() fp.close() self.assertTrue(attach_file is not None) @@ -225,17 +224,23 @@ def test_upload_download(self): # test invalid requests INVALID_S3_URL = "https://sg-media-usor-01.s3.amazonaws.com/ada3de3ee3873875e1dd44f2eb0882c75ae36a4a/cd31346421dbeef781e0e480f259a3d36652d7f2/IMG_0465.MOV?AWSAccessKeyId=AKIAIQGOBSVN3FSQ5QFA&Expires=1371789959&Signature=SLbzv7DuVlZ8XAoOSQQAiGpF3u8%3D" # noqa - self.assertRaises(shotgun_api3.ShotgunFileDownloadError, - self.sg.download_attachment, - {"url": INVALID_S3_URL}) + self.assertRaises( + shotgun_api3.ShotgunFileDownloadError, + self.sg.download_attachment, + {"url": INVALID_S3_URL}, + ) INVALID_ATTACHMENT_ID = 99999999 - self.assertRaises(shotgun_api3.ShotgunFileDownloadError, - self.sg.download_attachment, - INVALID_ATTACHMENT_ID) - self.assertRaises(TypeError, self.sg.download_attachment, - "/path/to/some/file.jpg") - self.assertRaises(ValueError, self.sg.download_attachment, - {"id": 123, "type": "Shot"}) + self.assertRaises( + shotgun_api3.ShotgunFileDownloadError, + self.sg.download_attachment, + INVALID_ATTACHMENT_ID, + ) + self.assertRaises( + TypeError, self.sg.download_attachment, "/path/to/some/file.jpg" + ) + self.assertRaises( + ValueError, self.sg.download_attachment, {"id": 123, "type": "Shot"} + ) self.assertRaises(TypeError, 
self.sg.download_attachment) # test upload of non-ascii, unicode path @@ -251,10 +256,10 @@ def test_upload_download(self): # us up the way it used to. self.sg.upload( "Version", - self.version['id'], + self.version["id"], u_path, - 'sg_uploaded_movie', - tag_list="monkeys, everywhere, send, help" + "sg_uploaded_movie", + tag_list="monkeys, everywhere, send, help", ) # Also make sure that we can pass in a utf-8 encoded string path @@ -263,10 +268,10 @@ def test_upload_download(self): # situation as well as OS X and Linux. self.sg.upload( "Version", - self.version['id'], + self.version["id"], u_path.encode("utf-8"), - 'sg_uploaded_movie', - tag_list="monkeys, everywhere, send, help" + "sg_uploaded_movie", + tag_list="monkeys, everywhere, send, help", ) if six.PY2: # In Python2, make sure that non-utf-8 encoded paths raise when they @@ -280,26 +285,28 @@ def test_upload_download(self): file_path_s = os.path.join(this_dir, "./\xe3\x81\x94.shift-jis") file_path_u = file_path_s.decode("utf-8") - with open(file_path_u if sys.platform.startswith("win") else file_path_s, "w") as fh: + with open( + file_path_u if sys.platform.startswith("win") else file_path_s, "w" + ) as fh: fh.write("This is just a test file with some random data in it.") self.assertRaises( shotgun_api3.ShotgunError, self.sg.upload, "Version", - self.version['id'], + self.version["id"], file_path_u.encode("shift-jis"), - 'sg_uploaded_movie', - tag_list="monkeys, everywhere, send, help" + "sg_uploaded_movie", + tag_list="monkeys, everywhere, send, help", ) # But it should work in all cases if a unicode string is used. 
self.sg.upload( "Version", - self.version['id'], + self.version["id"], file_path_u, - 'sg_uploaded_movie', - tag_list="monkeys, everywhere, send, help" + "sg_uploaded_movie", + tag_list="monkeys, everywhere, send, help", ) # cleanup @@ -308,7 +315,7 @@ def test_upload_download(self): # cleanup os.remove(file_path) - @patch('shotgun_api3.Shotgun._send_form') + @patch("shotgun_api3.Shotgun._send_form") def test_upload_to_sg(self, mock_send_form): """ Upload an attachment tests for _upload_to_sg() @@ -324,24 +331,23 @@ def test_upload_to_sg(self, mock_send_form): ) upload_id = self.sg.upload( "Version", - self.version['id'], + self.version["id"], u_path, - 'attachments', - tag_list="monkeys, everywhere, send, help" + "attachments", + tag_list="monkeys, everywhere, send, help", ) mock_send_form_args, _ = mock_send_form.call_args display_name_to_send = mock_send_form_args[1].get("display_name", "") self.assertTrue(isinstance(upload_id, int)) self.assertFalse( - display_name_to_send.startswith("b'") and - display_name_to_send.endswith("'") + display_name_to_send.startswith("b'") and display_name_to_send.endswith("'") ) upload_id = self.sg.upload( "Version", - self.version['id'], + self.version["id"], u_path, - 'filmstrip_image', + "filmstrip_image", tag_list="monkeys, everywhere, send, help", ) self.assertTrue(isinstance(upload_id, int)) @@ -349,8 +355,7 @@ def test_upload_to_sg(self, mock_send_form): display_name_to_send = mock_send_form_args[1].get("display_name", "") self.assertTrue(isinstance(upload_id, int)) self.assertFalse( - display_name_to_send.startswith("b'") and - display_name_to_send.endswith("'") + display_name_to_send.startswith("b'") and display_name_to_send.endswith("'") ) mock_send_form.method.assert_called_once() @@ -359,23 +364,23 @@ def test_upload_to_sg(self, mock_send_form): shotgun_api3.ShotgunError, self.sg.upload, "Version", - self.version['id'], + self.version["id"], u_path, - 'attachments', - tag_list="monkeys, everywhere, send, help" + 
"attachments", + tag_list="monkeys, everywhere, send, help", ) self.sg.server_info["s3_direct_uploads_enabled"] = True def test_upload_thumbnail_in_create(self): """Upload a thumbnail via the create method""" this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # test thumbnail upload - data = {'image': path, 'code': 'Test Version', - 'project': self.project} - new_version = self.sg.create("Version", data, return_fields=['image']) + data = {"image": path, "code": "Test Version", "project": self.project} + new_version = self.sg.create("Version", data, return_fields=["image"]) new_version = self.find_one_await_thumbnail( "Version", [["id", "is", new_version["id"]]], @@ -384,104 +389,126 @@ def test_upload_thumbnail_in_create(self): self.assertTrue(new_version is not None) self.assertTrue(isinstance(new_version, dict)) - self.assertTrue(isinstance(new_version.get('id'), int)) - self.assertEqual(new_version.get('type'), 'Version') - self.assertEqual(new_version.get('project'), self.project) - self.assertTrue(new_version.get('image') is not None) + self.assertTrue(isinstance(new_version.get("id"), int)) + self.assertEqual(new_version.get("type"), "Version") + self.assertEqual(new_version.get("project"), self.project) + self.assertTrue(new_version.get("image") is not None) h = Http(".cache") - thumb_resp, content = h.request(new_version.get('image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request(new_version.get("image"), "GET") + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) - self.sg.delete("Version", new_version['id']) + self.sg.delete("Version", new_version["id"]) # test filmstrip image upload - data = 
{'filmstrip_image': path, 'code': 'Test Version', - 'project': self.project} - new_version = self.sg.create("Version", data, return_fields=['filmstrip_image']) + data = { + "filmstrip_image": path, + "code": "Test Version", + "project": self.project, + } + new_version = self.sg.create("Version", data, return_fields=["filmstrip_image"]) self.assertTrue(new_version is not None) self.assertTrue(isinstance(new_version, dict)) - self.assertTrue(isinstance(new_version.get('id'), int)) - self.assertEqual(new_version.get('type'), 'Version') - self.assertEqual(new_version.get('project'), self.project) - self.assertTrue(new_version.get('filmstrip_image') is not None) + self.assertTrue(isinstance(new_version.get("id"), int)) + self.assertEqual(new_version.get("type"), "Version") + self.assertEqual(new_version.get("project"), self.project) + self.assertTrue(new_version.get("filmstrip_image") is not None) - url = new_version.get('filmstrip_image') - data = self.sg.download_attachment({'url': url}) + url = new_version.get("filmstrip_image") + data = self.sg.download_attachment({"url": url}) self.assertTrue(isinstance(data, six.binary_type)) - self.sg.delete("Version", new_version['id']) + self.sg.delete("Version", new_version["id"]) + # end test_upload_thumbnail_in_create def test_upload_thumbnail_for_version(self): """simple upload thumbnail for version test.""" this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # upload thumbnail - thumb_id = self.sg.upload_thumbnail("Version", self.version['id'], path) + thumb_id = self.sg.upload_thumbnail("Version", self.version["id"], path) self.assertTrue(isinstance(thumb_id, int)) # check result on version - version_with_thumbnail = self.sg.find_one('Version', [['id', 'is', self.version['id']]]) + version_with_thumbnail = self.sg.find_one( + "Version", [["id", "is", 
self.version["id"]]] + ) version_with_thumbnail = self.find_one_await_thumbnail( "Version", [["id", "is", self.version["id"]]] ) - self.assertEqual(version_with_thumbnail.get('type'), 'Version') - self.assertEqual(version_with_thumbnail.get('id'), self.version['id']) + self.assertEqual(version_with_thumbnail.get("type"), "Version") + self.assertEqual(version_with_thumbnail.get("id"), self.version["id"]) h = Http(".cache") - thumb_resp, content = h.request(version_with_thumbnail.get('image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request(version_with_thumbnail.get("image"), "GET") + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) # clear thumbnail - response_clear_thumbnail = self.sg.update("Version", self.version['id'], {'image': None}) - expected_clear_thumbnail = {'id': self.version['id'], 'image': None, 'type': 'Version'} + response_clear_thumbnail = self.sg.update( + "Version", self.version["id"], {"image": None} + ) + expected_clear_thumbnail = { + "id": self.version["id"], + "image": None, + "type": "Version", + } self.assertEqual(expected_clear_thumbnail, response_clear_thumbnail) def test_upload_thumbnail_for_task(self): """simple upload thumbnail for task test.""" this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # upload thumbnail - thumb_id = self.sg.upload_thumbnail("Task", self.task['id'], path) + thumb_id = self.sg.upload_thumbnail("Task", self.task["id"], path) self.assertTrue(isinstance(thumb_id, int)) # check result on version - task_with_thumbnail = self.sg.find_one('Task', [['id', 'is', self.task['id']]]) + task_with_thumbnail = self.sg.find_one("Task", [["id", "is", self.task["id"]]]) 
task_with_thumbnail = self.find_one_await_thumbnail( "Task", [["id", "is", self.task["id"]]] ) - self.assertEqual(task_with_thumbnail.get('type'), 'Task') - self.assertEqual(task_with_thumbnail.get('id'), self.task['id']) + self.assertEqual(task_with_thumbnail.get("type"), "Task") + self.assertEqual(task_with_thumbnail.get("id"), self.task["id"]) h = Http(".cache") - thumb_resp, content = h.request(task_with_thumbnail.get('image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request(task_with_thumbnail.get("image"), "GET") + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) # clear thumbnail - response_clear_thumbnail = self.sg.update("Version", self.version['id'], {'image': None}) - expected_clear_thumbnail = {'id': self.version['id'], 'image': None, 'type': 'Version'} + response_clear_thumbnail = self.sg.update( + "Version", self.version["id"], {"image": None} + ) + expected_clear_thumbnail = { + "id": self.version["id"], + "image": None, + "type": "Version", + } self.assertEqual(expected_clear_thumbnail, response_clear_thumbnail) def test_upload_thumbnail_with_upload_function(self): """Upload thumbnail via upload function test""" - path = os.path.abspath(os.path.expanduser(os.path.join(os.path.dirname(__file__), "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(os.path.dirname(__file__), "sg_logo.jpg")) + ) # upload thumbnail - thumb_id = self.sg.upload("Task", self.task['id'], path, 'image') + thumb_id = self.sg.upload("Task", self.task["id"], path, "image") self.assertTrue(isinstance(thumb_id, int)) # upload filmstrip thumbnail - f_thumb_id = self.sg.upload("Task", self.task['id'], path, 'filmstrip_image') + f_thumb_id = self.sg.upload("Task", self.task["id"], path, "filmstrip_image") self.assertTrue(isinstance(f_thumb_id, int)) def 
test_requires_direct_s3_upload(self): @@ -494,19 +521,25 @@ def test_requires_direct_s3_upload(self): self.sg.server_info["s3_direct_uploads_enabled"] = None # Test s3_enabled_upload_types and s3_direct_uploads_enabled not set - self.assertFalse(self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie")) + self.assertFalse( + self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie") + ) self.sg.server_info["s3_enabled_upload_types"] = { "Version": ["sg_uploaded_movie"] } # Test direct_uploads_enabled not set - self.assertFalse(self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie")) + self.assertFalse( + self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie") + ) self.sg.server_info["s3_direct_uploads_enabled"] = True # Test regular path - self.assertTrue(self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie")) + self.assertTrue( + self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie") + ) self.assertFalse(self.sg._requires_direct_s3_upload("Version", "abc")) self.assertFalse(self.sg._requires_direct_s3_upload("Abc", "abc")) @@ -514,10 +547,12 @@ def test_requires_direct_s3_upload(self): self.sg.server_info["s3_enabled_upload_types"] = { "Version": ["sg_uploaded_movie", "test", "other"], "Test": ["*"], - "Asset": "*" + "Asset": "*", } - self.assertTrue(self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie")) + self.assertTrue( + self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie") + ) self.assertTrue(self.sg._requires_direct_s3_upload("Version", "test")) self.assertTrue(self.sg._requires_direct_s3_upload("Version", "other")) self.assertTrue(self.sg._requires_direct_s3_upload("Test", "abc")) @@ -525,22 +560,26 @@ def test_requires_direct_s3_upload(self): # Test default allowed upload type self.sg.server_info["s3_enabled_upload_types"] = None - self.assertTrue(self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie")) + self.assertTrue( + 
self.sg._requires_direct_s3_upload("Version", "sg_uploaded_movie") + ) self.assertFalse(self.sg._requires_direct_s3_upload("Version", "test")) # Test star entity_type self.sg.server_info["s3_enabled_upload_types"] = { "*": ["sg_uploaded_movie", "test"] } - self.assertTrue(self.sg._requires_direct_s3_upload("Something", "sg_uploaded_movie")) + self.assertTrue( + self.sg._requires_direct_s3_upload("Something", "sg_uploaded_movie") + ) self.assertTrue(self.sg._requires_direct_s3_upload("Version", "test")) self.assertFalse(self.sg._requires_direct_s3_upload("Version", "other")) # Test entity_type and field_name wildcard - self.sg.server_info["s3_enabled_upload_types"] = { - "*": "*" - } - self.assertTrue(self.sg._requires_direct_s3_upload("Something", "sg_uploaded_movie")) + self.sg.server_info["s3_enabled_upload_types"] = {"*": "*"} + self.assertTrue( + self.sg._requires_direct_s3_upload("Something", "sg_uploaded_movie") + ) self.assertTrue(self.sg._requires_direct_s3_upload("Version", "abc")) self.sg.server_info["s3_enabled_upload_types"] = upload_types @@ -548,10 +587,13 @@ def test_requires_direct_s3_upload(self): def test_linked_thumbnail_url(self): this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) - thumb_id = self.sg.upload_thumbnail("Project", self.version['project']['id'], path) + thumb_id = self.sg.upload_thumbnail( + "Project", self.version["project"]["id"], path + ) response_version_with_project = self.find_one_await_thumbnail( "Version", @@ -562,23 +604,31 @@ def test_linked_thumbnail_url(self): if self.sg.server_caps.version and self.sg.server_caps.version >= (3, 3, 0): - self.assertEqual(response_version_with_project.get('type'), 'Version') - self.assertEqual(response_version_with_project.get('id'), self.version['id']) - self.assertEqual(response_version_with_project.get('code'), 
self.config.version_code) + self.assertEqual(response_version_with_project.get("type"), "Version") + self.assertEqual( + response_version_with_project.get("id"), self.version["id"] + ) + self.assertEqual( + response_version_with_project.get("code"), self.config.version_code + ) h = Http(".cache") - thumb_resp, content = h.request(response_version_with_project.get('project.Project.image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request( + response_version_with_project.get("project.Project.image"), "GET" + ) + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) else: expected_version_with_project = { - 'code': self.config.version_code, - 'type': 'Version', - 'id': self.version['id'], - 'project.Project.image': thumb_id + "code": self.config.version_code, + "type": "Version", + "id": self.version["id"], + "project.Project.image": thumb_id, } - self.assertEqual(expected_version_with_project, response_version_with_project) + self.assertEqual( + expected_version_with_project, response_version_with_project + ) # For now skip tests that are erroneously failling on some sites to # allow CI to pass until the known issue causing this is resolved. 
@@ -601,49 +651,51 @@ def share_thumbnail_retry(*args, **kwargs): return thumbnail_id this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # upload thumbnail to first entity and share it with the rest share_thumbnail_retry([self.version, self.shot], thumbnail_path=path) response_version_thumbnail = self.find_one_await_thumbnail( - 'Version', - [['id', 'is', self.version['id']]], - fields=['id', 'code', 'image'], + "Version", + [["id", "is", self.version["id"]]], + fields=["id", "code", "image"], ) response_shot_thumbnail = self.find_one_await_thumbnail( - 'Shot', - [['id', 'is', self.shot['id']]], - fields=['id', 'code', 'image'], + "Shot", + [["id", "is", self.shot["id"]]], + fields=["id", "code", "image"], ) - shot_url = urllib.parse.urlparse(response_shot_thumbnail.get('image')) - version_url = urllib.parse.urlparse(response_version_thumbnail.get('image')) + shot_url = urllib.parse.urlparse(response_shot_thumbnail.get("image")) + version_url = urllib.parse.urlparse(response_version_thumbnail.get("image")) shot_path = _get_path(shot_url) version_path = _get_path(version_url) self.assertEqual(shot_path, version_path) # share thumbnail from source entity with entities - self.sg.upload_thumbnail("Version", self.version['id'], path) + self.sg.upload_thumbnail("Version", self.version["id"], path) share_thumbnail_retry([self.asset, self.shot], source_entity=self.version) response_version_thumbnail = self.find_one_await_thumbnail( - 'Version', - [['id', 'is', self.version['id']]], - fields=['id', 'code', 'image'], + "Version", + [["id", "is", self.version["id"]]], + fields=["id", "code", "image"], ) response_shot_thumbnail = self.find_one_await_thumbnail( - 'Shot', - [['id', 'is', self.shot['id']]], - fields=['id', 'code', 'image'], + "Shot", + [["id", "is", self.shot["id"]]], + fields=["id", "code", "image"], ) 
response_asset_thumbnail = self.find_one_await_thumbnail( - 'Asset', - [['id', 'is', self.asset['id']]], - fields=['id', 'code', 'image'], + "Asset", + [["id", "is", self.asset["id"]]], + fields=["id", "code", "image"], ) - shot_url = urllib.parse.urlparse(response_shot_thumbnail.get('image')) - version_url = urllib.parse.urlparse(response_version_thumbnail.get('image')) - asset_url = urllib.parse.urlparse(response_asset_thumbnail.get('image')) + shot_url = urllib.parse.urlparse(response_shot_thumbnail.get("image")) + version_url = urllib.parse.urlparse(response_version_thumbnail.get("image")) + asset_url = urllib.parse.urlparse(response_asset_thumbnail.get("image")) shot_path = _get_path(shot_url) version_path = _get_path(version_url) @@ -653,32 +705,48 @@ def share_thumbnail_retry(*args, **kwargs): self.assertEqual(version_path, asset_path) # raise errors when missing required params or providing conflicting ones - self.assertRaises(shotgun_api3.ShotgunError, self.sg.share_thumbnail, - [self.shot, self.asset], path, self.version) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.share_thumbnail, - [self.shot, self.asset]) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.share_thumbnail, + [self.shot, self.asset], + path, + self.version, + ) + self.assertRaises( + shotgun_api3.ShotgunError, self.sg.share_thumbnail, [self.shot, self.asset] + ) - @patch('shotgun_api3.Shotgun._send_form') + @patch("shotgun_api3.Shotgun._send_form") def test_share_thumbnail_not_ready(self, mock_send_form): """throw an exception if trying to share a transient thumbnail""" mock_send_form.method.assert_called_once() - mock_send_form.return_value = ("2" - "\nsource_entity image is a transient thumbnail that cannot be shared. " - "Try again later, when the final thumbnail is available\n") + mock_send_form.return_value = ( + "2" + "\nsource_entity image is a transient thumbnail that cannot be shared. 
" + "Try again later, when the final thumbnail is available\n" + ) - self.assertRaises(shotgun_api3.ShotgunThumbnailNotReady, self.sg.share_thumbnail, - [self.version, self.shot], source_entity=self.asset) + self.assertRaises( + shotgun_api3.ShotgunThumbnailNotReady, + self.sg.share_thumbnail, + [self.version, self.shot], + source_entity=self.asset, + ) - @patch('shotgun_api3.Shotgun._send_form') + @patch("shotgun_api3.Shotgun._send_form") def test_share_thumbnail_returns_error(self, mock_send_form): """throw an exception if server returns an error code""" mock_send_form.method.assert_called_once() mock_send_form.return_value = "1\nerror message.\n" - self.assertRaises(shotgun_api3.ShotgunError, self.sg.share_thumbnail, - [self.version, self.shot], source_entity=self.asset) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.share_thumbnail, + [self.version, self.shot], + source_entity=self.asset, + ) def test_deprecated_functions(self): """Deprecated functions raise errors""" @@ -687,35 +755,36 @@ def test_deprecated_functions(self): def test_simple_summary(self): """Test simple call to summarize""" - summaries = [{'field': 'id', 'type': 'count'}] - grouping = [{'direction': 'asc', 'field': 'id', 'type': 'exact'}] - filters = [['project', 'is', self.project]] - result = self.sg.summarize('Shot', - filters=filters, - summary_fields=summaries, - grouping=grouping) - assert(result['groups']) - assert(result['groups'][0]['group_name']) - assert(result['groups'][0]['group_value']) - assert(result['groups'][0]['summaries']) - assert(result['summaries']) + summaries = [{"field": "id", "type": "count"}] + grouping = [{"direction": "asc", "field": "id", "type": "exact"}] + filters = [["project", "is", self.project]] + result = self.sg.summarize( + "Shot", filters=filters, summary_fields=summaries, grouping=grouping + ) + assert result["groups"] + assert result["groups"][0]["group_name"] + assert result["groups"][0]["group_value"] + assert 
result["groups"][0]["summaries"] + assert result["summaries"] def test_summary_include_archived_projects(self): """Test summarize with archived project""" if self.sg.server_caps.version > (5, 3, 13): # archive project - self.sg.update('Project', self.project['id'], {'archived': True}) + self.sg.update("Project", self.project["id"], {"archived": True}) # Ticket #25082 ability to hide archived projects in summary - summaries = [{'field': 'id', 'type': 'count'}] - grouping = [{'direction': 'asc', 'field': 'id', 'type': 'exact'}] - filters = [['project', 'is', self.project]] - result = self.sg.summarize('Shot', - filters=filters, - summary_fields=summaries, - grouping=grouping, - include_archived_projects=False) - self.assertEqual(result['summaries']['id'], 0) - self.sg.update('Project', self.project['id'], {'archived': False}) + summaries = [{"field": "id", "type": "count"}] + grouping = [{"direction": "asc", "field": "id", "type": "exact"}] + filters = [["project", "is", self.project]] + result = self.sg.summarize( + "Shot", + filters=filters, + summary_fields=summaries, + grouping=grouping, + include_archived_projects=False, + ) + self.assertEqual(result["summaries"]["id"], 0) + self.sg.update("Project", self.project["id"], {"archived": False}) def test_summary_values(self): """Test summarize return data""" @@ -729,28 +798,28 @@ def test_summary_values(self): "code": "%s Shot 1" % shot_prefix, "sg_status_list": "ip", "sg_cut_duration": 100, - "project": self.project + "project": self.project, } shot_data_2 = { "code": "%s Shot 2" % shot_prefix, "sg_status_list": "ip", "sg_cut_duration": 100, - "project": self.project + "project": self.project, } shot_data_3 = { "code": "%s Shot 3" % shot_prefix, "sg_status_list": "fin", "sg_cut_duration": 100, - "project": self.project + "project": self.project, } shot_data_4 = { "code": "%s Shot 4" % shot_prefix, "sg_status_list": "wtg", "sg_cut_duration": 0, - "project": self.project + "project": self.project, } 
shots.append(self.sg.create("Shot", shot_data_1)) @@ -758,140 +827,167 @@ def test_summary_values(self): shots.append(self.sg.create("Shot", shot_data_3)) shots.append(self.sg.create("Shot", shot_data_4)) - summaries = [{'field': 'id', 'type': 'count'}, - {'field': 'sg_cut_duration', 'type': 'sum'}] - grouping = [{'direction': 'asc', - 'field': 'sg_status_list', - 'type': 'exact'}] - filters = [['project', 'is', self.project], - ['code', 'starts_with', shot_prefix]] - result = self.sg.summarize('Shot', - filters=filters, - summary_fields=summaries, - grouping=grouping) - count = {'id': 4, 'sg_cut_duration': 300} + summaries = [ + {"field": "id", "type": "count"}, + {"field": "sg_cut_duration", "type": "sum"}, + ] + grouping = [{"direction": "asc", "field": "sg_status_list", "type": "exact"}] + filters = [ + ["project", "is", self.project], + ["code", "starts_with", shot_prefix], + ] + result = self.sg.summarize( + "Shot", filters=filters, summary_fields=summaries, grouping=grouping + ) + count = {"id": 4, "sg_cut_duration": 300} groups = [ { - 'group_name': 'fin', - 'group_value': 'fin', - 'summaries': {'id': 1, 'sg_cut_duration': 100} + "group_name": "fin", + "group_value": "fin", + "summaries": {"id": 1, "sg_cut_duration": 100}, }, { - 'group_name': 'ip', - 'group_value': 'ip', - 'summaries': {'id': 2, 'sg_cut_duration': 200} + "group_name": "ip", + "group_value": "ip", + "summaries": {"id": 2, "sg_cut_duration": 200}, }, { - 'group_name': 'wtg', - 'group_value': 'wtg', - 'summaries': {'id': 1, 'sg_cut_duration': 0} - } + "group_name": "wtg", + "group_value": "wtg", + "summaries": {"id": 1, "sg_cut_duration": 0}, + }, ] # clean up batch_data = [] for s in shots: - batch_data.append({ - "request_type": "delete", - "entity_type": "Shot", - "entity_id": s["id"] - }) + batch_data.append( + {"request_type": "delete", "entity_type": "Shot", "entity_id": s["id"]} + ) self.sg.batch(batch_data) - self.assertEqual(result['summaries'], count) + 
self.assertEqual(result["summaries"], count) # Do not assume the order of the summarized results. self.assertEqual( - sorted( - result['groups'], - key=lambda x: x["group_name"] - ), - groups + sorted(result["groups"], key=lambda x: x["group_name"]), groups ) def test_ensure_ascii(self): - '''test_ensure_ascii tests ensure_unicode flag.''' - sg_ascii = shotgun_api3.Shotgun(self.config.server_url, - ensure_ascii=True, - **self.auth_args) + """test_ensure_ascii tests ensure_unicode flag.""" + sg_ascii = shotgun_api3.Shotgun( + self.config.server_url, ensure_ascii=True, **self.auth_args + ) - result = sg_ascii.find_one('Note', [['id', 'is', self.note['id']]], fields=['content']) + result = sg_ascii.find_one( + "Note", [["id", "is", self.note["id"]]], fields=["content"] + ) if six.PY2: # In Python3 there isn't a separate unicode type. self.assertFalse(_has_unicode(result)) def test_ensure_unicode(self): - '''test_ensure_unicode tests ensure_unicode flag.''' - sg_unicode = shotgun_api3.Shotgun(self.config.server_url, - ensure_ascii=False, - **self.auth_args) - result = sg_unicode.find_one('Note', [['id', 'is', self.note['id']]], fields=['content']) + """test_ensure_unicode tests ensure_unicode flag.""" + sg_unicode = shotgun_api3.Shotgun( + self.config.server_url, ensure_ascii=False, **self.auth_args + ) + result = sg_unicode.find_one( + "Note", [["id", "is", self.note["id"]]], fields=["content"] + ) self.assertTrue(_has_unicode(result)) def test_work_schedule(self): - '''test_work_schedule tests WorkDayRules api''' + """test_work_schedule tests WorkDayRules api""" self.maxDiff = None - start_date = '2012-01-01' + start_date = "2012-01-01" start_date_obj = datetime.datetime(2012, 1, 1) - end_date = '2012-01-07' + end_date = "2012-01-07" end_date_obj = datetime.datetime(2012, 1, 7) project = self.project # We're going to be comparing this value with the value returned from the server, so extract only the type, id # and name - user = {"type": self.human_user["type"], 
"id": self.human_user["id"], "name": self.human_user["name"]} + user = { + "type": self.human_user["type"], + "id": self.human_user["id"], + "name": self.human_user["name"], + } work_schedule = self.sg.work_schedule_read(start_date, end_date, project, user) # Test that the work_schedule_read api method is called with the 'start_date' and 'end_date' arguments # in the 'YYYY-MM-DD' string format. - self.assertRaises(shotgun_api3.ShotgunError, self.sg.work_schedule_read, - start_date_obj, end_date_obj, project, user) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.work_schedule_read, + start_date_obj, + end_date_obj, + project, + user, + ) - resp = self.sg.work_schedule_update('2012-01-02', False, 'Studio Holiday') + resp = self.sg.work_schedule_update("2012-01-02", False, "Studio Holiday") expected = { - 'date': '2012-01-02', - 'description': 'Studio Holiday', - 'project': None, - 'user': None, - 'working': False + "date": "2012-01-02", + "description": "Studio Holiday", + "project": None, + "user": None, + "working": False, } self.assertEqual(expected, resp) resp = self.sg.work_schedule_read(start_date, end_date, project, user) - work_schedule['2012-01-02'] = {"reason": "STUDIO_EXCEPTION", "working": False, "description": "Studio Holiday"} + work_schedule["2012-01-02"] = { + "reason": "STUDIO_EXCEPTION", + "working": False, + "description": "Studio Holiday", + } self.assertEqual(work_schedule, resp) - resp = self.sg.work_schedule_update('2012-01-03', False, 'Project Holiday', project) + resp = self.sg.work_schedule_update( + "2012-01-03", False, "Project Holiday", project + ) expected = { - 'date': '2012-01-03', - 'description': 'Project Holiday', - 'project': project, - 'user': None, - 'working': False + "date": "2012-01-03", + "description": "Project Holiday", + "project": project, + "user": None, + "working": False, } self.assertEqual(expected, resp) resp = self.sg.work_schedule_read(start_date, end_date, project, user) - work_schedule['2012-01-03'] = 
{ + work_schedule["2012-01-03"] = { "reason": "PROJECT_EXCEPTION", "working": False, - "description": "Project Holiday" + "description": "Project Holiday", } self.assertEqual(work_schedule, resp) jan4 = datetime.datetime(2012, 1, 4) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.work_schedule_update, - jan4, False, 'Artist Holiday', user=user) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.work_schedule_update, + jan4, + False, + "Artist Holiday", + user=user, + ) - resp = self.sg.work_schedule_update("2012-01-04", False, 'Artist Holiday', user=user) + resp = self.sg.work_schedule_update( + "2012-01-04", False, "Artist Holiday", user=user + ) expected = { - 'date': '2012-01-04', - 'description': 'Artist Holiday', - 'project': None, - 'user': user, - 'working': False + "date": "2012-01-04", + "description": "Artist Holiday", + "project": None, + "user": user, + "working": False, } self.assertEqual(expected, resp) resp = self.sg.work_schedule_read(start_date, end_date, project, user) - work_schedule['2012-01-04'] = {"reason": "USER_EXCEPTION", "working": False, "description": "Artist Holiday"} + work_schedule["2012-01-04"] = { + "reason": "USER_EXCEPTION", + "working": False, + "description": "Artist Holiday", + } self.assertEqual(work_schedule, resp) # test_preferences_read fails when preferences don't match the expected @@ -908,21 +1004,21 @@ def test_preferences_read(self): resp = self.sg.preferences_read() expected = { - 'date_component_order': 'month_day', - 'duration_units': 'days', - 'format_currency_fields_decimal_options': '$1,000.99', - 'format_currency_fields_display_dollar_sign': False, - 'format_currency_fields_negative_options': '- $1,000', - 'format_date_fields': '08/04/22 OR 04/08/22 (depending on the Month order preference)', - 'format_float_fields': '9,999.99', - 'format_float_fields_rounding': '9.999999', - 'format_footage_fields': '10-05', - 'format_number_fields': '1,000', - 'format_time_hour_fields': '12 hour', - 
'hours_per_day': 8.0, - 'support_local_storage': True, - 'enable_rv_integration': True, - 'enable_shotgun_review_for_rv': False, + "date_component_order": "month_day", + "duration_units": "days", + "format_currency_fields_decimal_options": "$1,000.99", + "format_currency_fields_display_dollar_sign": False, + "format_currency_fields_negative_options": "- $1,000", + "format_date_fields": "08/04/22 OR 04/08/22 (depending on the Month order preference)", + "format_float_fields": "9,999.99", + "format_float_fields_rounding": "9.999999", + "format_footage_fields": "10-05", + "format_number_fields": "1,000", + "format_time_hour_fields": "12 hour", + "hours_per_day": 8.0, + "support_local_storage": True, + "enable_rv_integration": True, + "enable_shotgun_review_for_rv": False, } # Simply make sure viewmaster settings are there. These change frequently and we # don't want to have the test break because Viewmaster changed or because we didn't @@ -933,253 +1029,238 @@ def test_preferences_read(self): self.assertEqual(expected, resp) # all filtered - resp = self.sg.preferences_read(['date_component_order', 'support_local_storage']) + resp = self.sg.preferences_read( + ["date_component_order", "support_local_storage"] + ) - expected = { - 'date_component_order': 'month_day', - 'support_local_storage': True - } + expected = {"date_component_order": "month_day", "support_local_storage": True} self.assertEqual(expected, resp) # all filtered with invalid pref - resp = self.sg.preferences_read(['date_component_order', 'support_local_storage', 'email_notifications']) + resp = self.sg.preferences_read( + ["date_component_order", "support_local_storage", "email_notifications"] + ) - expected = { - 'date_component_order': 'month_day', - 'support_local_storage': True - } + expected = {"date_component_order": "month_day", "support_local_storage": True} self.assertEqual(expected, resp) class TestDataTypes(base.LiveTestBase): - '''Test fields representing the different data types mapped on 
the server side. + """Test fields representing the different data types mapped on the server side. - Untested data types: password, percent, pivot_column, serializable, image, currency - system_task_type, timecode, url, uuid, url_template - ''' + Untested data types: password, percent, pivot_column, serializable, image, currency + system_task_type, timecode, url, uuid, url_template + """ def setUp(self): super(TestDataTypes, self).setUp() def test_set_checkbox(self): - entity = 'HumanUser' - entity_id = self.human_user['id'] - field_name = 'email_notes' + entity = "HumanUser" + entity_id = self.human_user["id"] + field_name = "email_notes" pos_values = [False, True] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_color(self): - entity = 'Task' - entity_id = self.task['id'] - field_name = 'color' - pos_values = ['pipeline_step', '222,0,0'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Task" + entity_id = self.task["id"] + field_name = "color" + pos_values = ["pipeline_step", "222,0,0"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_date(self): - entity = 'Task' - entity_id = self.task['id'] - field_name = 'due_date' - pos_values = ['2008-05-08', '2011-05-05'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Task" + entity_id = self.task["id"] + field_name = "due_date" + pos_values = ["2008-05-08", "2011-05-05"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_date_time(self): if self.config.jenkins: self.skipTest("Jenkins. 
locked_until not updating.") - entity = 'HumanUser' - entity_id = self.human_user['id'] - field_name = 'locked_until' + entity = "HumanUser" + entity_id = self.human_user["id"] + field_name = "locked_until" local = shotgun_api3.shotgun.SG_TIMEZONE.local dt_1 = datetime.datetime(2008, 10, 13, 23, 10, tzinfo=local) dt_2 = datetime.datetime(2009, 10, 13, 23, 10, tzinfo=local) pos_values = [dt_1, dt_2] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_duration(self): - entity = 'Task' - entity_id = self.task['id'] - field_name = 'duration' + entity = "Task" + entity_id = self.task["id"] + field_name = "duration" pos_values = [2100, 1300] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_entity(self): - entity = 'Task' - entity_id = self.task['id'] - field_name = 'entity' + entity = "Task" + entity_id = self.task["id"] + field_name = "entity" pos_values = [self.asset, self.shot] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) - self.assertEqual(expected['id'], actual['id']) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) + self.assertEqual(expected["id"], actual["id"]) def test_set_float(self): - entity = 'Version' - entity_id = self.version['id'] - field_name = 'sg_movie_aspect_ratio' + entity = "Version" + entity_id = self.version["id"] + field_name = "sg_movie_aspect_ratio" pos_values = [2.0, 3.0] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def 
test_set_list(self): - entity = 'Note' - entity_id = self.note['id'] - field_name = 'sg_note_type' - pos_values = ['Internal', 'Client'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Note" + entity_id = self.note["id"] + field_name = "sg_note_type" + pos_values = ["Internal", "Client"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_multi_entity(self): - sg = shotgun_api3.Shotgun(self.config.server_url, - **self.auth_args) - keys = ['project', 'user', 'code'] - data = {'project': self.project, - 'user': self.human_user, - 'code': 'Alpha'} - version_1 = base._find_or_create_entity(sg, 'Version', data, keys) - data = {'project': self.project, - 'user': self.human_user, - 'code': 'Beta'} - version_2 = base._find_or_create_entity(sg, 'Version', data, keys) - - entity = 'Playlist' - entity_id = self.playlist['id'] - field_name = 'versions' + sg = shotgun_api3.Shotgun(self.config.server_url, **self.auth_args) + keys = ["project", "user", "code"] + data = {"project": self.project, "user": self.human_user, "code": "Alpha"} + version_1 = base._find_or_create_entity(sg, "Version", data, keys) + data = {"project": self.project, "user": self.human_user, "code": "Beta"} + version_2 = base._find_or_create_entity(sg, "Version", data, keys) + + entity = "Playlist" + entity_id = self.playlist["id"] + field_name = "versions" # Default set behaviour pos_values = [[version_1, version_2]] - expected, actual = self.assert_set_field(entity, entity_id, field_name, pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(len(expected), len(actual)) self.assertEqual( - sorted([x['id'] for x in expected]), - sorted([x['id'] for x in actual]) + sorted([x["id"] for x in expected]), sorted([x["id"] for x in actual]) ) # Multi-entity remove mode pos_values = [[version_1]] - expected, 
actual = self.assert_set_field(entity, entity_id, field_name, pos_values, - multi_entity_update_mode='remove') + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values, multi_entity_update_mode="remove" + ) self.assertEqual(1, len(actual)) self.assertEqual(len(expected), len(actual)) - self.assertNotEqual(expected[0]['id'], actual[0]['id']) - self.assertEqual(version_2['id'], actual[0]['id']) + self.assertNotEqual(expected[0]["id"], actual[0]["id"]) + self.assertEqual(version_2["id"], actual[0]["id"]) # Multi-entity add mode pos_values = [[version_1]] - expected, actual = self.assert_set_field(entity, entity_id, field_name, pos_values, - multi_entity_update_mode='add') + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values, multi_entity_update_mode="add" + ) self.assertEqual(2, len(actual)) - self.assertTrue(version_1['id'] in [x['id'] for x in actual]) + self.assertTrue(version_1["id"] in [x["id"] for x in actual]) # Multi-entity set mode pos_values = [[version_1, version_2]] - expected, actual = self.assert_set_field(entity, entity_id, field_name, pos_values, - multi_entity_update_mode='set') + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values, multi_entity_update_mode="set" + ) self.assertEqual(len(expected), len(actual)) self.assertEqual( - sorted([x['id'] for x in expected]), - sorted([x['id'] for x in actual]) + sorted([x["id"] for x in expected]), sorted([x["id"] for x in actual]) ) def test_set_number(self): - entity = 'Shot' - entity_id = self.shot['id'] - field_name = 'head_in' + entity = "Shot" + entity_id = self.shot["id"] + field_name = "head_in" pos_values = [2300, 1300] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_status_list(self): - entity = 'Task' - entity_id = 
self.task['id'] - field_name = 'sg_status_list' - pos_values = ['wtg', 'fin'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Task" + entity_id = self.task["id"] + field_name = "sg_status_list" + pos_values = ["wtg", "fin"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_tag_list(self): - entity = 'Task' - entity_id = self.task['id'] - field_name = 'tag_list' - pos_values = [['a', 'b'], ['c']] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Task" + entity_id = self.task["id"] + field_name = "tag_list" + pos_values = [["a", "b"], ["c"]] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_text(self): - entity = 'Note' - entity_id = self.note['id'] - field_name = 'content' - pos_values = ['this content', 'that content'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Note" + entity_id = self.note["id"] + field_name = "content" + pos_values = ["this content", "that content"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) def test_set_text_html_entity(self): - entity = 'Note' - entity_id = self.note['id'] - field_name = 'content' - pos_values = ['<', '<'] - expected, actual = self.assert_set_field(entity, - entity_id, - field_name, - pos_values) + entity = "Note" + entity_id = self.note["id"] + field_name = "content" + pos_values = ["<", "<"] + expected, actual = self.assert_set_field( + entity, entity_id, field_name, pos_values + ) self.assertEqual(expected, actual) - def assert_set_field(self, entity, entity_id, field_name, pos_values, multi_entity_update_mode=None): - query_result = self.sg.find_one(entity, - [['id', 'is', entity_id]], - 
[field_name]) + def assert_set_field( + self, entity, entity_id, field_name, pos_values, multi_entity_update_mode=None + ): + query_result = self.sg.find_one(entity, [["id", "is", entity_id]], [field_name]) initial_value = query_result[field_name] new_value = (initial_value == pos_values[0] and pos_values[1]) or pos_values[0] if multi_entity_update_mode: - self.sg.update(entity, entity_id, {field_name: new_value}, - multi_entity_update_modes={field_name: multi_entity_update_mode}) + self.sg.update( + entity, + entity_id, + {field_name: new_value}, + multi_entity_update_modes={field_name: multi_entity_update_mode}, + ) else: self.sg.update(entity, entity_id, {field_name: new_value}) - new_values = self.sg.find_one(entity, - [['id', 'is', entity_id]], - [field_name]) + new_values = self.sg.find_one(entity, [["id", "is", entity_id]], [field_name]) return new_value, new_values[field_name] class TestUtc(base.LiveTestBase): - '''Test utc options''' + """Test utc options""" def setUp(self): super(TestUtc, self).setUp() @@ -1192,29 +1273,33 @@ def setUp(self): def test_convert_to_utc(self): if self.config.jenkins: self.skipTest("Jenkins. locked_until not updating.") - sg_utc = shotgun_api3.Shotgun(self.config.server_url, - http_proxy=self.config.http_proxy, - convert_datetimes_to_utc=True, - **self.auth_args) + sg_utc = shotgun_api3.Shotgun( + self.config.server_url, + http_proxy=self.config.http_proxy, + convert_datetimes_to_utc=True, + **self.auth_args, + ) self._assert_expected(sg_utc, self.datetime_none, self.datetime_local) self._assert_expected(sg_utc, self.datetime_local, self.datetime_local) def test_no_convert_to_utc(self): if self.config.jenkins: self.skipTest("Jenkins. 
locked_until not updating.") - sg_no_utc = shotgun_api3.Shotgun(self.config.server_url, - http_proxy=self.config.http_proxy, - convert_datetimes_to_utc=False, - **self.auth_args) + sg_no_utc = shotgun_api3.Shotgun( + self.config.server_url, + http_proxy=self.config.http_proxy, + convert_datetimes_to_utc=False, + **self.auth_args, + ) self._assert_expected(sg_no_utc, self.datetime_none, self.datetime_none) self._assert_expected(sg_no_utc, self.datetime_utc, self.datetime_none) def _assert_expected(self, sg, date_time, expected): - entity_name = 'HumanUser' - entity_id = self.human_user['id'] - field_name = 'locked_until' + entity_name = "HumanUser" + entity_id = self.human_user["id"] + field_name = "locked_until" sg.update(entity_name, entity_id, {field_name: date_time}) - result = sg.find_one(entity_name, [['id', 'is', entity_id]], [field_name]) + result = sg.find_one(entity_name, [["id", "is", entity_id]], [field_name]) self.assertEqual(result[field_name], expected) @@ -1223,31 +1308,33 @@ def setUp(self): super(TestFind, self).setUp() # We will need the created_at field for the shot fields = list(self.shot.keys())[:] - fields.append('created_at') - self.shot = self.sg.find_one('Shot', [['id', 'is', self.shot['id']]], fields) + fields.append("created_at") + self.shot = self.sg.find_one("Shot", [["id", "is", self.shot["id"]]], fields) # We will need the uuid field for our LocalStorage fields = list(self.local_storage.keys())[:] - fields.append('uuid') - self.local_storage = self.sg.find_one('LocalStorage', [['id', 'is', self.local_storage['id']]], fields) + fields.append("uuid") + self.local_storage = self.sg.find_one( + "LocalStorage", [["id", "is", self.local_storage["id"]]], fields + ) def test_find(self): """Called find, find_one for known entities""" filters = [] - filters.append(['project', 'is', self.project]) - filters.append(['id', 'is', self.version['id']]) + filters.append(["project", "is", self.project]) + filters.append(["id", "is", 
self.version["id"]]) - fields = ['id'] + fields = ["id"] versions = self.sg.find("Version", filters, fields=fields) self.assertTrue(isinstance(versions, list)) version = versions[0] self.assertEqual("Version", version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) version = self.sg.find_one("Version", filters, fields=fields) self.assertEqual("Version", version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) def _id_in_result(self, entity_type, filters, expected_id): """ @@ -1255,126 +1342,138 @@ def _id_in_result(self, entity_type, filters, expected_id): for particular filters. """ results = self.sg.find(entity_type, filters) - return any(result['id'] == expected_id for result in results) + return any(result["id"] == expected_id for result in results) # TODO test all applicable data types for 'in' - # 'currency' => [BigDecimal, Float, NilClass], - # 'image' => [Hash, NilClass], - # 'percent' => [Bignum, Fixnum, NilClass], - # 'serializable' => [Hash, Array, NilClass], - # 'system_task_type' => [String, NilClass], - # 'timecode' => [Bignum, Fixnum, NilClass], - # 'footage' => [Bignum, Fixnum, NilClass, String, Float, BigDecimal], - # 'url' => [Hash, NilClass], + # 'currency' => [BigDecimal, Float, NilClass], + # 'image' => [Hash, NilClass], + # 'percent' => [Bignum, Fixnum, NilClass], + # 'serializable' => [Hash, Array, NilClass], + # 'system_task_type' => [String, NilClass], + # 'timecode' => [Bignum, Fixnum, NilClass], + # 'footage' => [Bignum, Fixnum, NilClass, String, Float, BigDecimal], + # 'url' => [Hash, NilClass], - # 'uuid' => [String], + # 'uuid' => [String], def test_in_relation_comma_id(self): """ Test that 'in' relation using commas (old format) works with ids. 
""" - filters = [['id', 'in', self.project['id'], 99999]] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["id", "in", self.project["id"], 99999]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertTrue(result) def test_in_relation_list_id(self): """ Test that 'in' relation using list (new format) works with ids. """ - filters = [['id', 'in', [self.project['id'], 99999]]] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["id", "in", [self.project["id"], 99999]]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertTrue(result) def test_not_in_relation_id(self): """ Test that 'not_in' relation using commas (old format) works with ids. """ - filters = [['id', 'not_in', self.project['id'], 99999]] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["id", "not_in", self.project["id"], 99999]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertFalse(result) def test_in_relation_comma_text(self): """ Test that 'in' relation using commas (old format) works with text fields. """ - filters = [['name', 'in', self.project['name'], 'fake project name']] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["name", "in", self.project["name"], "fake project name"]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertTrue(result) def test_in_relation_list_text(self): """ Test that 'in' relation using list (new format) works with text fields. 
""" - filters = [['name', 'in', [self.project['name'], 'fake project name']]] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["name", "in", [self.project["name"], "fake project name"]]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertTrue(result) def test_not_in_relation_text(self): """ Test that 'not_in' relation using commas (old format) works with ids. """ - filters = [['name', 'not_in', [self.project['name'], 'fake project name']]] - result = self._id_in_result('Project', filters, self.project['id']) + filters = [["name", "not_in", [self.project["name"], "fake project name"]]] + result = self._id_in_result("Project", filters, self.project["id"]) self.assertFalse(result) def test_in_relation_comma_color(self): """ Test that 'in' relation using commas (old format) works with color fields. """ - filters = [['color', 'in', self.task['color'], 'Green'], - ['project', 'is', self.project]] + filters = [ + ["color", "in", self.task["color"], "Green"], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_color(self): """ Test that 'in' relation using list (new format) works with color fields. """ - filters = [['color', 'in', [self.task['color'], 'Green']], - ['project', 'is', self.project]] + filters = [ + ["color", "in", [self.task["color"], "Green"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_not_in_relation_color(self): """ Test that 'not_in' relation using commas (old format) works with color fields. 
""" - filters = [['color', 'not_in', [self.task['color'], 'Green']], - ['project', 'is', self.project]] + filters = [ + ["color", "not_in", [self.task["color"], "Green"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) def test_in_relation_comma_date(self): """ Test that 'in' relation using commas (old format) works with date fields. """ - filters = [['due_date', 'in', self.task['due_date'], '2012-11-25'], - ['project', 'is', self.project]] + filters = [ + ["due_date", "in", self.task["due_date"], "2012-11-25"], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_date(self): """ Test that 'in' relation using list (new format) works with date fields. """ - filters = [['due_date', 'in', [self.task['due_date'], '2012-11-25']], - ['project', 'is', self.project]] + filters = [ + ["due_date", "in", [self.task["due_date"], "2012-11-25"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_not_in_relation_date(self): """ Test that 'not_in' relation using commas (old format) works with date fields. 
""" - filters = [['due_date', 'not_in', [self.task['due_date'], '2012-11-25']], - ['project', 'is', self.project]] + filters = [ + ["due_date", "not_in", [self.task["due_date"], "2012-11-25"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) # TODO add datetime test for in and not_in @@ -1385,12 +1484,16 @@ def test_in_relation_comma_duration(self): """ # we need to get the duration value new_task_keys = list(self.task.keys())[:] - new_task_keys.append('duration') - self.task = self.sg.find_one('Task', [['id', 'is', self.task['id']]], new_task_keys) - filters = [['duration', 'in', self.task['duration']], - ['project', 'is', self.project]] + new_task_keys.append("duration") + self.task = self.sg.find_one( + "Task", [["id", "is", self.task["id"]]], new_task_keys + ) + filters = [ + ["duration", "in", self.task["duration"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_duration(self): @@ -1399,12 +1502,22 @@ def test_in_relation_list_duration(self): """ # we need to get the duration value new_task_keys = list(self.task.keys())[:] - new_task_keys.append('duration') - self.task = self.sg.find_one('Task', [['id', 'is', self.task['id']]], new_task_keys) - filters = [['duration', 'in', [self.task['duration'], ]], - ['project', 'is', self.project]] + new_task_keys.append("duration") + self.task = self.sg.find_one( + "Task", [["id", "is", self.task["id"]]], new_task_keys + ) + filters = [ + [ + "duration", + "in", + [ + self.task["duration"], + ], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def 
test_not_in_relation_duration(self): @@ -1413,339 +1526,473 @@ def test_not_in_relation_duration(self): """ # we need to get the duration value new_task_keys = list(self.task.keys())[:] - new_task_keys.append('duration') - self.task = self.sg.find_one('Task', [['id', 'is', self.task['id']]], new_task_keys) + new_task_keys.append("duration") + self.task = self.sg.find_one( + "Task", [["id", "is", self.task["id"]]], new_task_keys + ) - filters = [['duration', 'not_in', [self.task['duration'], ]], - ['project', 'is', self.project]] + filters = [ + [ + "duration", + "not_in", + [ + self.task["duration"], + ], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) def test_in_relation_comma_entity(self): """ Test that 'in' relation using commas (old format) works with entity fields. """ - filters = [['entity', 'in', self.task['entity'], self.asset], - ['project', 'is', self.project]] + filters = [ + ["entity", "in", self.task["entity"], self.asset], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_entity(self): """ Test that 'in' relation using list (new format) works with entity fields. """ - filters = [['entity', 'in', [self.task['entity'], self.asset]], - ['project', 'is', self.project]] + filters = [ + ["entity", "in", [self.task["entity"], self.asset]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_not_in_relation_entity(self): """ Test that 'not_in' relation using commas (old format) works with entity fields. 
""" - filters = [['entity', 'not_in', [self.task['entity'], self.asset]], - ['project', 'is', self.project]] + filters = [ + ["entity", "not_in", [self.task["entity"], self.asset]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) def test_in_relation_comma_entity_type(self): """ Test that 'in' relation using commas (old format) works with entity_type fields. """ - filters = [['entity_type', 'in', self.step['entity_type'], 'something else']] + filters = [["entity_type", "in", self.step["entity_type"], "something else"]] - result = self._id_in_result('Step', filters, self.step['id']) + result = self._id_in_result("Step", filters, self.step["id"]) self.assertTrue(result) def test_in_relation_list_entity_type(self): """ Test that 'in' relation using list (new format) works with entity_type fields. """ - filters = [['entity_type', 'in', [self.step['entity_type'], 'something else']]] + filters = [["entity_type", "in", [self.step["entity_type"], "something else"]]] - result = self._id_in_result('Step', filters, self.step['id']) + result = self._id_in_result("Step", filters, self.step["id"]) self.assertTrue(result) def test_not_in_relation_entity_type(self): """ Test that 'not_in' relation using commas (old format) works with entity_type fields. """ - filters = [['entity_type', 'not_in', [self.step['entity_type'], 'something else']]] + filters = [ + ["entity_type", "not_in", [self.step["entity_type"], "something else"]] + ] - result = self._id_in_result('Step', filters, self.step['id']) + result = self._id_in_result("Step", filters, self.step["id"]) self.assertFalse(result) def test_in_relation_comma_float(self): """ Test that 'in' relation using commas (old format) works with float fields. 
""" - filters = [['sg_frames_aspect_ratio', 'in', self.version['sg_frames_aspect_ratio'], 44.0], - ['project', 'is', self.project]] + filters = [ + [ + "sg_frames_aspect_ratio", + "in", + self.version["sg_frames_aspect_ratio"], + 44.0, + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_in_relation_list_float(self): """ Test that 'in' relation using list (new format) works with float fields. """ - filters = [['sg_frames_aspect_ratio', 'in', [self.version['sg_frames_aspect_ratio'], 30.0]], - ['project', 'is', self.project]] + filters = [ + [ + "sg_frames_aspect_ratio", + "in", + [self.version["sg_frames_aspect_ratio"], 30.0], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_not_in_relation_float(self): """ Test that 'not_in' relation using commas (old format) works with float fields. """ - filters = [['sg_frames_aspect_ratio', 'not_in', [self.version['sg_frames_aspect_ratio'], 4.4]], - ['project', 'is', self.project]] + filters = [ + [ + "sg_frames_aspect_ratio", + "not_in", + [self.version["sg_frames_aspect_ratio"], 4.4], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertFalse(result) def test_in_relation_comma_list(self): """ Test that 'in' relation using commas (old format) works with list fields. 
""" - filters = [['frame_count', 'in', self.version['frame_count'], 33], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "in", self.version["frame_count"], 33], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_in_relation_list_list(self): """ Test that 'in' relation using list (new format) works with list fields. """ - filters = [['frame_count', 'in', [self.version['frame_count'], 33]], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "in", [self.version["frame_count"], 33]], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_not_in_relation_list(self): """ Test that 'not_in' relation using commas (old format) works with list fields. """ - filters = [['frame_count', 'not_in', [self.version['frame_count'], 33]], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "not_in", [self.version["frame_count"], 33]], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertFalse(result) def test_in_relation_comma_multi_entity(self): """ Test that 'in' relation using commas (old format) works with multi_entity fields. 
""" - filters = [['task_assignees', 'in', self.human_user, ], - ['project', 'is', self.project]] + filters = [ + [ + "task_assignees", + "in", + self.human_user, + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_multi_entity(self): """ Test that 'in' relation using list (new format) works with multi_entity fields. """ - filters = [['task_assignees', 'in', [self.human_user, ]], - ['project', 'is', self.project]] + filters = [ + [ + "task_assignees", + "in", + [ + self.human_user, + ], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_not_in_relation_multi_entity(self): """ Test that 'not_in' relation using commas (old format) works with multi_entity fields. """ - filters = [['task_assignees', 'not_in', [self.human_user, ]], - ['project', 'is', self.project]] + filters = [ + [ + "task_assignees", + "not_in", + [ + self.human_user, + ], + ], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) def test_in_relation_comma_number(self): """ Test that 'in' relation using commas (old format) works with number fields. """ - filters = [['frame_count', 'in', self.version['frame_count'], 1], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "in", self.version["frame_count"], 1], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_in_relation_list_number(self): """ Test that 'in' relation using list (new format) works with number fields. 
""" - filters = [['frame_count', 'in', [self.version['frame_count'], 1]], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "in", [self.version["frame_count"], 1]], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertTrue(result) def test_not_in_relation_number(self): """ Test that 'not_in' relation using commas (old format) works with number fields. """ - filters = [['frame_count', 'not_in', [self.version['frame_count'], 1]], - ['project', 'is', self.project]] + filters = [ + ["frame_count", "not_in", [self.version["frame_count"], 1]], + ["project", "is", self.project], + ] - result = self._id_in_result('Version', filters, self.version['id']) + result = self._id_in_result("Version", filters, self.version["id"]) self.assertFalse(result) def test_in_relation_comma_status_list(self): """ Test that 'in' relation using commas (old format) works with status_list fields. """ - filters = [['sg_status_list', 'in', self.task['sg_status_list'], 'fin'], - ['project', 'is', self.project]] + filters = [ + ["sg_status_list", "in", self.task["sg_status_list"], "fin"], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_in_relation_list_status_list(self): """ Test that 'in' relation using list (new format) works with status_list fields. 
""" - filters = [['sg_status_list', 'in', [self.task['sg_status_list'], 'fin']], - ['project', 'is', self.project]] + filters = [ + ["sg_status_list", "in", [self.task["sg_status_list"], "fin"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertTrue(result) def test_not_in_relation_status_list(self): """ Test that 'not_in' relation using commas (old format) works with status_list fields. """ - filters = [['sg_status_list', 'not_in', [self.task['sg_status_list'], 'fin']], - ['project', 'is', self.project]] + filters = [ + ["sg_status_list", "not_in", [self.task["sg_status_list"], "fin"]], + ["project", "is", self.project], + ] - result = self._id_in_result('Task', filters, self.task['id']) + result = self._id_in_result("Task", filters, self.task["id"]) self.assertFalse(result) def test_in_relation_comma_uuid(self): """ Test that 'in' relation using commas (old format) works with uuid fields. """ - filters = [['uuid', 'in', self.local_storage['uuid'], ]] + filters = [ + [ + "uuid", + "in", + self.local_storage["uuid"], + ] + ] - result = self._id_in_result('LocalStorage', filters, self.local_storage['id']) + result = self._id_in_result("LocalStorage", filters, self.local_storage["id"]) self.assertTrue(result) def test_in_relation_list_uuid(self): """ Test that 'in' relation using list (new format) works with uuid fields. """ - filters = [['uuid', 'in', [self.local_storage['uuid'], ]]] + filters = [ + [ + "uuid", + "in", + [ + self.local_storage["uuid"], + ], + ] + ] - result = self._id_in_result('LocalStorage', filters, self.local_storage['id']) + result = self._id_in_result("LocalStorage", filters, self.local_storage["id"]) self.assertTrue(result) def test_not_in_relation_uuid(self): """ Test that 'not_in' relation using commas (old format) works with uuid fields. 
""" - filters = [['uuid', 'not_in', [self.local_storage['uuid'], ]]] + filters = [ + [ + "uuid", + "not_in", + [ + self.local_storage["uuid"], + ], + ] + ] - result = self._id_in_result('LocalStorage', filters, self.local_storage['id']) + result = self._id_in_result("LocalStorage", filters, self.local_storage["id"]) self.assertFalse(result) def test_find_in(self): """Test use of 'in' relation with find.""" # id # old comma seperated format - filters = [['id', 'in', self.project['id'], 99999]] - projects = self.sg.find('Project', filters) + filters = [["id", "in", self.project["id"], 99999]] + projects = self.sg.find("Project", filters) # can't use 'any' in py 2.4 match = False for project in projects: - if project['id'] == self.project['id']: + if project["id"] == self.project["id"]: match = True self.assertTrue(match) # new list format - filters = [['id', 'in', [self.project['id'], 99999]]] - projects = self.sg.find('Project', filters) + filters = [["id", "in", [self.project["id"], 99999]]] + projects = self.sg.find("Project", filters) # can't use 'any' in py 2.4 match = False for project in projects: - if project['id'] == self.project['id']: + if project["id"] == self.project["id"]: match = True self.assertTrue(match) # text field - filters = [['name', 'in', [self.project['name'], 'fake project name']]] - projects = self.sg.find('Project', filters) + filters = [["name", "in", [self.project["name"], "fake project name"]]] + projects = self.sg.find("Project", filters) project = projects[0] - self.assertEqual(self.project['id'], project['id']) + self.assertEqual(self.project["id"], project["id"]) def test_unsupported_filters(self): - self.assertRaises(shotgun_api3.Fault, self.sg.find_one, 'Shot', - [['image', 'is_not', [{"type": "Thumbnail", "id": 9}]]]) - self.assertRaises(shotgun_api3.Fault, self.sg.find_one, 'HumanUser', [['password_proxy', 'is_not', [None]]]) - self.assertRaises(shotgun_api3.Fault, self.sg.find_one, 'EventLogEntry', [['meta', 'is_not', [None]]]) 
- self.assertRaises(shotgun_api3.Fault, self.sg.find_one, 'Revision', [['meta', 'attachment', [None]]]) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find_one, + "Shot", + [["image", "is_not", [{"type": "Thumbnail", "id": 9}]]], + ) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find_one, + "HumanUser", + [["password_proxy", "is_not", [None]]], + ) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find_one, + "EventLogEntry", + [["meta", "is_not", [None]]], + ) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find_one, + "Revision", + [["meta", "attachment", [None]]], + ) def test_zero_is_not_none(self): - '''Test the zero and None are differentiated using "is_not" filter. - Ticket #25127 - ''' + """Test the zero and None are differentiated using "is_not" filter. + Ticket #25127 + """ # Create a number field if it doesn't already exist - num_field = 'sg_api_tests_number_field' - if num_field not in list(self.sg.schema_field_read('Asset').keys()): - self.sg.schema_field_create('Asset', 'number', num_field.replace('sg_', '').replace('_', ' ')) + num_field = "sg_api_tests_number_field" + if num_field not in list(self.sg.schema_field_read("Asset").keys()): + self.sg.schema_field_create( + "Asset", "number", num_field.replace("sg_", "").replace("_", " ") + ) # Set to None - self.sg.update('Asset', self.asset['id'], {num_field: None}) + self.sg.update("Asset", self.asset["id"], {num_field: None}) # Should be filtered out - result = self.sg.find('Asset', [['id', 'is', self.asset['id']], [num_field, 'is_not', None]], [num_field]) + result = self.sg.find( + "Asset", + [["id", "is", self.asset["id"]], [num_field, "is_not", None]], + [num_field], + ) self.assertEqual([], result) # Set it to zero - self.sg.update('Asset', self.asset['id'], {num_field: 0}) + self.sg.update("Asset", self.asset["id"], {num_field: 0}) # Should not be filtered out - result = self.sg.find_one('Asset', [['id', 'is', self.asset['id']], [num_field, 'is_not', None]], [num_field]) + 
result = self.sg.find_one( + "Asset", + [["id", "is", self.asset["id"]], [num_field, "is_not", None]], + [num_field], + ) self.assertFalse(result is None) # Set it to some other number - self.sg.update('Asset', self.asset['id'], {num_field: 1}) + self.sg.update("Asset", self.asset["id"], {num_field: 1}) # Should not be filtered out - result = self.sg.find_one('Asset', [['id', 'is', self.asset['id']], [num_field, 'is_not', None]], [num_field]) + result = self.sg.find_one( + "Asset", + [["id", "is", self.asset["id"]], [num_field, "is_not", None]], + [num_field], + ) self.assertFalse(result is None) def test_include_archived_projects(self): if self.sg.server_caps.version > (5, 3, 13): # Ticket #25082 - result = self.sg.find_one('Shot', [['id', 'is', self.shot['id']]]) - self.assertEqual(self.shot['id'], result['id']) + result = self.sg.find_one("Shot", [["id", "is", self.shot["id"]]]) + self.assertEqual(self.shot["id"], result["id"]) # archive project - self.sg.update('Project', self.project['id'], {'archived': True}) + self.sg.update("Project", self.project["id"], {"archived": True}) # setting defaults to True, so we should get result - result = self.sg.find_one('Shot', [['id', 'is', self.shot['id']]]) - self.assertEqual(self.shot['id'], result['id']) + result = self.sg.find_one("Shot", [["id", "is", self.shot["id"]]]) + self.assertEqual(self.shot["id"], result["id"]) - result = self.sg.find_one('Shot', [['id', 'is', self.shot['id']]], include_archived_projects=False) + result = self.sg.find_one( + "Shot", [["id", "is", self.shot["id"]]], include_archived_projects=False + ) self.assertEqual(None, result) # unarchive project - self.sg.update('Project', self.project['id'], {'archived': False}) + self.sg.update("Project", self.project["id"], {"archived": False}) class TestFollow(base.LiveTestBase): def test_follow_unfollow(self): - '''Test follow method''' + """Test follow method""" if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): return 
@@ -1758,13 +2005,13 @@ def test_follow_unfollow(self): project=self.project, ) as shot: result = self.sg.follow(human_user, shot) - assert(result['followed']) + assert result["followed"] result = self.sg.unfollow(human_user, shot) - assert(result['unfollowed']) + assert result["unfollowed"] def test_followers(self): - '''Test followers method''' + """Test followers method""" if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): return @@ -1777,18 +2024,21 @@ def test_followers(self): project=self.project, ) as shot: result = self.sg.follow(human_user, shot) - assert(result['followed']) + assert result["followed"] result = self.sg.followers(shot) self.assertEqual(1, len(result)) - self.assertEqual(human_user['id'], result[0]['id']) + self.assertEqual(human_user["id"], result[0]["id"]) def test_following(self): - '''Test following method''' + """Test following method""" if not self.sg.server_caps.version or self.sg.server_caps.version < (7, 0, 12): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return with self.gen_entity( @@ -1802,14 +2052,14 @@ def test_following(self): project=self.project, ) as task: result = self.sg.follow(human_user, shot) - assert(result['followed']) + assert result["followed"] result = self.sg.following(human_user) self.assertEqual(1, len(result)) result = self.sg.follow(human_user, task) - assert(result['followed']) + assert result["followed"] result = self.sg.following(human_user) @@ -1819,24 +2069,32 @@ def test_following(self): result = self.sg.following(human_user, entity_type="Shot") self.assertEqual(1, len(result)) - shot_project_id = self.sg.find_one("Shot", - [["id", "is", shot["id"]]], - ["project.Project.id"])["project.Project.id"] - task_project_id = self.sg.find_one("Task", - [["id", "is", task["id"]]], - 
["project.Project.id"])["project.Project.id"] + shot_project_id = self.sg.find_one( + "Shot", [["id", "is", shot["id"]]], ["project.Project.id"] + )["project.Project.id"] + task_project_id = self.sg.find_one( + "Task", [["id", "is", task["id"]]], ["project.Project.id"] + )["project.Project.id"] project_count = 2 if shot_project_id == task_project_id else 1 - result = self.sg.following(human_user, project={"type": "Project", "id": shot_project_id}) + result = self.sg.following( + human_user, project={"type": "Project", "id": shot_project_id} + ) self.assertEqual(project_count, len(result)) - result = self.sg.following(human_user, project={"type": "Project", "id": task_project_id}) + result = self.sg.following( + human_user, project={"type": "Project", "id": task_project_id} + ) self.assertEqual(project_count, len(result)) - result = self.sg.following(human_user, - project={"type": "Project", "id": shot_project_id}, - entity_type="Shot") + result = self.sg.following( + human_user, + project={"type": "Project", "id": shot_project_id}, + entity_type="Shot", + ) self.assertEqual(1, len(result)) - result = self.sg.following(human_user, - project={"type": "Project", "id": task_project_id}, - entity_type="Task") + result = self.sg.following( + human_user, + project={"type": "Project", "id": task_project_id}, + entity_type="Task", + ) self.assertEqual(1, len(result)) @@ -1846,9 +2104,9 @@ def setUp(self): super(TestErrors, self).setUp(auth_mode) def test_bad_auth(self): - '''test_bad_auth invalid script name or api key raises fault''' + """test_bad_auth invalid script name or api key raises fault""" server_url = self.config.server_url - script_name = 'not_real_script_name' + script_name = "not_real_script_name" api_key = self.config.api_key login = self.config.human_login password = self.config.human_password @@ -1857,48 +2115,94 @@ def test_bad_auth(self): # Test various combinations of illegal arguments self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url) 
self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, None, api_key) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, script_name, None) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, script_name, - api_key, login=login, password=password) + self.assertRaises( + ValueError, shotgun_api3.Shotgun, server_url, script_name, None + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + script_name, + api_key, + login=login, + password=password, + ) self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, login=login) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, password=password) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, script_name, login=login, password=password) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, login=login, auth_token=auth_token) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, password=password, auth_token=auth_token) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, script_name, login=login, - password=password, auth_token=auth_token) - self.assertRaises(ValueError, shotgun_api3.Shotgun, server_url, api_key=api_key, login=login, - password=password, auth_token=auth_token) + self.assertRaises( + ValueError, shotgun_api3.Shotgun, server_url, password=password + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + script_name, + login=login, + password=password, + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + login=login, + auth_token=auth_token, + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + password=password, + auth_token=auth_token, + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + script_name, + login=login, + password=password, + auth_token=auth_token, + ) + self.assertRaises( + ValueError, + shotgun_api3.Shotgun, + server_url, + api_key=api_key, + login=login, + 
password=password, + auth_token=auth_token, + ) # Test failed authentications sg = shotgun_api3.Shotgun(server_url, script_name, api_key) - self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, 'Shot', []) + self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, "Shot", []) script_name = self.config.script_name - api_key = 'notrealapikey' + api_key = "notrealapikey" sg = shotgun_api3.Shotgun(server_url, script_name, api_key) - self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, 'Shot', []) + self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, "Shot", []) - sg = shotgun_api3.Shotgun(server_url, login=login, password='not a real password') - self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, 'Shot', []) + sg = shotgun_api3.Shotgun( + server_url, login=login, password="not a real password" + ) + self.assertRaises(shotgun_api3.AuthenticationFault, sg.find_one, "Shot", []) # This may trigger an account lockdown. Make sure it is not locked anymore. 
user = self.sg.find_one("HumanUser", [["login", "is", login]]) self.sg.update("HumanUser", user["id"], {"locked_until": None}) - @patch('shotgun_api3.shotgun.Http.request') + @patch("shotgun_api3.shotgun.Http.request") def test_status_not_200(self, mock_request): response = MagicMock(name="response mock", spec=dict) response.status = 300 - response.reason = 'reason' + response.reason = "reason" mock_request.return_value = (response, {}) - self.assertRaises(shotgun_api3.ProtocolError, self.sg.find_one, 'Shot', []) + self.assertRaises(shotgun_api3.ProtocolError, self.sg.find_one, "Shot", []) - @patch('shotgun_api3.shotgun.Http.request') + @patch("shotgun_api3.shotgun.Http.request") def test_make_call_retry(self, mock_request): response = MagicMock(name="response mock", spec=dict) response.status = 200 - response.reason = 'reason' + response.reason = "reason" mock_request.return_value = (response, {}) bak_rpc_attempt_interval = self.sg.config.rpc_attempt_interval @@ -1907,15 +2211,13 @@ def test_make_call_retry(self, mock_request): # First: make the request raise a consistent exception mock_request.side_effect = Exception("not working") with self.assertLogs( - 'shotgun_api3', level='DEBUG' - ) as cm1, self.assertRaises( - Exception - ) as cm2: + "shotgun_api3", level="DEBUG" + ) as cm1, self.assertRaises(Exception) as cm2: self.sg.info() self.assertEqual(cm2.exception.args[0], "not working") log_content = "\n".join(cm1.output) - for i in [1,2]: + for i in [1, 2]: self.assertIn( f"Request failed, attempt {i} of 3. 
Retrying", log_content, @@ -1929,7 +2231,7 @@ def test_make_call_retry(self, mock_request): # retry works def my_side_effect(*args, **kwargs): try: - if my_side_effect.counter<1: + if my_side_effect.counter < 1: raise Exception("not working") return mock.DEFAULT @@ -1938,7 +2240,7 @@ def my_side_effect(*args, **kwargs): my_side_effect.counter = 0 mock_request.side_effect = my_side_effect - with self.assertLogs('shotgun_api3', level='DEBUG') as cm: + with self.assertLogs("shotgun_api3", level="DEBUG") as cm: self.assertIsInstance( self.sg.info(), dict, @@ -1957,7 +2259,7 @@ def my_side_effect(*args, **kwargs): # Last: raise a SSLEOFError exception - SG-34910 def my_side_effect2(*args, **kwargs): try: - if my_side_effect2.counter<1: + if my_side_effect2.counter < 1: raise ssl.SSLEOFError( "EOF occurred in violation of protocol (_ssl.c:2426)" ) @@ -1969,7 +2271,7 @@ def my_side_effect2(*args, **kwargs): my_side_effect2.counter = 0 mock_request.side_effect = my_side_effect2 - with self.assertLogs('shotgun_api3', level='DEBUG') as cm: + with self.assertLogs("shotgun_api3", level="DEBUG") as cm: self.assertIsInstance( self.sg.info(), dict, @@ -1988,7 +2290,7 @@ def my_side_effect2(*args, **kwargs): finally: self.sg.config.rpc_attempt_interval = bak_rpc_attempt_interval - @patch('shotgun_api3.shotgun.Http.request') + @patch("shotgun_api3.shotgun.Http.request") def test_sha2_error(self, mock_request): # Simulate the exception raised with SHA-2 errors mock_request.side_effect = ShotgunSSLError( @@ -2028,7 +2330,7 @@ def test_sha2_error(self, mock_request): if original_env_val is not None: os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - @patch('shotgun_api3.shotgun.Http.request') + @patch("shotgun_api3.shotgun.Http.request") def test_sha2_error_with_strict(self, mock_request): # Simulate the exception raised with SHA-2 errors mock_request.side_effect = ShotgunSSLError( @@ -2059,17 +2361,17 @@ def test_sha2_error_with_strict(self, mock_request): if 
original_env_val is not None: os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - @patch.object(urllib.request.OpenerDirector, 'open') + @patch.object(urllib.request.OpenerDirector, "open") def test_sanitized_auth_params(self, mock_open): # Simulate the server blowing up and giving us a 500 error - mock_open.side_effect = urllib.error.HTTPError('url', 500, 'message', {}, None) + mock_open.side_effect = urllib.error.HTTPError("url", 500, "message", {}, None) this_dir, _ = os.path.split(__file__) thumbnail_path = os.path.abspath(os.path.join(this_dir, "sg_logo.jpg")) try: # Try to upload a bogus file - self.sg.upload('Note', 1234, thumbnail_path) + self.sg.upload("Note", 1234, thumbnail_path) except shotgun_api3.ShotgunError as e: self.assertFalse(str(self.api_key) in str(e)) return @@ -2084,20 +2386,39 @@ def test_upload_empty_file(self): """ this_dir, _ = os.path.split(__file__) path = os.path.abspath(os.path.expanduser(os.path.join(this_dir, "empty.txt"))) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload, 'Version', 123, path) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload_thumbnail, 'Version', 123, path) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload_filmstrip_thumbnail, 'Version', - 123, path) + self.assertRaises( + shotgun_api3.ShotgunError, self.sg.upload, "Version", 123, path + ) + self.assertRaises( + shotgun_api3.ShotgunError, self.sg.upload_thumbnail, "Version", 123, path + ) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.upload_filmstrip_thumbnail, + "Version", + 123, + path, + ) def test_upload_missing_file(self): """ Test uploading an missing file raises an error. 
""" path = "/path/to/nowhere/foo.txt" - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload, 'Version', 123, path) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload_thumbnail, 'Version', 123, path) - self.assertRaises(shotgun_api3.ShotgunError, self.sg.upload_filmstrip_thumbnail, 'Version', - 123, path) + self.assertRaises( + shotgun_api3.ShotgunError, self.sg.upload, "Version", 123, path + ) + self.assertRaises( + shotgun_api3.ShotgunError, self.sg.upload_thumbnail, "Version", 123, path + ) + self.assertRaises( + shotgun_api3.ShotgunError, + self.sg.upload_filmstrip_thumbnail, + "Version", + 123, + path, + ) + # def test_malformed_response(self): # # TODO ResponseError @@ -2109,9 +2430,9 @@ def setUp(self): super(TestScriptUserSudoAuth, self).setUp() self.sg.update( - 'HumanUser', - self.human_user['id'], - {'projects': [self.project]}, + "HumanUser", + self.human_user["id"], + {"projects": [self.project]}, ) def test_user_is_creator(self): @@ -2122,30 +2443,32 @@ def test_user_is_creator(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 3, 12): return - x = shotgun_api3.Shotgun(self.config.server_url, - http_proxy=self.config.http_proxy, - sudo_as_login=self.config.human_login, - **self.auth_args) + x = shotgun_api3.Shotgun( + self.config.server_url, + http_proxy=self.config.http_proxy, + sudo_as_login=self.config.human_login, + **self.auth_args, + ) data = { - 'project': self.project, - 'code': 'JohnnyApple_Design01_FaceFinal', - 'description': 'fixed rig per director final notes', - 'sg_status_list': 'na', - 'entity': self.asset, - 'user': self.human_user + "project": self.project, + "code": "JohnnyApple_Design01_FaceFinal", + "description": "fixed rig per director final notes", + "sg_status_list": "na", + "entity": self.asset, + "user": self.human_user, } version = x.create("Version", data, return_fields=["id", "created_by"]) self.assertTrue(isinstance(version, dict)) self.assertTrue("id" in version) 
self.assertTrue("created_by" in version) - self.assertEqual(self.config.human_name, version['created_by']['name']) + self.assertEqual(self.config.human_name, version["created_by"]["name"]) class TestHumanUserSudoAuth(base.TestBase): def setUp(self): - super(TestHumanUserSudoAuth, self).setUp('HumanUser') + super(TestHumanUserSudoAuth, self).setUp("HumanUser") def test_human_user_sudo_auth_fails(self): """ @@ -2158,18 +2481,20 @@ def test_human_user_sudo_auth_fails(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 3, 12): return - x = shotgun_api3.Shotgun(self.config.server_url, - login=self.config.human_login, - password=self.config.human_password, - http_proxy=self.config.http_proxy, - sudo_as_login="blah") - self.assertRaises(shotgun_api3.Fault, x.find_one, 'Shot', []) + x = shotgun_api3.Shotgun( + self.config.server_url, + login=self.config.human_login, + password=self.config.human_password, + http_proxy=self.config.http_proxy, + sudo_as_login="blah", + ) + self.assertRaises(shotgun_api3.Fault, x.find_one, "Shot", []) expected = "The user does not have permission to 'sudo':" try: - x.find_one('Shot', []) + x.find_one("Shot", []) except shotgun_api3.Fault as e: # py24 exceptions don't have message attr - if hasattr(e, 'message'): + if hasattr(e, "message"): self.assertTrue(e.message.startswith(expected)) else: self.assertTrue(e.args[0].startswith(expected)) @@ -2183,30 +2508,31 @@ class TestHumanUserAuth(base.HumanUserAuthLiveTestBase): def test_humanuser_find(self): """Called find, find_one for known entities as human user""" filters = [] - filters.append(['project', 'is', self.project]) - filters.append(['id', 'is', self.version['id']]) + filters.append(["project", "is", self.project]) + filters.append(["id", "is", self.version["id"]]) - fields = ['id'] + fields = ["id"] versions = self.sg.find("Version", filters, fields=fields) self.assertTrue(isinstance(versions, list)) version = versions[0] self.assertEqual("Version", 
version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) version = self.sg.find_one("Version", filters, fields=fields) self.assertEqual("Version", version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) def test_humanuser_upload_thumbnail_for_version(self): """simple upload thumbnail for version test as human user.""" this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # upload thumbnail - thumb_id = self.sg.upload_thumbnail("Version", self.version['id'], path) + thumb_id = self.sg.upload_thumbnail("Version", self.version["id"], path) self.assertTrue(isinstance(thumb_id, int)) # check result on version @@ -2215,17 +2541,23 @@ def test_humanuser_upload_thumbnail_for_version(self): [["id", "is", self.version["id"]]], ) - self.assertEqual(version_with_thumbnail.get('type'), 'Version') - self.assertEqual(version_with_thumbnail.get('id'), self.version['id']) + self.assertEqual(version_with_thumbnail.get("type"), "Version") + self.assertEqual(version_with_thumbnail.get("id"), self.version["id"]) h = Http(".cache") - thumb_resp, content = h.request(version_with_thumbnail.get('image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request(version_with_thumbnail.get("image"), "GET") + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) # clear thumbnail - response_clear_thumbnail = self.sg.update("Version", self.version['id'], {'image': None}) - expected_clear_thumbnail = {'id': self.version['id'], 'image': None, 'type': 'Version'} + response_clear_thumbnail = self.sg.update( + "Version", 
self.version["id"], {"image": None} + ) + expected_clear_thumbnail = { + "id": self.version["id"], + "image": None, + "type": "Version", + } self.assertEqual(expected_clear_thumbnail, response_clear_thumbnail) @@ -2240,21 +2572,21 @@ def test_humanuser_find(self): if self.sg.server_caps.version >= (5, 4, 1): filters = [] - filters.append(['project', 'is', self.project]) - filters.append(['id', 'is', self.version['id']]) + filters.append(["project", "is", self.project]) + filters.append(["id", "is", self.version["id"]]) - fields = ['id'] + fields = ["id"] versions = self.sg.find("Version", filters, fields=fields) self.assertTrue(isinstance(versions, list)) version = versions[0] self.assertEqual("Version", version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) version = self.sg.find_one("Version", filters, fields=fields) self.assertEqual("Version", version["type"]) - self.assertEqual(self.version['id'], version["id"]) + self.assertEqual(self.version["id"], version["id"]) def test_humanuser_upload_thumbnail_for_version(self): """simple upload thumbnail for version test as session based token user.""" @@ -2262,11 +2594,12 @@ def test_humanuser_upload_thumbnail_for_version(self): if self.sg.server_caps.version >= (5, 4, 1): this_dir, _ = os.path.split(__file__) - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # upload thumbnail - thumb_id = self.sg.upload_thumbnail("Version", self.version['id'], path) + thumb_id = self.sg.upload_thumbnail("Version", self.version["id"], path) self.assertTrue(isinstance(thumb_id, int)) # check result on version @@ -2275,17 +2608,23 @@ def test_humanuser_upload_thumbnail_for_version(self): [["id", "is", self.version["id"]]], ) - self.assertEqual(version_with_thumbnail.get('type'), 'Version') - self.assertEqual(version_with_thumbnail.get('id'), 
self.version['id']) + self.assertEqual(version_with_thumbnail.get("type"), "Version") + self.assertEqual(version_with_thumbnail.get("id"), self.version["id"]) h = Http(".cache") - thumb_resp, content = h.request(version_with_thumbnail.get('image'), "GET") - self.assertIn(thumb_resp['status'], ['200', '304']) - self.assertIn(thumb_resp['content-type'], ['image/jpeg', 'image/png']) + thumb_resp, content = h.request(version_with_thumbnail.get("image"), "GET") + self.assertIn(thumb_resp["status"], ["200", "304"]) + self.assertIn(thumb_resp["content-type"], ["image/jpeg", "image/png"]) # clear thumbnail - response_clear_thumbnail = self.sg.update("Version", self.version['id'], {'image': None}) - expected_clear_thumbnail = {'id': self.version['id'], 'image': None, 'type': 'Version'} + response_clear_thumbnail = self.sg.update( + "Version", self.version["id"], {"image": None} + ) + expected_clear_thumbnail = { + "id": self.version["id"], + "image": None, + "type": "Version", + } self.assertEqual(expected_clear_thumbnail, response_clear_thumbnail) @@ -2295,64 +2634,103 @@ def test_logged_in_user(self): if self.sg.server_caps.version and self.sg.server_caps.version < (5, 3, 20): return - sg = shotgun_api3.Shotgun(self.config.server_url, - login=self.config.human_login, - password=self.config.human_password, - http_proxy=self.config.http_proxy) + sg = shotgun_api3.Shotgun( + self.config.server_url, + login=self.config.human_login, + password=self.config.human_password, + http_proxy=self.config.http_proxy, + ) sg.update_project_last_accessed(self.project) - initial = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + initial = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) # Make sure time has elapsed so there is a difference between the two time stamps. 
time.sleep(2) sg.update_project_last_accessed(self.project) - current = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + current = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) self.assertNotEqual(initial, current) # it's possible initial is None - assert(initial['last_accessed_by_current_user'] < current['last_accessed_by_current_user']) + assert ( + initial["last_accessed_by_current_user"] + < current["last_accessed_by_current_user"] + ) def test_pass_in_user(self): if self.sg.server_caps.version and self.sg.server_caps.version < (5, 3, 20): return - sg = shotgun_api3.Shotgun(self.config.server_url, - login=self.config.human_login, - password=self.config.human_password, - http_proxy=self.config.http_proxy) + sg = shotgun_api3.Shotgun( + self.config.server_url, + login=self.config.human_login, + password=self.config.human_password, + http_proxy=self.config.http_proxy, + ) - initial = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + initial = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) time.sleep(1) # this instance of the api is not logged in as a user self.sg.update_project_last_accessed(self.project, user=self.human_user) - current = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + current = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) self.assertNotEqual(initial, current) # it's possible initial is None if initial: - assert(initial['last_accessed_by_current_user'] < current['last_accessed_by_current_user']) + assert ( + initial["last_accessed_by_current_user"] + < current["last_accessed_by_current_user"] + ) def test_sudo_as_user(self): if self.sg.server_caps.version and self.sg.server_caps.version < (5, 3, 20): return - sg = 
shotgun_api3.Shotgun(self.config.server_url, - http_proxy=self.config.http_proxy, - sudo_as_login=self.config.human_login, - **self.auth_args) + sg = shotgun_api3.Shotgun( + self.config.server_url, + http_proxy=self.config.http_proxy, + sudo_as_login=self.config.human_login, + **self.auth_args, + ) - initial = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + initial = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) time.sleep(1) sg.update_project_last_accessed(self.project) - current = sg.find_one('Project', [['id', 'is', self.project['id']]], ['last_accessed_by_current_user']) + current = sg.find_one( + "Project", + [["id", "is", self.project["id"]]], + ["last_accessed_by_current_user"], + ) self.assertNotEqual(initial, current) # it's possible initial is None if initial: - assert(initial['last_accessed_by_current_user'] < current['last_accessed_by_current_user']) + assert ( + initial["last_accessed_by_current_user"] + < current["last_accessed_by_current_user"] + ) class TestActivityStream(base.LiveTestBase): @@ -2364,36 +2742,51 @@ def setUp(self): super(TestActivityStream, self).setUp() self._prefix = uuid.uuid4().hex - self._shot = self.sg.create("Shot", {"code": "%s activity stream test" % self._prefix, - "project": self.project}) + self._shot = self.sg.create( + "Shot", + {"code": "%s activity stream test" % self._prefix, "project": self.project}, + ) - self._note = self.sg.create("Note", {"content": "Test!", - "project": self.project, - "note_links": [self._shot]}) + self._note = self.sg.create( + "Note", + {"content": "Test!", "project": self.project, "note_links": [self._shot]}, + ) # check that if the created_by is a script user, we want to ensure # that event log generation is enabled for this user. If it has been # disabled, these tests will fail because the activity stream is # connected to events. 
In this case, print a warning to the user - d = self.sg.find_one("Shot", - [["id", "is", self._shot["id"]]], - ["created_by.ApiUser.generate_event_log_entries"]) + d = self.sg.find_one( + "Shot", + [["id", "is", self._shot["id"]]], + ["created_by.ApiUser.generate_event_log_entries"], + ) if d["created_by.ApiUser.generate_event_log_entries"] is False: # events are turned off! warn the user - print("WARNING! Looks like the script user that is running these " - "tests has got the generate event log entries setting set to " - "off. This will cause the activity stream tests to fail. " - "Please enable event log generation for the script user.") + print( + "WARNING! Looks like the script user that is running these " + "tests has got the generate event log entries setting set to " + "off. This will cause the activity stream tests to fail. " + "Please enable event log generation for the script user." + ) def tearDown(self): batch_data = [] - batch_data.append({"request_type": "delete", - "entity_type": self._note["type"], - "entity_id": self._note["id"]}) - batch_data.append({"request_type": "delete", - "entity_type": self._shot["type"], - "entity_id": self._shot["id"]}) + batch_data.append( + { + "request_type": "delete", + "entity_type": self._note["type"], + "entity_id": self._note["id"], + } + ) + batch_data.append( + { + "request_type": "delete", + "entity_type": self._shot["type"], + "entity_id": self._shot["id"], + } + ) self.sg.batch(batch_data) super(TestActivityStream, self).tearDown() @@ -2406,14 +2799,15 @@ def test_simple(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return - result = self.sg.activity_stream_read(self._shot["type"], - self._shot["id"]) + result = self.sg.activity_stream_read(self._shot["type"], self._shot["id"]) - expected_keys = ["earliest_update_id", - "entity_id", - "entity_type", - "latest_update_id", - "updates"] + expected_keys = [ + "earliest_update_id", + "entity_id", + "entity_type", + 
"latest_update_id", + "updates", + ] self.assertEqual(set(expected_keys), set(result.keys())) self.assertEqual(len(result["updates"]), 2) @@ -2428,9 +2822,9 @@ def test_limit(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return - result = self.sg.activity_stream_read(self._shot["type"], - self._shot["id"], - limit=1) + result = self.sg.activity_stream_read( + self._shot["type"], self._shot["id"], limit=1 + ) self.assertEqual(len(result["updates"]), 1) self.assertEqual(result["updates"][0]["update_type"], "create") @@ -2444,25 +2838,22 @@ def test_extra_fields(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return - result = self.sg.activity_stream_read(self._shot["type"], - self._shot["id"], - entity_fields={"Shot": ["created_by.HumanUser.image"], - "Note": ["content"]}) + result = self.sg.activity_stream_read( + self._shot["type"], + self._shot["id"], + entity_fields={"Shot": ["created_by.HumanUser.image"], "Note": ["content"]}, + ) self.assertEqual(len(result["updates"]), 2) - self.assertEqual(set(result["updates"][0]["primary_entity"].keys()), - set(["content", - "id", - "name", - "status", - "type"])) + self.assertEqual( + set(result["updates"][0]["primary_entity"].keys()), + set(["content", "id", "name", "status", "type"]), + ) - self.assertEqual(set(result["updates"][1]["primary_entity"].keys()), - set(["created_by.HumanUser.image", - "id", - "name", - "status", - "type"])) + self.assertEqual( + set(result["updates"][1]["primary_entity"].keys()), + set(["created_by.HumanUser.image", "id", "name", "status", "type"]), + ) class TestNoteThreadRead(base.LiveTestBase): @@ -2480,14 +2871,16 @@ def setUp(self): def _check_note(self, data, note_id, additional_fields): # check the expected fields - expected_fields = set(["content", "created_at", "created_by", "id", "type"] + additional_fields) + expected_fields = set( + ["content", "created_at", "created_by", "id", "type"] + 
additional_fields + ) self.assertEqual(expected_fields, set(data.keys())) # check that the data matches the data we get from a find call - note_data = self.sg.find_one("Note", - [["id", "is", note_id]], - list(expected_fields)) + note_data = self.sg.find_one( + "Note", [["id", "is", note_id]], list(expected_fields) + ) # remove images before comparison if ( "created_by.HumanUser.image" in note_data @@ -2500,13 +2893,15 @@ def _check_note(self, data, note_id, additional_fields): def _check_reply(self, data, reply_id, additional_fields): # check the expected fields - expected_fields = set(["content", "created_at", "user", "id", "type"] + additional_fields) + expected_fields = set( + ["content", "created_at", "user", "id", "type"] + additional_fields + ) self.assertEqual(expected_fields, set(data.keys())) # check that the data matches the data we get from a find call - reply_data = self.sg.find_one("Reply", - [["id", "is", reply_id]], - list(expected_fields)) + reply_data = self.sg.find_one( + "Reply", [["id", "is", reply_id]], list(expected_fields) + ) # the reply stream adds an image to the user fields in order # to include thumbnails for users, so remove this before we compare @@ -2517,13 +2912,15 @@ def _check_reply(self, data, reply_id, additional_fields): def _check_attachment(self, data, attachment_id, additional_fields): # check the expected fields - expected_fields = set(["created_at", "created_by", "id", "type"] + additional_fields) + expected_fields = set( + ["created_at", "created_by", "id", "type"] + additional_fields + ) self.assertEqual(expected_fields, set(data.keys())) # check that the data matches the data we get from a find call - attachment_data = self.sg.find_one("Attachment", - [["id", "is", attachment_id]], - list(expected_fields)) + attachment_data = self.sg.find_one( + "Attachment", [["id", "is", attachment_id]], list(expected_fields) + ) # remove images before comparison if "this_file" in attachment_data and "this_file" in data: @@ -2551,21 
+2948,25 @@ def test_simple(self): # reply. For this, make sure that there is a thumbnail # associated with the current user - d = self.sg.find_one("Note", - [["id", "is", note["id"]]], - ["created_by", f"created_by.{user_entity}.image"]) + d = self.sg.find_one( + "Note", + [["id", "is", note["id"]]], + ["created_by", f"created_by.{user_entity}.image"], + ) current_thumbnail = d[f"created_by.{user_entity}.image"] if current_thumbnail is None: # upload thumbnail - self.sg.upload_thumbnail(user_entity, - d["created_by"]["id"], - self._thumbnail_path) + self.sg.upload_thumbnail( + user_entity, d["created_by"]["id"], self._thumbnail_path + ) - d = self.sg.find_one("Note", - [["id", "is", note["id"]]], - ["created_by", f"created_by.{user_entity}.image"]) + d = self.sg.find_one( + "Note", + [["id", "is", note["id"]]], + ["created_by", f"created_by.{user_entity}.image"], + ) current_thumbnail = d[f"created_by.{user_entity}.image"] @@ -2587,8 +2988,10 @@ def test_simple(self): reply_thumb = result[1]["user"]["image"] url_obj_a = urllib.parse.urlparse(current_thumbnail) url_obj_b = urllib.parse.urlparse(reply_thumb) - self.assertEqual("%s/%s" % (url_obj_a.netloc, url_obj_a.path), - "%s/%s" % (url_obj_b.netloc, url_obj_b.path),) + self.assertEqual( + "%s/%s" % (url_obj_a.netloc, url_obj_a.path), + "%s/%s" % (url_obj_b.netloc, url_obj_b.path), + ) # and check ther rest of the data self._check_note(result[0], note["id"], additional_fields=[]) @@ -2615,18 +3018,25 @@ def test_complex(self): return additional_fields = { - "Note": ["created_by.HumanUser.image", - "addressings_to", - "playlist", - "user"], + "Note": [ + "created_by.HumanUser.image", + "addressings_to", + "playlist", + "user", + ], "Reply": ["content"], - "Attachment": ["this_file"] + "Attachment": ["this_file"], } # create note - note = self.sg.create("Note", {"content": "Test!", - "project": self.project, - "addressings_to": [self.human_user]}) + note = self.sg.create( + "Note", + { + "content": "Test!", + 
"project": self.project, + "addressings_to": [self.human_user], + }, + ) # get thread result = self.sg.note_thread_read(note["id"], additional_fields) @@ -2652,7 +3062,9 @@ def test_complex(self): self._check_note(result[0], note["id"], additional_fields["Note"]) self._check_reply(result[1], reply["id"], additional_fields["Reply"]) - self._check_attachment(result[2], attachment_id, additional_fields["Attachment"]) + self._check_attachment( + result[2], attachment_id, additional_fields["Attachment"] + ) class TestTextSearch(base.LiveTestBase): @@ -2668,14 +3080,16 @@ def setUp(self): batch_data = [] for i in range(5): - data = {"code": "%s Text Search %s" % (self._prefix, i), - "project": self.project} - batch_data.append({"request_type": "create", - "entity_type": "Shot", - "data": data}) - batch_data.append({"request_type": "create", - "entity_type": "Asset", - "data": data}) + data = { + "code": "%s Text Search %s" % (self._prefix, i), + "project": self.project, + } + batch_data.append( + {"request_type": "create", "entity_type": "Shot", "data": data} + ) + batch_data.append( + {"request_type": "create", "entity_type": "Asset", "data": data} + ) data = self.sg.batch(batch_data) self._shot_ids = [x["id"] for x in data if x["type"] == "Shot"] @@ -2686,13 +3100,17 @@ def tearDown(self): # clean up batch_data = [] for shot_id in self._shot_ids: - batch_data.append({"request_type": "delete", - "entity_type": "Shot", - "entity_id": shot_id}) + batch_data.append( + {"request_type": "delete", "entity_type": "Shot", "entity_id": shot_id} + ) for asset_id in self._asset_ids: - batch_data.append({"request_type": "delete", - "entity_type": "Asset", - "entity_id": asset_id}) + batch_data.append( + { + "request_type": "delete", + "entity_type": "Asset", + "entity_id": asset_id, + } + ) self.sg.batch(batch_data) super(TestTextSearch, self).tearDown() @@ -2724,7 +3142,9 @@ def test_limit(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return 
- result = self.sg.text_search("%s Text Search" % self._prefix, {"Shot": []}, limit=3) + result = self.sg.text_search( + "%s Text Search" % self._prefix, {"Shot": []}, limit=3 + ) matches = result["matches"] self.assertEqual(len(matches), 3) @@ -2735,8 +3155,9 @@ def test_entity_filter(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return - result = self.sg.text_search("%s Text Search" % self._prefix, - {"Shot": [], "Asset": []}) + result = self.sg.text_search( + "%s Text Search" % self._prefix, {"Shot": [], "Asset": []} + ) matches = result["matches"] @@ -2750,12 +3171,15 @@ def test_complex_entity_filter(self): if not self.sg.server_caps.version or self.sg.server_caps.version < (6, 2, 0): return - result = self.sg.text_search("%s Text Search" % self._prefix, - { - "Shot": [["code", "ends_with", "3"]], - "Asset": [{"filter_operator": "any", - "filters": [["code", "ends_with", "4"]]}] - }) + result = self.sg.text_search( + "%s Text Search" % self._prefix, + { + "Shot": [["code", "ends_with", "3"]], + "Asset": [ + {"filter_operator": "any", "filters": [["code", "ends_with", "4"]]} + ], + }, + ) matches = result["matches"] @@ -2775,131 +3199,175 @@ class TestReadAdditionalFilterPresets(base.LiveTestBase): def test_simple_case(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] - additional_filters = [{"preset_name": "LATEST", "latest_by": "ENTITIES_CREATED_AT"}] + additional_filters = [ + {"preset_name": "LATEST", "latest_by": "ENTITIES_CREATED_AT"} + ] - versions = self.sg.find("Version", filters, fields=fields, 
additional_filter_presets=additional_filters) + versions = self.sg.find( + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) version = versions[0] self.assertEqual("Version", version["type"]) self.assertEqual(self.version["id"], version["id"]) def test_find_one(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] - additional_filters = [{"preset_name": "LATEST", "latest_by": "ENTITIES_CREATED_AT"}] + additional_filters = [ + {"preset_name": "LATEST", "latest_by": "ENTITIES_CREATED_AT"} + ] - version = self.sg.find_one("Version", filters, fields=fields, additional_filter_presets=additional_filters) + version = self.sg.find_one( + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) self.assertEqual("Version", version["type"]) self.assertEqual(self.version["id"], version["id"]) def test_filter_with_no_name(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] additional_filters = [{}] - self.assertRaises(shotgun_api3.Fault, - self.sg.find, - "Version", filters, fields=fields, additional_filter_presets=additional_filters) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find, + "Version", + filters, + fields=fields, + 
additional_filter_presets=additional_filters, + ) def test_invalid_filter(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] additional_filters = [{"preset_name": "BAD_FILTER"}] - self.assertRaises(shotgun_api3.Fault, - self.sg.find, - "Version", filters, fields=fields, additional_filter_presets=additional_filters) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find, + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) def test_filter_not_iterable(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] additional_filters = 3 - self.assertRaises(shotgun_api3.Fault, - self.sg.find, - "Version", filters, fields=fields, additional_filter_presets=additional_filters) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find, + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) def test_filter_not_list_of_iterable(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - 
["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] additional_filters = [3] - self.assertRaises(shotgun_api3.Fault, - self.sg.find, - "Version", filters, fields=fields, additional_filter_presets=additional_filters) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find, + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) def test_multiple_latest_filters(self): if self.sg_version < (7, 0, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return - filters = [ - ["project", "is", self.project], - ["id", "is", self.version["id"]] - ] + filters = [["project", "is", self.project], ["id", "is", self.version["id"]]] fields = ["id"] - additional_filters = ({"preset_name": "LATEST", "latest_by": "ENTITY_CREATED_AT"}, - {"preset_name": "LATEST", "latest_by": "PIPELINE_STEP_NUMBER_AND_ENTITIES_CREATED_AT"}) + additional_filters = ( + {"preset_name": "LATEST", "latest_by": "ENTITY_CREATED_AT"}, + { + "preset_name": "LATEST", + "latest_by": "PIPELINE_STEP_NUMBER_AND_ENTITIES_CREATED_AT", + }, + ) - self.assertRaises(shotgun_api3.Fault, - self.sg.find, - "Version", filters, fields=fields, additional_filter_presets=additional_filters) + self.assertRaises( + shotgun_api3.Fault, + self.sg.find, + "Version", + filters, + fields=fields, + additional_filter_presets=additional_filters, + ) def test_modify_visibility(self): """ @@ -2908,7 +3376,10 @@ def test_modify_visibility(self): # If the version of Shotgun is too old, do not run this test. # TODO: Update this with the real version number once the feature is released. 
if self.sg_version < (8, 5, 0): - warnings.warn("Test bypassed because PTR server used does not support this feature.", FutureWarning) + warnings.warn( + "Test bypassed because PTR server used does not support this feature.", + FutureWarning, + ) return field_display_name = "Project Visibility Test" @@ -2920,7 +3391,9 @@ def test_modify_visibility(self): self.sg.schema_field_create("Asset", "text", "Project Visibility Test") # Grab any two projects that we can use for toggling the visible property with. - projects = self.sg.find("Project", [], order=[{"field_name": "id", "direction": "asc"}]) + projects = self.sg.find( + "Project", [], order=[{"field_name": "id", "direction": "asc"}] + ) project_1 = projects[0] project_2 = projects[1] @@ -2929,21 +3402,27 @@ def test_modify_visibility(self): self.sg.schema_field_update("Asset", field_name, {"visible": True}, project_1) self.assertEqual( {"value": True, "editable": True}, - self.sg.schema_field_read("Asset", field_name, project_1)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name, project_1)[field_name][ + "visible" + ], ) self.sg.schema_field_update("Asset", field_name, {"visible": True}, project_2) self.assertEqual( {"value": True, "editable": True}, - self.sg.schema_field_read("Asset", field_name, project_2)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name, project_2)[field_name][ + "visible" + ], ) # Built-in fields should remain not editable. 
- self.assertFalse(self.sg.schema_field_read("Asset", "code")["code"]["visible"]["editable"]) + self.assertFalse( + self.sg.schema_field_read("Asset", "code")["code"]["visible"]["editable"] + ) # Custom fields should be editable self.assertEqual( {"value": True, "editable": True}, - self.sg.schema_field_read("Asset", field_name)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name)[field_name]["visible"], ) # Hide the field on project 1 @@ -2951,20 +3430,26 @@ def test_modify_visibility(self): # It should not be visible anymore. self.assertEqual( {"value": False, "editable": True}, - self.sg.schema_field_read("Asset", field_name, project_1)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name, project_1)[field_name][ + "visible" + ], ) # The field should be visible on the second project. self.assertEqual( {"value": True, "editable": True}, - self.sg.schema_field_read("Asset", field_name, project_2)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name, project_2)[field_name][ + "visible" + ], ) # Restore the visibility on the field. self.sg.schema_field_update("Asset", field_name, {"visible": True}, project_1) self.assertEqual( {"value": True, "editable": True}, - self.sg.schema_field_read("Asset", field_name, project_1)[field_name]["visible"] + self.sg.schema_field_read("Asset", field_name, project_1)[field_name][ + "visible" + ], ) @@ -2983,6 +3468,7 @@ def test_import_httplib(self): proxied to allow this. """ from shotgun_api3.lib import httplib2 + # Ensure that Http object is available. This is a good indication that # the httplib2 module contents are importable. self.assertTrue(hasattr(httplib2, "Http")) @@ -3003,6 +3489,7 @@ def test_import_httplib(self): # import -- this is a good indication that external httplib2 imports # from shotgun_api3 will work as expected. 
from shotgun_api3.lib.httplib2 import socks + self.assertTrue(isinstance(socks, types.ModuleType)) # Make sure that objects in socks are available as expected self.assertTrue(hasattr(socks, "HTTPError")) @@ -3025,7 +3512,7 @@ def _get_path(url): """ # url_parse returns native objects for older python versions (2.4) if isinstance(url, dict): - return url.get('path') + return url.get("path") elif isinstance(url, tuple): # 3rd component is the path return url[2] @@ -3033,5 +3520,5 @@ def _get_path(url): return url.path -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_client.py b/tests/test_client.py index dc3fa3ec5..e29c6158d 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -18,6 +18,7 @@ from shotgun_api3.lib.six.moves import urllib from shotgun_api3.lib import six, sgutils + try: import simplejson as json except ImportError: @@ -48,12 +49,12 @@ def b64encode(val): class TestShotgunClient(base.MockTestBase): - '''Test case for shotgun api with server interactions mocked.''' + """Test case for shotgun api with server interactions mocked.""" def setUp(self): super(TestShotgunClient, self).setUp() # get domain and uri scheme - match = re.search('(https?://)(.*)', self.server_url) + match = re.search("(https?://)(.*)", self.server_url) self.uri_prefix = match.group(1) self.domain = match.group(2) # always want the mock on @@ -75,8 +76,8 @@ def test_detect_client_caps(self): # todo test for version string (eg. "1.2.3ng") or "unknown" def test_detect_server_caps(self): - '''test_detect_server_caps tests that ServerCapabilities object is made - with appropriate settings for given server version.''' + """test_detect_server_caps tests that ServerCapabilities object is made + with appropriate settings for given server version.""" # has paging is tested else where. 
server_info = {"version": [9, 9, 9]} self._mock_http(server_info) @@ -94,12 +95,14 @@ def test_detect_server_caps(self): self.assertTrue(self.sg.server_caps.is_dev) def test_server_version_json(self): - '''test_server_version_json tests expected versions for json support.''' + """test_server_version_json tests expected versions for json support.""" sc = ServerCapabilities("foo", {"version": (2, 4, 0)}) sc.version = (2, 3, 99) self.assertRaises(api.ShotgunError, sc._ensure_json_supported) - self.assertRaises(api.ShotgunError, ServerCapabilities, "foo", {"version": (2, 2, 0)}) + self.assertRaises( + api.ShotgunError, ServerCapabilities, "foo", {"version": (2, 2, 0)} + ) sc.version = (0, 0, 0) self.assertRaises(api.ShotgunError, sc._ensure_json_supported) @@ -146,18 +149,20 @@ def auth_args(): self.assertRaises(api.Fault, self.sg.delete, "FakeType", 1) self.assertTrue("session_uuid" not in auth_args()) - my_uuid = '5a1d49b0-0c69-11e0-a24c-003048d17544' + my_uuid = "5a1d49b0-0c69-11e0-a24c-003048d17544" self.sg.set_session_uuid(my_uuid) self.assertRaises(api.Fault, self.sg.delete, "FakeType", 1) self.assertEqual(my_uuid, auth_args()["session_uuid"]) def test_url(self): """Server url is parsed correctly""" - login = self.human_user['login'] + login = self.human_user["login"] password = self.human_password self.assertRaises(ValueError, api.Shotgun, None, None, None, connect=False) - self.assertRaises(ValueError, api.Shotgun, "file://foo.com", None, None, connect=False) + self.assertRaises( + ValueError, api.Shotgun, "file://foo.com", None, None, connect=False + ) self.assertEqual("/api3/json", self.sg.config.api_path) @@ -174,7 +179,7 @@ def test_b64encode(self): login = "thelogin" password = "%thepassw0r#$" login_password = "%s:%s" % (login, password) - expected = 'dGhlbG9naW46JXRoZXBhc3N3MHIjJA==' + expected = "dGhlbG9naW46JXRoZXBhc3N3MHIjJA==" result = b64encode(urllib.parse.unquote(login_password)).strip() self.assertEqual(expected, result) @@ -192,8 +197,7 @@ def 
test_read_config(self): def test_split_url(self): """Validate that url parts are properly extracted.""" - sg = api.Shotgun("https://ci.shotgunstudio.com", - "foo", "bar", connect=False) + sg = api.Shotgun("https://ci.shotgunstudio.com", "foo", "bar", connect=False) base_url = "https://ci.shotgunstudio.com" expected_server = "ci.shotgunstudio.com" @@ -225,7 +229,7 @@ def test_split_url(self): def test_authorization(self): """Authorization passed to server""" - login = self.human_user['login'] + login = self.human_user["login"] password = self.human_password login_password = "%s:%s" % (login, password) # login:password@domain @@ -233,7 +237,7 @@ def test_authorization(self): self.sg = api.Shotgun(auth_url, "foo", "bar", connect=False) self._setup_mock() - self._mock_http({'version': [2, 4, 0, u'Dev']}) + self._mock_http({"version": [2, 4, 0, "Dev"]}) self.sg.info() @@ -279,7 +283,7 @@ def test_user_agent(self): client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation] + ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, headers.get("user-agent")) @@ -293,7 +297,7 @@ def test_user_agent(self): client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation] + ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, headers.get("user-agent")) @@ -307,7 +311,7 @@ def test_user_agent(self): client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation] + ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, headers.get("user-agent")) @@ -327,14 +331,15 @@ def test_network_retry(self): self.assertRaises(httplib2.HttpLib2Error, self.sg.info) self.assertTrue( self.sg.config.max_rpc_attempts == self.sg._http_request.call_count, - "Call is repeated") + "Call is repeated", + ) # Ensure that sleep was called with the retry 
interval between each attempt attempt_interval = self.sg.config.rpc_attempt_interval / 1000.0 calls = [mock.callargs(((attempt_interval,), {}))] - calls *= (self.sg.config.max_rpc_attempts - 1) + calls *= self.sg.config.max_rpc_attempts - 1 self.assertTrue( mock_sleep.call_args_list == calls, - "Call is repeated at correct interval." + "Call is repeated at correct interval.", ) def test_set_retry_interval(self): @@ -342,12 +347,15 @@ def test_set_retry_interval(self): original_env_val = os.environ.pop("SHOTGUN_API_RETRY_INTERVAL", None) try: + def run_interval_test(expected_interval, interval_property=None): - self.sg = api.Shotgun(self.config.server_url, - self.config.script_name, - self.config.api_key, - http_proxy=self.config.http_proxy, - connect=self.connect) + self.sg = api.Shotgun( + self.config.server_url, + self.config.script_name, + self.config.api_key, + http_proxy=self.config.http_proxy, + connect=self.connect, + ) self._setup_mock() if interval_property: # if a value was provided for interval_property, set the @@ -424,7 +432,10 @@ def test_call_rpc(self): # Test unicode mixed with utf-8 as reported in Ticket #17959 d = {"results": ["foo", "bar"]} - a = {"utf_str": "\xe2\x88\x9a", "unicode_str": sgutils.ensure_text("\xe2\x88\x9a")} + a = { + "utf_str": "\xe2\x88\x9a", + "unicode_str": sgutils.ensure_text("\xe2\x88\x9a"), + } self._mock_http(d) rv = self.sg._call_rpc("list", a) expected = "rpc response with list result" @@ -460,11 +471,14 @@ def test_upload_s3_503(self): """ this_dir, _ = os.path.split(__file__) storage_url = "http://foo.com/" - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # Expected HTTPError exception error message - expected = "The server is currently down or to busy to reply." \ - "Please try again later." + expected = ( + "The server is currently down or to busy to reply." + "Please try again later." 
+ ) # Test the Internal function that is used to upload each # data part in the context of multi-part uploads to S3, we @@ -474,8 +488,9 @@ def test_upload_s3_503(self): # Test the max retries attempt self.assertTrue( self.sg.MAX_ATTEMPTS == self.sg._make_upload_request.call_count, - f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times") - + f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times", + ) + def test_upload_s3_500(self): """ Test 500 response is retried when uploading to S3. @@ -483,11 +498,14 @@ def test_upload_s3_500(self): self._setup_mock(s3_status_code_error=500) this_dir, _ = os.path.split(__file__) storage_url = "http://foo.com/" - path = os.path.abspath(os.path.expanduser( - os.path.join(this_dir, "sg_logo.jpg"))) + path = os.path.abspath( + os.path.expanduser(os.path.join(this_dir, "sg_logo.jpg")) + ) # Expected HTTPError exception error message - expected = "The server is currently down or to busy to reply." \ - "Please try again later." + expected = ( + "The server is currently down or to busy to reply." + "Please try again later." 
+ ) # Test the Internal function that is used to upload each # data part in the context of multi-part uploads to S3, we @@ -497,8 +515,9 @@ def test_upload_s3_500(self): # Test the max retries attempt self.assertTrue( self.sg.MAX_ATTEMPTS == self.sg._make_upload_request.call_count, - f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times") - + f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times", + ) + def test_upload_s3_urlerror__get_attachment_upload_info(self): """ Test URLError response is retried when invoking _send_form @@ -520,7 +539,7 @@ def test_upload_s3_urlerror__get_attachment_upload_info(self): self.assertEqual( self.sg.MAX_ATTEMPTS, mock_opener.return_value.open.call_count, - f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times" + f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times", ) # Test the exception message @@ -554,7 +573,7 @@ def test_upload_s3_urlerror__upload_to_storage(self): self.assertEqual( self.sg.MAX_ATTEMPTS, self.sg._make_upload_request.call_count, - f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times" + f"Call is repeated up to {self.sg.MAX_ATTEMPTS} times", ) # Test the exception message @@ -566,19 +585,15 @@ def test_transform_data(self): timestamp = time.time() # microseconds will be last during transforms now = datetime.datetime.fromtimestamp(timestamp).replace( - microsecond=0, tzinfo=SG_TIMEZONE.local) - utc_now = datetime.datetime.utcfromtimestamp(timestamp).replace( - microsecond=0) - local = { - "date": now.strftime('%Y-%m-%d'), - "datetime": now, - "time": now.time() - } + microsecond=0, tzinfo=SG_TIMEZONE.local + ) + utc_now = datetime.datetime.utcfromtimestamp(timestamp).replace(microsecond=0) + local = {"date": now.strftime("%Y-%m-%d"), "datetime": now, "time": now.time()} # date will still be the local date, because they are not transformed utc = { - "date": now.strftime('%Y-%m-%d'), + "date": now.strftime("%Y-%m-%d"), "datetime": utc_now, - "time": utc_now.time() + "time": utc_now.time(), } def _datetime(s, f): 
@@ -587,7 +602,7 @@ def _datetime(s, f): def assert_wire(wire, match): self.assertTrue(isinstance(wire["date"], str)) d = _datetime(wire["date"], "%Y-%m-%d").date() - d = wire['date'] + d = wire["date"] self.assertEqual(match["date"], d) self.assertTrue(isinstance(wire["datetime"], str)) d = _datetime(wire["datetime"], "%Y-%m-%dT%H:%M:%SZ") @@ -619,33 +634,35 @@ def assert_wire(wire, match): def test_encode_payload(self): """Request body is encoded as JSON""" - d = {"this is ": u"my data \u00E0"} + d = {"this is ": "my data \u00e0"} j = self.sg._encode_payload(d) self.assertTrue(isinstance(j, bytes)) - d = { - "this is ": u"my data" - } + d = {"this is ": "my data"} j = self.sg._encode_payload(d) self.assertTrue(isinstance(j, bytes)) def test_decode_response_ascii(self): - self._assert_decode_resonse(True, sgutils.ensure_str(u"my data \u00E0", encoding='utf8')) + self._assert_decode_resonse( + True, sgutils.ensure_str("my data \u00e0", encoding="utf8") + ) def test_decode_response_unicode(self): - self._assert_decode_resonse(False, u"my data \u00E0") + self._assert_decode_resonse(False, "my data \u00e0") def _assert_decode_resonse(self, ensure_ascii, data): """HTTP Response is decoded as JSON or text""" headers = {"content-type": "application/json;charset=utf-8"} d = {"this is ": data} - sg = api.Shotgun(self.config.server_url, - self.config.script_name, - self.config.api_key, - http_proxy=self.config.http_proxy, - ensure_ascii=ensure_ascii, - connect=False) + sg = api.Shotgun( + self.config.server_url, + self.config.script_name, + self.config.api_key, + http_proxy=self.config.http_proxy, + ensure_ascii=ensure_ascii, + connect=False, + ) if six.PY3: j = json.dumps(d, ensure_ascii=ensure_ascii) @@ -663,11 +680,11 @@ def test_parse_records(self): """Parse records to replace thumbnail and local paths""" system = platform.system().lower() - if system == 'darwin': + if system == "darwin": local_path_field = "local_path_mac" - elif system in ['windows', 'microsoft']: + 
elif system in ["windows", "microsoft"]: local_path_field = "local_path_windows" - elif system == 'linux': + elif system == "linux": local_path_field = "local_path_linux" orig = { "type": "FakeAsset", @@ -676,11 +693,10 @@ def test_parse_records(self): "foo": { "link_type": "local", local_path_field: "/foo/bar.jpg", - } + }, } url = "http://foo/files/0000/0000/0012/232/shot_thumb.jpg" - self.sg._build_thumb_url = mock.Mock( - return_value=url) + self.sg._build_thumb_url = mock.Mock(return_value=url) modified, txt = self.sg._parse_records([orig, "plain text"]) self.assertEqual("plain text", txt, "non dict value is left as is") @@ -703,14 +719,15 @@ def test_thumb_url(self): url = self.sg._build_thumb_url("FakeAsset", 1234) - self.assertEqual( - "http://foo.com/files/0000/0000/0012/232/shot_thumb.jpg", url) + self.assertEqual("http://foo.com/files/0000/0000/0012/232/shot_thumb.jpg", url) self.assertTrue(self.sg._http_request.called, "http request made to get url") args, _ = self.sg._http_request.call_args verb, path, body, headers = args self.assertEqual( "/upload/get_thumbnail_url?entity_type=FakeAsset&entity_id=1234", - path, "thumbnail url called with correct args") + path, + "thumbnail url called with correct args", + ) resp = "0\nSome Error" self._mock_http(resp, headers={"content-type": "text/plain"}) @@ -722,27 +739,34 @@ def test_thumb_url(self): class TestShotgunClientInterface(base.MockTestBase): - '''Tests expected interface for shotgun module and client''' + """Tests expected interface for shotgun module and client""" def test_client_interface(self): - expected_attributes = ['base_url', - 'config', - 'client_caps', - 'server_caps'] + expected_attributes = ["base_url", "config", "client_caps", "server_caps"] for expected_attribute in expected_attributes: if not hasattr(self.sg, expected_attribute): - assert False, '%s not found on %s' % (expected_attribute, - self.sg) + assert False, "%s not found on %s" % (expected_attribute, self.sg) def 
test_module_interface(self): import shotgun_api3 - expected_contents = ['Shotgun', 'ShotgunError', 'Fault', - 'ProtocolError', 'ResponseError', 'Error', - 'sg_timezone', '__version__'] + + expected_contents = [ + "Shotgun", + "ShotgunError", + "Fault", + "ProtocolError", + "ResponseError", + "Error", + "sg_timezone", + "__version__", + ] for expected_content in expected_contents: if not hasattr(shotgun_api3, expected_content): - assert False, '%s not found on module %s' % (expected_content, shotgun_api3) + assert False, "%s not found on module %s" % ( + expected_content, + shotgun_api3, + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_config_file b/tests/test_config_file index 8215eecde..d642f96c2 100644 --- a/tests/test_config_file +++ b/tests/test_config_file @@ -4,4 +4,4 @@ script_name : xyz api_key : %%abce [TEST_DATA] -project_name : hjkl \ No newline at end of file +project_name : hjkl diff --git a/tests/test_mockgun.py b/tests/test_mockgun.py index 84e5cb2e7..1395355fa 100644 --- a/tests/test_mockgun.py +++ b/tests/test_mockgun.py @@ -42,14 +42,11 @@ from shotgun_api3 import ShotgunError -mockgun_schema_folder = os.path.join( - os.path.dirname(__file__), - "mockgun" -) +mockgun_schema_folder = os.path.join(os.path.dirname(__file__), "mockgun") Mockgun.set_schema_paths( os.path.join(mockgun_schema_folder, "schema.pickle"), - os.path.join(mockgun_schema_folder, "schema_entity.pickle") + os.path.join(mockgun_schema_folder, "schema_entity.pickle"), ) @@ -64,6 +61,7 @@ def test_interface_intact(self): """ from shotgun_api3.lib import mockgun + # Try to access everything. If something is missing, it will raise an # error. 
mockgun.MockgunError @@ -82,7 +80,9 @@ def setUp(self): """ super(TestValidateFilterSyntax, self).setUp() - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" + ) self._mockgun.create("Shot", {"code": "shot"}) @@ -94,24 +94,16 @@ def test_filter_array_or_dict(self): self._mockgun.find( "Shot", [ - { - "filter_operator": "any", - "filters": [["code", "is", "shot"]] - }, - [ - "code", "is", "shot" - ] - ] + {"filter_operator": "any", "filters": [["code", "is", "shot"]]}, + ["code", "is", "shot"], + ], ) # We can't have not dict/list values for filters however. self.assertRaisesRegex( ShotgunError, "Filters can only be lists or dictionaries, not int.", - lambda: self._mockgun.find( - "Shot", - [1] - ) + lambda: self._mockgun.find("Shot", [1]), ) @@ -124,14 +116,21 @@ def setUp(self): """ Creates test data. """ - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" + ) - self._project_link = self._mockgun.create("Project", {"name": "project", "archived": False}) + self._project_link = self._mockgun.create( + "Project", {"name": "project", "archived": False} + ) # This entity will ensure that a populated link field will be comparable. self._mockgun.create( "PipelineConfiguration", - {"code": "with_project", "project": self._project_link, } + { + "code": "with_project", + "project": self._project_link, + }, ) # This entity will ensure that an unpopulated link field will be comparable. 
@@ -145,17 +144,23 @@ def test_searching_for_none_entity_field(self): items = self._mockgun.find("PipelineConfiguration", [["project", "is", None]]) self.assertEqual(len(items), 1) - items = self._mockgun.find("PipelineConfiguration", [["project", "is_not", None]]) + items = self._mockgun.find( + "PipelineConfiguration", [["project", "is_not", None]] + ) self.assertEqual(len(items), 1) def test_searching_for_initialized_entity_field(self): """ Ensures that comparison with an entity works. """ - items = self._mockgun.find("PipelineConfiguration", [["project", "is", self._project_link]]) + items = self._mockgun.find( + "PipelineConfiguration", [["project", "is", self._project_link]] + ) self.assertEqual(len(items), 1) - items = self._mockgun.find("PipelineConfiguration", [["project", "is_not", self._project_link]]) + items = self._mockgun.find( + "PipelineConfiguration", [["project", "is_not", self._project_link]] + ) self.assertEqual(len(items), 1) def test_find_entity_with_none_link(self): @@ -164,7 +169,9 @@ def test_find_entity_with_none_link(self): """ # The pipeline configuration without_project doesn't have the project field set, so we're expecting # it to not be returned here. - items = self._mockgun.find("PipelineConfiguration", [["project.Project.archived", "is", False]]) + items = self._mockgun.find( + "PipelineConfiguration", [["project.Project.archived", "is", False]] + ) self.assertEqual(len(items), 1) self.assertEqual(items[0]["id"], self._project_link["id"]) @@ -173,11 +180,14 @@ class TestTextFieldOperators(unittest.TestCase): """ Checks if text field comparison work. """ + def setUp(self): """ Creates test data. 
""" - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" + ) self._user = self._mockgun.create("HumanUser", {"login": "user"}) def test_operator_contains(self): @@ -198,7 +208,9 @@ def setUp(self): Creates test data. """ - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" + ) # Create two users to assign to the pipeline configurations. self._user1 = self._mockgun.create("HumanUser", {"login": "user1"}) @@ -206,54 +218,67 @@ def setUp(self): # Create pipeline configurations that are assigned none, one or two users. self._mockgun.create( - "PipelineConfiguration", - {"code": "with_user1", "users": [self._user1]} + "PipelineConfiguration", {"code": "with_user1", "users": [self._user1]} ) self._mockgun.create( - "PipelineConfiguration", - {"code": "with_user2", "users": [self._user2]} + "PipelineConfiguration", {"code": "with_user2", "users": [self._user2]} ) self._mockgun.create( "PipelineConfiguration", - {"code": "with_both", "users": [self._user2, self._user1]} + {"code": "with_both", "users": [self._user2, self._user1]}, ) self._mockgun.create( - "PipelineConfiguration", - {"code": "with_none", "users": []} + "PipelineConfiguration", {"code": "with_none", "users": []} ) def test_find_by_sub_entity_field(self): """ Ensures that queries on linked entity fields works. 
""" - items = self._mockgun.find("PipelineConfiguration", [["users.HumanUser.login", "is", "user1"]]) + items = self._mockgun.find( + "PipelineConfiguration", [["users.HumanUser.login", "is", "user1"]] + ) self.assertEqual(len(items), 2) - items = self._mockgun.find("PipelineConfiguration", [["users.HumanUser.login", "is", "user2"]]) + items = self._mockgun.find( + "PipelineConfiguration", [["users.HumanUser.login", "is", "user2"]] + ) self.assertEqual(len(items), 2) - items = self._mockgun.find("PipelineConfiguration", [["users.HumanUser.login", "contains", "ser"]]) + items = self._mockgun.find( + "PipelineConfiguration", [["users.HumanUser.login", "contains", "ser"]] + ) self.assertEqual(len(items), 3) # Lets get fancy a bit. - items = self._mockgun.find("PipelineConfiguration", [{ - "filter_operator": "any", - "filters": [ - ["users.HumanUser.login", "is", "user1"], - ["users.HumanUser.login", "is", "user2"] - ]}] + items = self._mockgun.find( + "PipelineConfiguration", + [ + { + "filter_operator": "any", + "filters": [ + ["users.HumanUser.login", "is", "user1"], + ["users.HumanUser.login", "is", "user2"], + ], + } + ], ) self.assertEqual(len(items), 3) - items = self._mockgun.find("PipelineConfiguration", [{ - "filter_operator": "all", - "filters": [ - ["users.HumanUser.login", "is", "user1"], - ["users.HumanUser.login", "is", "user2"] - ]}] + items = self._mockgun.find( + "PipelineConfiguration", + [ + { + "filter_operator": "all", + "filters": [ + ["users.HumanUser.login", "is", "user1"], + ["users.HumanUser.login", "is", "user2"], + ], + } + ], ) self.assertEqual(len(items), 1) @@ -261,16 +286,20 @@ def test_find_with_none(self): """ Ensures comparison with multi-entity fields and None works. 
""" - items = self._mockgun.find("PipelineConfiguration", [["users", "is", None]], ["users"]) + items = self._mockgun.find( + "PipelineConfiguration", [["users", "is", None]], ["users"] + ) self.assertEqual(len(items), 1) self.assertEqual(items[0]["users"], []) - items = self._mockgun.find("PipelineConfiguration", [["users", "is_not", None]], ["users"]) + items = self._mockgun.find( + "PipelineConfiguration", [["users", "is_not", None]], ["users"] + ) self.assertEqual(len(items), 3) for item in items: self.assertTrue(len(item["users"]) > 0) - + class TestMultiEntityFieldUpdate(unittest.TestCase): """ Ensures multi entity field update modes work. @@ -281,13 +310,15 @@ def setUp(self): Creates test data. """ - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" + ) # Create two versions to assign to the shot. self._version1 = self._mockgun.create("Version", {"code": "version1"}) self._version2 = self._mockgun.create("Version", {"code": "version2"}) self._version3 = self._mockgun.create("Version", {"code": "version3"}) - + # remove 'code' field for later comparisons del self._version1["code"] del self._version2["code"] @@ -296,15 +327,18 @@ def setUp(self): # Create playlists self._add_playlist = self._mockgun.create( "Playlist", - {"code": "playlist1", "versions": [self._version1, self._version2]} + {"code": "playlist1", "versions": [self._version1, self._version2]}, ) self._remove_playlist = self._mockgun.create( "Playlist", - {"code": "playlist1", "versions": [self._version1, self._version2, self._version3]} + { + "code": "playlist1", + "versions": [self._version1, self._version2, self._version3], + }, ) self._set_playlist = self._mockgun.create( "Playlist", - {"code": "playlist1", "versions": [self._version1, self._version2]} + {"code": "playlist1", "versions": [self._version1, self._version2]}, ) def test_update_add(self): @@ -312,8 
+346,10 @@ def test_update_add(self): Ensures that "add" multi_entity_update_mode works. """ self._mockgun.update( - "Playlist", self._add_playlist["id"], {"versions": [self._version3]}, - multi_entity_update_modes={"versions": "add"} + "Playlist", + self._add_playlist["id"], + {"versions": [self._version3]}, + multi_entity_update_modes={"versions": "add"}, ) playlist = self._mockgun.find_one( @@ -328,8 +364,10 @@ def test_update_remove(self): Ensures that "remove" multi_entity_update_mode works. """ self._mockgun.update( - "Playlist", self._remove_playlist["id"], {"versions": [self._version2]}, - multi_entity_update_modes={"versions": "remove"} + "Playlist", + self._remove_playlist["id"], + {"versions": [self._version2]}, + multi_entity_update_modes={"versions": "remove"}, ) playlist = self._mockgun.find_one( @@ -345,14 +383,14 @@ def test_update_set(self): "Playlist", self._set_playlist["id"], {"versions": [self._version2, self._version3]}, - multi_entity_update_modes={"versions": "set"} + multi_entity_update_modes={"versions": "set"}, ) playlist = self._mockgun.find_one( "Playlist", [["id", "is", self._set_playlist["id"]]], ["versions"] ) self.assertEqual(playlist["versions"], [self._version2, self._version3]) - + def test_batch_update(self): self._mockgun.batch( [ @@ -361,7 +399,7 @@ def test_batch_update(self): "entity_type": "Playlist", "entity_id": self._set_playlist["id"], "data": {"versions": [self._version1, self._version2]}, - "multi_entity_update_modes": {"versions": "set"} + "multi_entity_update_modes": {"versions": "set"}, } ] ) @@ -382,44 +420,24 @@ def setUp(self): """ super(TestFilterOperator, self).setUp() - self._mockgun = Mockgun("https://test.shotgunstudio.com", login="user", password="1234") - - self._prj1_link = self._mockgun.create( - "Project", - { - "name": "prj1" - } + self._mockgun = Mockgun( + "https://test.shotgunstudio.com", login="user", password="1234" ) - self._prj2_link = self._mockgun.create( - "Project", - { - "name": "prj2" - } 
- ) + self._prj1_link = self._mockgun.create("Project", {"name": "prj1"}) + + self._prj2_link = self._mockgun.create("Project", {"name": "prj2"}) self._shot1 = self._mockgun.create( - "Shot", - { - "code": "shot1", - "project": self._prj1_link - } + "Shot", {"code": "shot1", "project": self._prj1_link} ) self._shot2 = self._mockgun.create( - "Shot", - { - "code": "shot2", - "project": self._prj1_link - } + "Shot", {"code": "shot2", "project": self._prj1_link} ) self._shot3 = self._mockgun.create( - "Shot", - { - "code": "shot3", - "project": self._prj2_link - } + "Shot", {"code": "shot3", "project": self._prj2_link} ) def test_simple_filter_operators(self): @@ -428,26 +446,24 @@ def test_simple_filter_operators(self): """ shots = self._mockgun.find( "Shot", - [{ - "filter_operator": "any", - "filters": [ - ["code", "is", "shot1"], - ["code", "is", "shot2"] - ] - }] + [ + { + "filter_operator": "any", + "filters": [["code", "is", "shot1"], ["code", "is", "shot2"]], + } + ], ) self.assertEqual(len(shots), 2) shots = self._mockgun.find( "Shot", - [{ - "filter_operator": "all", - "filters": [ - ["code", "is", "shot1"], - ["code", "is", "shot2"] - ] - }] + [ + { + "filter_operator": "all", + "filters": [["code", "is", "shot1"], ["code", "is", "shot2"]], + } + ], ) self.assertEqual(len(shots), 0) @@ -467,19 +483,19 @@ def test_nested_filter_operators(self): "filter_operator": "all", "filters": [ ["code", "is", "shot1"], - ["project", "is", self._prj1_link] - ] + ["project", "is", self._prj1_link], + ], }, { "filter_operator": "all", "filters": [ ["code", "is", "shot3"], - ["project", "is", self._prj2_link] - ] - } - ] + ["project", "is", self._prj2_link], + ], + }, + ], } - ] + ], ) self.assertEqual(len(shots), 2) @@ -490,24 +506,14 @@ def test_invalid_operator(self): ShotgunError, "Unknown filter_operator type: bad", lambda: self._mockgun.find( - "Shot", - [ - { - "filter_operator": "bad", - "filters": [] - } - ]) + "Shot", [{"filter_operator": "bad", "filters": []}] + 
), ) self.assertRaisesRegex( ShotgunError, "Bad filter operator, requires keys 'filter_operator' and 'filters',", - lambda: self._mockgun.find( - "Shot", - [ - { - } - ]) + lambda: self._mockgun.find("Shot", [{}]), ) @@ -535,5 +541,5 @@ def test_set_server_params_with_url_with_path(self): self.assertEqual(mockgun.config.api_path, "/something/api3/json") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 85f70500a..cb713cd9d 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -15,22 +15,24 @@ class ServerConnectionTest(base.TestBase): - '''Tests for server connection''' + """Tests for server connection""" + def setUp(self): super(ServerConnectionTest, self).setUp() def test_connection(self): - '''Tests server connects and returns nothing''' + """Tests server connects and returns nothing""" result = self.sg.connect() self.assertEqual(result, None) def test_proxy_info(self): - '''check proxy value depending http_proxy setting in config''' + """check proxy value depending http_proxy setting in config""" self.sg.connect() if self.config.http_proxy: sys.stderr.write("[WITH PROXY] ") - self.assertTrue(isinstance(self.sg._connection.proxy_info, - api.lib.httplib2.ProxyInfo)) + self.assertTrue( + isinstance(self.sg._connection.proxy_info, api.lib.httplib2.ProxyInfo) + ) else: sys.stderr.write("[NO PROXY] ") self.assertEqual(self.sg._connection.proxy_info, None) diff --git a/tests/test_unit.py b/tests/test_unit.py index 84304cab7..de996c553 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -21,30 +21,35 @@ class TestShotgunInit(unittest.TestCase): - '''Test case for Shotgun.__init__''' + """Test case for Shotgun.__init__""" + def setUp(self): - self.server_path = 'http://server_path' - self.script_name = 'script_name' - self.api_key = 'api_key' + self.server_path = "http://server_path" + self.script_name = "script_name" + self.api_key = "api_key" # Proxy Server Tests def 
test_http_proxy_server(self): proxy_server = "someserver.com" http_proxy = proxy_server - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, 8080) proxy_server = "123.456.789.012" http_proxy = proxy_server - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, 8080) @@ -52,21 +57,25 @@ def test_http_proxy_server_and_port(self): proxy_server = "someserver.com" proxy_port = 1234 http_proxy = "%s:%d" % (proxy_server, proxy_port) - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, proxy_port) proxy_server = "123.456.789.012" proxy_port = 1234 http_proxy = "%s:%d" % (proxy_server, proxy_port) - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, proxy_port) @@ -75,13 +84,14 @@ def test_http_proxy_server_and_port_with_authentication(self): proxy_port = 1234 proxy_user = "user" proxy_pass = "password" - http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, - proxy_port) 
- sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, proxy_port) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, proxy_port) self.assertEqual(sg.config.proxy_user, proxy_user) @@ -90,13 +100,14 @@ def test_http_proxy_server_and_port_with_authentication(self): proxy_port = 1234 proxy_user = "user" proxy_pass = "password" - http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, - proxy_port) - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, proxy_port) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, proxy_port) self.assertEqual(sg.config.proxy_user, proxy_user) @@ -107,13 +118,14 @@ def test_http_proxy_with_at_in_password(self): proxy_port = 1234 proxy_user = "user" proxy_pass = "p@ssword" - http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, - proxy_port) - sg = api.Shotgun(self.server_path, - self.script_name, - self.api_key, - http_proxy=http_proxy, - connect=False) + http_proxy = "%s:%s@%s:%d" % (proxy_user, proxy_pass, proxy_server, proxy_port) + sg = api.Shotgun( + self.server_path, + self.script_name, + self.api_key, + http_proxy=http_proxy, + connect=False, + ) self.assertEqual(sg.config.proxy_server, proxy_server) self.assertEqual(sg.config.proxy_port, proxy_port) self.assertEqual(sg.config.proxy_user, proxy_user) @@ -121,63 +133,63 @@ def test_http_proxy_with_at_in_password(self): def test_malformatted_proxy_info(self): conn_info = { - 
'base_url': self.server_path, - 'script_name': self.script_name, - 'api_key': self.api_key, - 'connect': False, + "base_url": self.server_path, + "script_name": self.script_name, + "api_key": self.api_key, + "connect": False, } - conn_info['http_proxy'] = 'http://someserver.com' + conn_info["http_proxy"] = "http://someserver.com" self.assertRaises(ValueError, api.Shotgun, **conn_info) - conn_info['http_proxy'] = 'user@someserver.com' + conn_info["http_proxy"] = "user@someserver.com" self.assertRaises(ValueError, api.Shotgun, **conn_info) - conn_info['http_proxy'] = 'someserver.com:1234:5678' + conn_info["http_proxy"] = "someserver.com:1234:5678" self.assertRaises(ValueError, api.Shotgun, **conn_info) class TestShotgunSummarize(unittest.TestCase): - '''Test case for _create_summary_request function and parameter + """Test case for _create_summary_request function and parameter validation as it exists in Shotgun.summarize. - Does not require database connection or test data.''' + Does not require database connection or test data.""" + def setUp(self): - self.sg = api.Shotgun('http://server_path', - 'script_name', - 'api_key', - connect=False) + self.sg = api.Shotgun( + "http://server_path", "script_name", "api_key", connect=False + ) def test_filter_operator_none(self): - expected_logical_operator = 'and' + expected_logical_operator = "and" filter_operator = None self._assert_filter_operator(expected_logical_operator, filter_operator) def _assert_filter_operator(self, expected_logical_operator, filter_operator): - result = self.get_call_rpc_params(None, {'filter_operator': filter_operator}) - actual_logical_operator = result['filters']['logical_operator'] + result = self.get_call_rpc_params(None, {"filter_operator": filter_operator}) + actual_logical_operator = result["filters"]["logical_operator"] self.assertEqual(expected_logical_operator, actual_logical_operator) def test_filter_operator_all(self): - expected_logical_operator = 'and' - filter_operator = 'all' + 
expected_logical_operator = "and" + filter_operator = "all" self._assert_filter_operator(expected_logical_operator, filter_operator) def test_filter_operator_or(self): - expected_logical_operator = 'or' - filter_operator = 'or' + expected_logical_operator = "or" + filter_operator = "or" self._assert_filter_operator(expected_logical_operator, filter_operator) def test_filters(self): - path = 'path' - relation = 'relation' - value = 'value' - expected_condition = {'path': path, 'relation': relation, 'values': [value]} - args = ['', [[path, relation, value]], None] + path = "path" + relation = "relation" + value = "value" + expected_condition = {"path": path, "relation": relation, "values": [value]} + args = ["", [[path, relation, value]], None] result = self.get_call_rpc_params(args, {}) - actual_condition = result['filters']['conditions'][0] + actual_condition = result["filters"]["conditions"][0] self.assertEqual(expected_condition, actual_condition) - @patch('shotgun_api3.Shotgun._call_rpc') + @patch("shotgun_api3.Shotgun._call_rpc") def get_call_rpc_params(self, args, kws, call_rpc): - '''Return params sent to _call_rpc from summarize.''' + """Return params sent to _call_rpc from summarize.""" if not args: args = [None, [], None] self.sg.summarize(*args, **kws) @@ -185,62 +197,72 @@ def get_call_rpc_params(self, args, kws, call_rpc): def test_grouping(self): result = self.get_call_rpc_params(None, {}) - self.assertFalse('grouping' in result) - grouping = ['something'] - kws = {'grouping': grouping} + self.assertFalse("grouping" in result) + grouping = ["something"] + kws = {"grouping": grouping} result = self.get_call_rpc_params(None, kws) - self.assertEqual(grouping, result['grouping']) + self.assertEqual(grouping, result["grouping"]) def test_grouping_type(self): - '''test_grouping_type tests that grouping parameter is a list or None''' - self.assertRaises(ValueError, self.sg.summarize, '', [], [], grouping='Not a list') + """test_grouping_type tests that 
grouping parameter is a list or None""" + self.assertRaises( + ValueError, self.sg.summarize, "", [], [], grouping="Not a list" + ) class TestShotgunBatch(unittest.TestCase): def setUp(self): - self.sg = api.Shotgun('http://server_path', - 'script_name', - 'api_key', - connect=False) + self.sg = api.Shotgun( + "http://server_path", "script_name", "api_key", connect=False + ) def test_missing_required_key(self): req = {} # requires keys request_type and entity_type self.assertRaises(api.ShotgunError, self.sg.batch, [req]) - req['entity_type'] = 'Entity' + req["entity_type"] = "Entity" self.assertRaises(api.ShotgunError, self.sg.batch, [req]) - req['request_type'] = 'not_real_type' + req["request_type"] = "not_real_type" self.assertRaises(api.ShotgunError, self.sg.batch, [req]) # create requires data key - req['request_type'] = 'create' + req["request_type"] = "create" self.assertRaises(api.ShotgunError, self.sg.batch, [req]) # update requires entity_id and data - req['request_type'] = 'update' - req['data'] = {} + req["request_type"] = "update" + req["data"] = {} self.assertRaises(api.ShotgunError, self.sg.batch, [req]) - del req['data'] - req['entity_id'] = 2334 + del req["data"] + req["entity_id"] = 2334 self.assertRaises(api.ShotgunError, self.sg.batch, [req]) # delete requires entity_id - req['request_type'] = 'delete' - del req['entity_id'] + req["request_type"] = "delete" + del req["entity_id"] self.assertRaises(api.ShotgunError, self.sg.batch, [req]) class TestServerCapabilities(unittest.TestCase): def test_no_server_version(self): - self.assertRaises(api.ShotgunError, api.shotgun.ServerCapabilities, 'host', {}) + self.assertRaises(api.ShotgunError, api.shotgun.ServerCapabilities, "host", {}) def test_bad_version(self): - '''test_bad_meta tests passing bad meta data type''' - self.assertRaises(api.ShotgunError, api.shotgun.ServerCapabilities, 'host', {'version': (0, 0, 0)}) + """test_bad_meta tests passing bad meta data type""" + self.assertRaises( + 
api.ShotgunError, + api.shotgun.ServerCapabilities, + "host", + {"version": (0, 0, 0)}, + ) def test_dev_version(self): - serverCapabilities = api.shotgun.ServerCapabilities('host', {'version': (3, 4, 0, 'Dev')}) + serverCapabilities = api.shotgun.ServerCapabilities( + "host", {"version": (3, 4, 0, "Dev")} + ) self.assertEqual(serverCapabilities.version, (3, 4, 0)) self.assertTrue(serverCapabilities.is_dev) - serverCapabilities = api.shotgun.ServerCapabilities('host', {'version': (2, 4, 0)}) + serverCapabilities = api.shotgun.ServerCapabilities( + "host", {"version": (2, 4, 0)} + ) self.assertEqual(serverCapabilities.version, (2, 4, 0)) self.assertFalse(serverCapabilities.is_dev) @@ -248,13 +270,13 @@ def test_dev_version(self): class TestClientCapabilities(unittest.TestCase): def test_darwin(self): - self.assert_platform('Darwin', 'mac') + self.assert_platform("Darwin", "mac") def test_windows(self): - self.assert_platform('win32', 'windows') + self.assert_platform("win32", "windows") def test_linux(self): - self.assert_platform('Linux', 'linux') + self.assert_platform("Linux", "linux") def assert_platform(self, sys_ret_val, expected): platform = api.shotgun.sys.platform @@ -278,12 +300,12 @@ def test_no_platform(self): finally: api.shotgun.sys.platform = platform - @patch('shotgun_api3.shotgun.sys') + @patch("shotgun_api3.shotgun.sys") def test_py_version(self, mock_sys): major = 2 minor = 7 micro = 3 - mock_sys.version_info = (major, minor, micro, 'final', 0) + mock_sys.version_info = (major, minor, micro, "final", 0) expected_py_version = "%s.%s" % (major, minor) client_caps = api.shotgun.ClientCapabilities() self.assertEqual(client_caps.py_version, expected_py_version) @@ -293,26 +315,20 @@ class TestFilters(unittest.TestCase): maxDiff = None def test_empty(self): - expected = { - "logical_operator": "and", - "conditions": [] - } + expected = {"logical_operator": "and", "conditions": []} result = api.shotgun._translate_filters([], None) 
self.assertEqual(result, expected) def test_simple(self): - filters = [ - ["code", "is", "test"], - ["sg_status_list", "is", "ip"] - ] + filters = [["code", "is", "test"], ["sg_status_list", "is", "ip"]] expected = { "logical_operator": "or", "conditions": [ {"path": "code", "relation": "is", "values": ["test"]}, - {"path": "sg_status_list", "relation": "is", "values": ["ip"]} - ] + {"path": "sg_status_list", "relation": "is", "values": ["ip"]}, + ], } result = api.shotgun._translate_filters(filters, "any") @@ -323,20 +339,20 @@ def test_arrays(self): expected = { "logical_operator": "and", "conditions": [ - {"path": "code", "relation": "in", "values": ["test1", "test2", "test3"]} - ] + { + "path": "code", + "relation": "in", + "values": ["test1", "test2", "test3"], + } + ], } - filters = [ - ["code", "in", "test1", "test2", "test3"] - ] + filters = [["code", "in", "test1", "test2", "test3"]] result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) - filters = [ - ["code", "in", ["test1", "test2", "test3"]] - ] + filters = [["code", "in", ["test1", "test2", "test3"]]] result = api.shotgun._translate_filters(filters, "all") self.assertEqual(result, expected) @@ -353,11 +369,11 @@ def test_nested(self): "filter_operator": "all", "filters": [ ["sg_status_list", "is", "hld"], - ["assets", "is", {"type": "Asset", "id": 9}] - ] - } - ] - } + ["assets", "is", {"type": "Asset", "id": 9}], + ], + }, + ], + }, ] expected = { @@ -372,13 +388,21 @@ def test_nested(self): { "logical_operator": "and", "conditions": [ - {"path": "sg_status_list", "relation": "is", "values": ["hld"]}, - {"path": "assets", "relation": "is", "values": [{"type": "Asset", "id": 9}]}, - ] - } - ] - } - ] + { + "path": "sg_status_list", + "relation": "is", + "values": ["hld"], + }, + { + "path": "assets", + "relation": "is", + "values": [{"type": "Asset", "id": 9}], + }, + ], + }, + ], + }, + ], } result = api.shotgun._translate_filters(filters, "all") @@ -386,27 
+410,27 @@ def test_nested(self): def test_invalid(self): self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, [], "bogus") - self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, ["bogus"], "all") + self.assertRaises( + api.ShotgunError, api.shotgun._translate_filters, ["bogus"], "all" + ) - filters = [{ - "filter_operator": "bogus", - "filters": [] - }] + filters = [{"filter_operator": "bogus", "filters": []}] - self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, filters, "all") + self.assertRaises( + api.ShotgunError, api.shotgun._translate_filters, filters, "all" + ) - filters = [{ - "filters": [] - }] + filters = [{"filters": []}] - self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, filters, "all") + self.assertRaises( + api.ShotgunError, api.shotgun._translate_filters, filters, "all" + ) - filters = [{ - "filter_operator": "all", - "filters": {"bogus": "bogus"} - }] + filters = [{"filter_operator": "all", "filters": {"bogus": "bogus"}}] - self.assertRaises(api.ShotgunError, api.shotgun._translate_filters, filters, "all") + self.assertRaises( + api.ShotgunError, api.shotgun._translate_filters, filters, "all" + ) @mock.patch.dict(os.environ, {"SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION": "1"}) def test_related_object(self): @@ -414,7 +438,13 @@ def test_related_object(self): [ "project", "is", - {"foo": "foo", "bar": "bar", "id": 999, "baz": "baz", "type": "Anything"}, + { + "foo": "foo", + "bar": "bar", + "id": 999, + "baz": "baz", + "type": "Anything", + }, ], ] expected = { @@ -445,7 +475,13 @@ def test_related_object_entity_optimization_is(self): [ "project", "is", - {"foo": "foo", "bar": "bar", "id": 999, "baz": "baz", "type": "Anything"}, + { + "foo": "foo", + "bar": "bar", + "id": 999, + "baz": "baz", + "type": "Anything", + }, ], ] expected = { @@ -481,7 +517,7 @@ def test_related_object_entity_optimization_is(self): { "path": "something", "relation": "is", - "values": [{'bar': 'bar', 'foo': 
'foo'}], + "values": [{"bar": "bar", "foo": "foo"}], } ], } @@ -496,8 +532,20 @@ def test_related_object_entity_optimization_in(self): "project", "in", [ - {"foo1": "foo1", "bar1": "bar1", "id": 999, "baz1": "baz1", "type": "Anything"}, - {"foo2": "foo2", "bar2": "bar2", "id": 998, "baz2": "baz2", "type": "Anything"}, + { + "foo1": "foo1", + "bar1": "bar1", + "id": 999, + "baz1": "baz1", + "type": "Anything", + }, + { + "foo2": "foo2", + "bar2": "bar2", + "id": 998, + "baz2": "baz2", + "type": "Anything", + }, {"foo3": "foo3", "bar3": "bar3"}, ], ], @@ -520,7 +568,7 @@ def test_related_object_entity_optimization_in(self): { "foo3": "foo3", "bar3": "bar3", - } + }, ], } ], @@ -560,7 +608,9 @@ def test_related_object_update_entity(self): ], } sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) - result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) + result = sg._translate_update_params( + entity_type, entity_id, data, multi_entity_update_modes + ) self.assertEqual(result, expected) @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) @@ -611,7 +661,9 @@ def test_related_object_update_optimization_entity(self): ], } sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) - result = sg._translate_update_params(entity_type, entity_id, data, multi_entity_update_modes) + result = sg._translate_update_params( + entity_type, entity_id, data, multi_entity_update_modes + ) self.assertEqual(result, expected) @mock.patch("shotgun_api3.shotgun.SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION", False) @@ -625,7 +677,11 @@ def test_related_object_update_optimization_entity_multi(self): {"id": 6441, "type": "Asset", "name": "disposable name 6441"}, {"id": 6440, "type": "Asset"}, ], - "sg_class": {"id": 1, "type": "CustomEntity53", "name": "disposable name 1"}, + "sg_class": { + "id": 1, + "type": "CustomEntity53", + "name": "disposable name 1", + }, } expected = { 
"type": "Asset", @@ -640,7 +696,10 @@ def test_related_object_update_optimization_entity_multi(self): {"id": 6440, "type": "Asset"}, ], }, - {"field_name": "sg_class", "value": {"type": "CustomEntity53", "id": 1}}, + { + "field_name": "sg_class", + "value": {"type": "CustomEntity53", "id": 1}, + }, ], } sg = api.Shotgun("http://server_path", "script_name", "api_key", connect=False) @@ -662,10 +721,9 @@ class TestCerts(unittest.TestCase): ] def setUp(self): - self.sg = api.Shotgun('http://server_path', - 'script_name', - 'api_key', - connect=False) + self.sg = api.Shotgun( + "http://server_path", "script_name", "api_key", connect=False + ) # Get the location of the certs file self.certs = self.sg._get_certs_file(None) @@ -712,7 +770,12 @@ def test_httplib(self): certificate with httplib. """ # First check that we get an error when trying to connect to a known dummy bad URL - self.assertRaises(ssl_error_classes, self._check_url_with_sg_api_httplib2, self.bad_url, self.certs) + self.assertRaises( + ssl_error_classes, + self._check_url_with_sg_api_httplib2, + self.bad_url, + self.certs, + ) # Now check that the good urls connect properly using the certs for url in self.test_urls: @@ -725,12 +788,14 @@ def test_urlib(self): certificate with urllib. """ # First check that we get an error when trying to connect to a known dummy bad URL - self.assertRaises(urllib.error.URLError, self._check_url_with_urllib, self.bad_url) + self.assertRaises( + urllib.error.URLError, self._check_url_with_urllib, self.bad_url + ) # Now check that the good urls connect properly using the certs for url in self.test_urls: response = self._check_url_with_urllib(url) - assert (response is not None) + assert response is not None class TestMimetypesFix(unittest.TestCase): @@ -738,8 +803,10 @@ class TestMimetypesFix(unittest.TestCase): Makes sure that the mimetypes fix will be imported. 
""" - @patch('shotgun_api3.shotgun.sys') - def _test_mimetypes_import(self, platform, major, minor, patch_number, result, mock): + @patch("shotgun_api3.shotgun.sys") + def _test_mimetypes_import( + self, platform, major, minor, patch_number, result, mock + ): """ Mocks sys.platform and sys.version_info to test the mimetypes import code. """ @@ -749,5 +816,5 @@ def _test_mimetypes_import(self, platform, major, minor, patch_number, result, m self.assertEqual(_is_mimetypes_broken(), result) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/update_httplib2.py b/update_httplib2.py index acfdee2b7..30422e0c2 100755 --- a/update_httplib2.py +++ b/update_httplib2.py @@ -17,18 +17,20 @@ class Utilities: def download_archive(self, file_path, file_name): """Download the archive from github.""" print(f"Downloading {file_name}") - subprocess.check_output([ - "curl", - "-L", - f"https://github.com/httplib2/httplib2/archive/{file_name}", - "-o", - file_path]) + subprocess.check_output( + [ + "curl", + "-L", + f"https://github.com/httplib2/httplib2/archive/{file_name}", + "-o", + file_path, + ] + ) def unzip_archive(self, file_path, file_name, temp_dir): """Unzip in a temp dir.""" print(f"Unzipping {file_name}") - subprocess.check_output( - ["unzip", str(file_path), "-d", str(temp_dir)]) + subprocess.check_output(["unzip", str(file_path), "-d", str(temp_dir)]) def remove_folder(self, path): """Remove a folder recursively.""" @@ -38,11 +40,14 @@ def remove_folder(self, path): def git_remove(self, target): print(f"Removing {target} in git.") try: - subprocess.check_output([ - "git", - "rm", - "-rf", - ] + target) + subprocess.check_output( + [ + "git", + "rm", + "-rf", + ] + + target + ) except Exception as e: pass @@ -58,8 +63,8 @@ def sanitize_file(self, file_path): contents = contents.replace("from httplib2.", "from .") contents = contents.replace("from httplib2", "from .") contents = contents.replace( - "import pyparsing as pp", - "from ... 
import pyparsing as pp") + "import pyparsing as pp", "from ... import pyparsing as pp" + ) with open(file_path, "w") as f: f.write(contents) @@ -88,18 +93,13 @@ def main(temp_path, repo_root, version): utilities.remove_folder(python3_dir) # Removes the previous version of httplib2 - utilities.git_remove([ - str(python2_dir), - str(python3_dir) - ]) + utilities.git_remove([str(python2_dir), str(python3_dir)]) # Copies a new version into place. print("Copying new version of httplib2") root_folder = unzipped_folder / f"httplib2-{version[1:]}" - utilities.copy_folder( - str(root_folder / "python2" / "httplib2"), python2_dir) - utilities.copy_folder( - str(root_folder / "python3" / "httplib2"), python3_dir) + utilities.copy_folder(str(root_folder / "python2" / "httplib2"), python2_dir) + utilities.copy_folder(str(root_folder / "python3" / "httplib2"), python3_dir) utilities.remove_folder(f"{python2_dir}/test") utilities.remove_folder(f"{python3_dir}/test") From b0d4a809aeea7713057c51a34dadef83d0e52d47 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Thu, 20 Feb 2025 11:55:27 -0800 Subject: [PATCH 11/59] SG-37280 Update Software Credits (#369) CY2024 Update --- software_credits | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/software_credits b/software_credits index 819f81ee6..6fa0bf725 100644 --- a/software_credits +++ b/software_credits @@ -1,7 +1,21 @@ -The Flow Production Tracking Python API uses the following software. Thanks to their creators, license information below. +The Flow Production Tracking Python API uses the following software. +Thanks to their creators, license information below. ============================== PYTHON ============================== +Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, +2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation. All +rights reserved. 
+ +Copyright (c) 2000 BeOpen.com. +All rights reserved. + +Copyright (c) 1995-2001 Corporation for National Research Initiatives. +All rights reserved. + +Copyright (c) 1991-1995 Stichting Mathematisch Centrum. +All rights reserved. + 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using Python 2.7.17 software in source or binary form and its associated documentation. @@ -47,9 +61,13 @@ The Flow Production Tracking Python API uses the following software. Thanks to t ============================== Certifi ============================== -This Autodesk software contains the python-certifi package and is subject to the -terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not -distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/. +Copyright © 2024 Contributors +This Autodesk software contains the unmodified python-certifi 2024.07.04 +package. The use and distribution terms for this software are covered by the +Mozilla Public License 2.0 (https://www.mozilla.org/en-US/MPL/2.0/ ). By using +this software in any fashion, you are agreeing to be bound by the terms of this +license. The source code for python-certifi is available from +https://github.com/certifi/python-certifi/releases/tag/2024.07.04 ============================== Httplib2 ============================== @@ -104,7 +122,7 @@ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
============================== SIX ============================== -Copyright (c) 2010-2019 Benjamin Peterson +Copyright (c) 2010-2020 Benjamin Peterson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in From 8b037619e079616f60f45e860d3764c8b38a20bc Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 20 Feb 2025 14:58:03 -0500 Subject: [PATCH 12/59] Fix Changelog typo (#370) --- HISTORY.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HISTORY.rst b/HISTORY.rst index 9858f61c8..a2981b9c2 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,7 +4,7 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. -v3.8.0 (2024 Feb 7) +v3.8.0 (2025 Feb 7) =================== - Extend the payload optimizations to the ``in`` and ``not_in`` filters and From c0eec8f2ca6e75963f03c92ae417763983d7a3a3 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 25 Feb 2025 11:32:31 -0500 Subject: [PATCH 13/59] Packaging for 3.8.1 (#371) --- HISTORY.rst | 7 +++++++ setup.py | 2 +- shotgun_api3/shotgun.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index a2981b9c2..47b375bfb 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.8.1 (2025 Feb 25) +==================== + +- Upgrade certifi to 2024.12.14. +- Apply black 25.1.0 formatting to the source code. 
+- Update Software Credits + v3.8.0 (2025 Feb 7) =================== diff --git a/setup.py b/setup.py index f92018fe1..2d982ec22 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.8.0", + version="3.8.1", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index a805fa5f4..455ed477e 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -122,7 +122,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.8.0" +__version__ = "3.8.1" # ---------------------------------------------------------------------------- # Errors From c06aff4c3132dcb10d4ee4a3856cccea9f875ee9 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Thu, 27 Feb 2025 11:42:16 -0800 Subject: [PATCH 14/59] SG-38301 Review the documentation regarding the SHOTGUN_API_CACERTS variable (#373) Align wording with tk-core --- docs/reference.rst | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/docs/reference.rst b/docs/reference.rst index 658fb6b4a..77241f052 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -936,8 +936,19 @@ Environment Variables SHOTGUN_API_CACERTS =================== -Used to specify a path to an external SSL certificates file. This environment variable can be used in place of the ``ca_certs`` keyword argument to the :class:`~shotgun.Shotgun` constructor. In the case that both this environment variable is set and the keyword argument is provided, the value from the keyword argument will be used. - +Use this variable to override the default Trusted Root Certification Authorities +Certificate Store bundled with this library. +By default, the library relies on `certifi `_ +as its Root CA store. 
+ +This environment variable can be used in place of the ``ca_certs`` keyword +argument to the :class:`~shotgun.Shotgun` constructor. +In the case that both this environment variable is set and the keyword argument +is provided, the value from the keyword argument will be used. + +For an example about using ``SHOTGUN_API_CACERTS`` to fix a certificate issue, +see the `SSLHandshakeError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed `_ +article. SHOTGUN_API_RETRY_INTERVAL ========================== From e4d5576431d61f8d98861245836337331bbe14cd Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 6 Mar 2025 13:19:41 -0500 Subject: [PATCH 15/59] Update README.md (#374) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f960e941f..4c9e464df 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ -[![VFX Platform](https://img.shields.io/badge/vfxplatform-2024%20%7C%202023%20%7C%202022%20%7C%202021-blue.svg)](http://www.vfxplatform.com/) -[![Python](https://img.shields.io/badge/python-3.7%20%7C%203.9%20%7C%203.10%20%7C%203.11-blue.svg)](https://www.python.org/) +[![VFX Platform](https://img.shields.io/badge/vfxplatform-2025%20%7C%202024%20%7C%202023%20%7C%202022-blue.svg)](http://www.vfxplatform.com/) +[![Python](https://img.shields.io/badge/python-3.11%20%7C%203.10%20%7C%203.9-blue.svg)](https://www.python.org/)/) [![Reference Documentation](http://img.shields.io/badge/doc-reference-blue.svg)](http://developer.shotgridsoftware.com/python-api) [![Build Status](https://dev.azure.com/shotgun-ecosystem/Python%20API/_apis/build/status/shotgunsoftware.python-api?branchName=master)](https://dev.azure.com/shotgun-ecosystem/Python%20API/_build/latest?definitionId=108&branchName=master) [![Coverage 
Status](https://coveralls.io/repos/github/shotgunsoftware/python-api/badge.svg?branch=master)](https://coveralls.io/github/shotgunsoftware/python-api?branch=master) From 666fa0c4ee33c432d6e3b0228363c1f9c9a08e9e Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 11 Mar 2025 16:02:28 -0500 Subject: [PATCH 16/59] SG-35712 Prevent flaky disconnection when uploading thumbnails on publish (#368) * Handle opening the connection in the retry block * Update parent * Revert parent class for `CACertsHTTPSConnection` * Remove testing argument --- shotgun_api3/shotgun.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 455ed477e..e1beaac01 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -4504,16 +4504,16 @@ def _upload_data_to_storage(self, data, content_type, size, storage_url): :returns: upload url. :rtype: str """ - opener = self._build_opener(urllib.request.HTTPHandler) - - request = urllib.request.Request(storage_url, data=data) - request.add_header("Content-Type", content_type) - request.add_header("Content-Length", size) - request.get_method = lambda: "PUT" attempt = 1 while attempt <= self.MAX_ATTEMPTS: try: + opener = self._build_opener(urllib.request.HTTPHandler) + + request = urllib.request.Request(storage_url, data=data) + request.add_header("Content-Type", content_type) + request.add_header("Content-Length", size) + request.get_method = lambda: "PUT" result = self._make_upload_request(request, opener) LOG.debug("Completed request to %s" % request.get_method()) @@ -4634,12 +4634,12 @@ def _send_form(self, url, params): params.update(self._auth_params()) - opener = self._build_opener(FormPostHandler) attempt = 1 while attempt <= self.MAX_ATTEMPTS: # Perform the request try: + opener = self._build_opener(FormPostHandler) resp = opener.open(url, params) result = resp.read() # 
response headers are in str(resp.info()).splitlines() @@ -4679,11 +4679,11 @@ def __init__(self, *args, **kwargs): """ # Pop that argument, self.__ca_certs = kwargs.pop("ca_certs") - http_client.HTTPConnection.__init__(self, *args, **kwargs) + super().__init__(self, *args, **kwargs) def connect(self): "Connect to a host on a given (SSL) port." - http_client.HTTPConnection.connect(self) + super().connect(self) # Now that the regular HTTP socket has been created, wrap it with our SSL certs. if six.PY38: context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) @@ -4698,13 +4698,13 @@ def connect(self): ) -class CACertsHTTPSHandler(urllib.request.HTTPSHandler): +class CACertsHTTPSHandler(urllib.request.HTTPHandler): """ Handler that ensures https connections are created with the custom CA certs. """ def __init__(self, cacerts): - urllib.request.HTTPSHandler.__init__(self) + super().__init__(self) self.__ca_certs = cacerts def https_open(self, req): From 4508d1cd937caee631ad3041447978aeb7a012ac Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 11 Mar 2025 16:21:17 -0500 Subject: [PATCH 17/59] Packaging for 3.8.2 (#375) * Packaging for 3.8.2 * Format file * Fix format --- HISTORY.rst | 7 +++++++ setup.py | 2 +- shotgun_api3/shotgun.py | 3 +-- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 47b375bfb..54b30f217 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.8.2 (2025 Mar 11) +==================== + +- Prevent flaky disconnection when uploading thumbnails on publish. + There's a flaky disconnection when the publisher uploads the thumbnail to the server. + The most common errors were: ``Connection closed by peer`` and ``URLopen error EOF occurred in violation of protocol ssl.c:1006``. 
+ v3.8.1 (2025 Feb 25) ==================== diff --git a/setup.py b/setup.py index 2d982ec22..cf3304f91 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.8.1", + version="3.8.2", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index e1beaac01..b682e87eb 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -122,7 +122,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.8.1" +__version__ = "3.8.2" # ---------------------------------------------------------------------------- # Errors @@ -4634,7 +4634,6 @@ def _send_form(self, url, params): params.update(self._auth_params()) - attempt = 1 while attempt <= self.MAX_ATTEMPTS: # Perform the request From 418d72e37c518a67abee10cf8094a633e59e1caf Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Mon, 14 Apr 2025 09:26:07 -0500 Subject: [PATCH 18/59] Remove Python 3.7 from CI (#377) * Remove Python 3.7 from CI * Update changes from backend --- azure-pipelines-templates/run-tests.yml | 2 -- tests/test_api.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/azure-pipelines-templates/run-tests.yml b/azure-pipelines-templates/run-tests.yml index 831c276ec..bc402c42c 100644 --- a/azure-pipelines-templates/run-tests.yml +++ b/azure-pipelines-templates/run-tests.yml @@ -44,8 +44,6 @@ jobs: strategy: matrix: # We support these versions of Python. 
- Python37: - python.version: '3.7' Python39: python.version: '3.9' Python310: diff --git a/tests/test_api.py b/tests/test_api.py index 0e611316a..d07442e96 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1004,6 +1004,7 @@ def test_preferences_read(self): resp = self.sg.preferences_read() expected = { + "creative_review_settings": "", "date_component_order": "month_day", "duration_units": "days", "format_currency_fields_decimal_options": "$1,000.99", From 17620c9eaa058545cb75796679b15c3654a0a3e7 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Mon, 14 Apr 2025 09:57:58 -0500 Subject: [PATCH 19/59] SG-37203 Apply mockgun improvements (#376) * Apply #217 * Fix dict * Apply #376 * Apply #364 --- shotgun_api3/lib/mockgun/mockgun.py | 46 +++++- tests/test_mockgun.py | 231 +++++++++++++++++++++++++++- 2 files changed, 266 insertions(+), 11 deletions(-) diff --git a/shotgun_api3/lib/mockgun/mockgun.py b/shotgun_api3/lib/mockgun/mockgun.py index 36b98dd5d..18e4a142c 100644 --- a/shotgun_api3/lib/mockgun/mockgun.py +++ b/shotgun_api3/lib/mockgun/mockgun.py @@ -293,6 +293,25 @@ def find( # handle the ordering of the recordset if order: # order: [{"field_name": "code", "direction": "asc"}, ... ] + def sort_none(k, order_field): + """ + Handle sorting of None consistently. + Note: Doesn't handle [checkbox, serializable, url]. 
+ """ + field_type = self._get_field_type(k["type"], order_field) + value = k[order_field] + if value is not None: + return value + elif field_type in ("number", "percent", "duration"): + return 0 + elif field_type == "float": + return 0.0 + elif field_type in ("text", "entity_type", "date", "list", "status_list"): + return "" + elif field_type == "date_time": + return datetime.datetime(datetime.MINYEAR, 1, 1) + return None + for order_entry in order: if "field_name" not in order_entry: raise ValueError("Order clauses must be list of dicts with keys 'field_name' and 'direction'!") @@ -305,7 +324,11 @@ def find( else: raise ValueError("Unknown ordering direction") - results = sorted(results, key=lambda k: k[order_field], reverse=desc_order) + results = sorted( + results, + key=lambda k: sort_none(k, order_field), + reverse=desc_order, + ) if fields is None: fields = set(["type", "id"]) @@ -608,6 +631,20 @@ def _compare(self, field_type, lval, operator, rval): if operator == "is": return lval == rval elif field_type == "text": + # Some operations expect a list but can deal with a single value + if operator in ("in", "not_in") and not isinstance(rval, list): + rval = [rval] + # Some operation expect a string but can deal with None + elif operator in ("starts_with", "ends_with", "contains", "not_contains"): + lval = lval or '' + rval = rval or '' + # Shotgun string comparison is case insensitive + lval = lval.lower() if lval is not None else None + if isinstance(rval, list): + rval = [val.lower() if val is not None else None for val in rval] + else: + rval = rval.lower() if rval is not None else None + if operator == "is": return lval == rval elif operator == "is_not": @@ -617,7 +654,7 @@ def _compare(self, field_type, lval, operator, rval): elif operator == "contains": return rval in lval elif operator == "not_contains": - return lval not in rval + return rval not in lval elif operator == "starts_with": return lval.startswith(rval) elif operator == "ends_with": @@ 
-831,7 +868,10 @@ def _update_row(self, entity_type, row, data, multi_entity_update_modes=None): update_mode = multi_entity_update_modes.get(field, "set") if multi_entity_update_modes else "set" if update_mode == "add": - row[field] += [{"type": item["type"], "id": item["id"]} for item in data[field]] + for item in data[field]: + new_item = {"type": item["type"], "id": item["id"]} + if new_item not in row[field]: + row[field].append(new_item) elif update_mode == "remove": row[field] = [ item diff --git a/tests/test_mockgun.py b/tests/test_mockgun.py index 1395355fa..e7e4295e4 100644 --- a/tests/test_mockgun.py +++ b/tests/test_mockgun.py @@ -35,6 +35,7 @@ and can be run on their own by typing "python test_mockgun.py". """ +import datetime import re import os import unittest @@ -188,14 +189,171 @@ def setUp(self): self._mockgun = Mockgun( "https://test.shotgunstudio.com", login="user", password="1234" ) - self._user = self._mockgun.create("HumanUser", {"login": "user"}) + self._user1 = self._mockgun.create("HumanUser", {"login": "user"}) + self._user2 = self._mockgun.create("HumanUser", {"login": None}) + + def test_operator_is(self): + """ + Ensure is operator work. + """ + actual = self._mockgun.find("HumanUser", [["login", "is", "user"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_is_none(self): + """ + Ensure is operator work when used with None. + """ + actual = self._mockgun.find("HumanUser", [["login", "is", None]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_is_case_sensitivity(self): + """ + Ensure is operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "is", "USER"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_is_not(self): + """ + Ensure the is_not operator works. 
+ """ + actual = self._mockgun.find("HumanUser", [["login", "is_not", "user"]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_is_not_none(self): + """ + Ensure the is_not operator works when used with None. + """ + actual = self._mockgun.find("HumanUser", [["login", "is_not", None]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_is_not_case_sensitivity(self): + """ + Ensure the is_not operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "is_not", "USER"]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_in(self): + """ + Ensure the in operator works. + """ + actual = self._mockgun.find("HumanUser", [["login", "in", ["user"]]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_in_none(self): + """ + Ensure the in operator works with a list containing None. + """ + actual = self._mockgun.find("HumanUser", [["login", "in", [None]]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_in_case_sensitivity(self): + """ + Ensure the in operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "in", ["USER"]]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_not_in(self): + """ + Ensure the not_in operator works. + """ + actual = self._mockgun.find("HumanUser", [["login", "not_in", ["foo"]]]) + expected = [ + {"type": "HumanUser", "id": self._user1["id"]}, + {"type": "HumanUser", "id": self._user2["id"]}, + ] + self.assertEqual(expected, actual) + + def test_operator_not_in_none(self): + """ + Ensure the not_not operator works with a list containing None. 
+ """ + actual = self._mockgun.find("HumanUser", [["login", "not_in", [None]]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_not_in_case_sensitivity(self): + """ + Ensure the not_in operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "not_in", ["USER"]]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) def test_operator_contains(self): """ - Ensures contains operator works. + Ensures the contains operator works. """ - item = self._mockgun.find_one("HumanUser", [["login", "contains", "se"]]) - self.assertTrue(item) + actual = self._mockgun.find("HumanUser", [["login", "contains", "se"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_contains_case_sensitivity(self): + """ + Ensure the contains operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "contains", "SE"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_not_contains(self): + """ + Ensure the not_contains operator works. + """ + actual = self._mockgun.find("HumanUser", [["login", "not_contains", "user"]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_not_contains_case_sensitivity(self): + """ + Ensure the not_contains operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "not_contains", "USER"]]) + expected = [{"type": "HumanUser", "id": self._user2["id"]}] + self.assertEqual(expected, actual) + + def test_operator_starts_with(self): + """ + Ensure the starts_with operator works. 
+ """ + actual = self._mockgun.find("HumanUser", [["login", "starts_with", "us"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_starts_with_case_sensitivity(self): + """ + Ensure the starts_with operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "starts_with", "US"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_ends_with(self): + """ + Ensure the ends_with operator works. + """ + actual = self._mockgun.find("HumanUser", [["login", "ends_with", "er"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) + + def test_operator_ends_with_case_sensitivity(self): + """ + Ensure the starts_with operator is case insensitive. + """ + actual = self._mockgun.find("HumanUser", [["login", "ends_with", "ER"]]) + expected = [{"type": "HumanUser", "id": self._user1["id"]}] + self.assertEqual(expected, actual) class TestMultiEntityFieldComparison(unittest.TestCase): @@ -345,10 +503,12 @@ def test_update_add(self): """ Ensures that "add" multi_entity_update_mode works. 
""" + # Attempts to add _version2 + # It already exists on the playlist and should not be duplicated self._mockgun.update( "Playlist", self._add_playlist["id"], - {"versions": [self._version3]}, + {"versions": [self._version2, self._version3]}, multi_entity_update_modes={"versions": "add"}, ) @@ -429,15 +589,29 @@ def setUp(self): self._prj2_link = self._mockgun.create("Project", {"name": "prj2"}) self._shot1 = self._mockgun.create( - "Shot", {"code": "shot1", "project": self._prj1_link} + "Shot", + { + "code": "shot1", + "project": self._prj1_link, + "description": "a", + "sg_cut_order": 2, + }, ) self._shot2 = self._mockgun.create( - "Shot", {"code": "shot2", "project": self._prj1_link} + "Shot", {"code": "shot2", "project": self._prj1_link, "sg_cut_order": 1} ) self._shot3 = self._mockgun.create( - "Shot", {"code": "shot3", "project": self._prj2_link} + "Shot", {"code": "shot3", "project": self._prj2_link, "description": "b"} + ) + + self._user1 = self._mockgun.create( + "HumanUser", {"login": "user1", "password_strength": 0.2} + ) + + self._user2 = self._mockgun.create( + "HumanUser", {"login": "user2", "created_at": datetime.datetime(2025, 1, 1)} ) def test_simple_filter_operators(self): @@ -468,6 +642,47 @@ def test_simple_filter_operators(self): self.assertEqual(len(shots), 0) + def test_ordered_filter_operator(self): + """ + Test use of the order feature of filter_operator on supported data types. 
+ """ + find_args = ["Shot", [], ["code"]] + + # str field + shots = self._mockgun.find( + *find_args, order=[{"field_name": "description", "direction": "asc"}] + ) + self.assertEqual([s["code"] for s in shots], ["shot2", "shot1", "shot3"]) + + shots = self._mockgun.find( + *find_args, order=[{"field_name": "description", "direction": "desc"}] + ) + self.assertEqual([s["code"] for s in shots], ["shot3", "shot1", "shot2"]) + + # int field + shots = self._mockgun.find( + *find_args, order=[{"field_name": "sg_cut_order", "direction": "asc"}] + ) + self.assertEqual([s["code"] for s in shots], ["shot3", "shot2", "shot1"]) + + # float field + users = self._mockgun.find( + "HumanUser", + [], + ["login"], + order=[{"field_name": "password_strength", "direction": "asc"}], + ) + self.assertEqual([u["login"] for u in users], ["user2", "user1"]) + + # date_time field + users = self._mockgun.find( + "HumanUser", + [], + ["login"], + order=[{"field_name": "created_at", "direction": "asc"}], + ) + self.assertEqual([u["login"] for u in users], ["user1", "user2"]) + def test_nested_filter_operators(self): """ Tests a the use of the filter_operator nested From 7f5f9c15119682de3141e54c6ca181fa6ed5479d Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 15 Apr 2025 11:13:46 -0500 Subject: [PATCH 20/59] Fix `creative_review_settings` test (#378) --- tests/test_api.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/test_api.py b/tests/test_api.py index d07442e96..788c9751b 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1004,7 +1004,6 @@ def test_preferences_read(self): resp = self.sg.preferences_read() expected = { - "creative_review_settings": "", "date_component_order": "month_day", "duration_units": "days", "format_currency_fields_decimal_options": "$1,000.99", @@ -1027,6 +1026,12 @@ def test_preferences_read(self): self.assertIn("view_master_settings", resp) 
resp.pop("view_master_settings") + # Simply make sure creative review settings are there. These change frequently and we + # don't want to have the test break because Creative Review changed or because we didn't + # update the test. + self.assertIn("creative_review_settings", resp) + resp.pop("creative_review_settings") + self.assertEqual(expected, resp) # all filtered From 5b3cf59fe13c96cc0639e6a501b5f84adf515e0e Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 15 May 2025 10:18:20 -0500 Subject: [PATCH 21/59] Fix reStructuredText format (#380) * Fix rst issue * Revert line * Update labels --- HISTORY.rst | 2 +- docs/cookbook/examples/basic_create_shot_task_template.rst | 4 ++-- docs/cookbook/examples/basic_delete_shot.rst | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 54b30f217..ad4ebda7a 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -61,7 +61,7 @@ v3.5.1 (2024 Apr 3) - Mockgun: add support for ``add_user_agent`` and ``set_session_uuid`` methods v3.5.0 (2024 Mar 26) -=================== +==================== - Rebranding component for Flow Production Tracking v3.4.2 (2024 Feb 6) diff --git a/docs/cookbook/examples/basic_create_shot_task_template.rst b/docs/cookbook/examples/basic_create_shot_task_template.rst index ab6248227..18722be96 100644 --- a/docs/cookbook/examples/basic_create_shot_task_template.rst +++ b/docs/cookbook/examples/basic_create_shot_task_template.rst @@ -42,8 +42,8 @@ created. wish to create by default on this Shot. We found the specific template we wanted to assign in the previous block by searching -Result ------- +Create Shot Result +------------------ The variable ``result`` now contains the dictionary of the new Shot that was created. 
:: diff --git a/docs/cookbook/examples/basic_delete_shot.rst b/docs/cookbook/examples/basic_delete_shot.rst index 4f2e91018..c79215e25 100644 --- a/docs/cookbook/examples/basic_delete_shot.rst +++ b/docs/cookbook/examples/basic_delete_shot.rst @@ -7,8 +7,8 @@ Deleting an entity in Flow Production Tracking is pretty straight-forward. No ex result = sg.delete("Shot", 40435) -Result ------- +Delete Shot Result +------------------ If the Shot was deleted successfully ``result`` will contain:: True From 8b8fdef211c5d19c4a15e17199b796249c416fb8 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 15 May 2025 11:04:39 -0500 Subject: [PATCH 22/59] SG-27368 Fix more RSTs files for Sphinx (#381) * Fix more RSTs files for Sphinx * Update ref * Remove duplicate label --- docs/cookbook/examples/basic_create_shot.rst | 4 ++-- docs/cookbook/examples/basic_delete_shot.rst | 4 ++-- docs/cookbook/examples/basic_find_shot.rst | 4 ++-- docs/cookbook/examples/basic_update_shot.rst | 4 ++-- docs/cookbook/examples/svn_integration.rst | 2 +- docs/cookbook/tasks/split_tasks.rst | 4 ++-- docs/cookbook/tasks/updating_tasks.rst | 6 +++--- docs/reference.rst | 1 - 8 files changed, 14 insertions(+), 15 deletions(-) diff --git a/docs/cookbook/examples/basic_create_shot.rst b/docs/cookbook/examples/basic_create_shot.rst index 7513305fa..f7d412f89 100644 --- a/docs/cookbook/examples/basic_create_shot.rst +++ b/docs/cookbook/examples/basic_create_shot.rst @@ -51,8 +51,8 @@ this dictionary represents. It does not correspond to any field in Flow Producti Flow Production Tracking will *always* return the ``id`` and ``type`` keys in the dictionary when there are results representing an entity. 
-The Complete Example --------------------- +The Complete Example for creating a Shot +---------------------------------------- :: #!/usr/bin/env python diff --git a/docs/cookbook/examples/basic_delete_shot.rst b/docs/cookbook/examples/basic_delete_shot.rst index c79215e25..886b962d3 100644 --- a/docs/cookbook/examples/basic_delete_shot.rst +++ b/docs/cookbook/examples/basic_delete_shot.rst @@ -13,8 +13,8 @@ If the Shot was deleted successfully ``result`` will contain:: True -The Complete Example --------------------- +The Complete Example for deleting a Shot +---------------------------------------- :: #!/usr/bin/env python diff --git a/docs/cookbook/examples/basic_find_shot.rst b/docs/cookbook/examples/basic_find_shot.rst index 945eb1be6..4f5d73934 100644 --- a/docs/cookbook/examples/basic_find_shot.rst +++ b/docs/cookbook/examples/basic_find_shot.rst @@ -37,8 +37,8 @@ easier to read. So we'll add that to the import section of our script.:: import shotgun_api3 from pprint import pprint # useful for debugging -The Complete Example --------------------- +The Complete Example for finding a Shot +--------------------------------------- :: #!/usr/bin/env python diff --git a/docs/cookbook/examples/basic_update_shot.rst b/docs/cookbook/examples/basic_update_shot.rst index c2413c3ee..4e2055d49 100644 --- a/docs/cookbook/examples/basic_update_shot.rst +++ b/docs/cookbook/examples/basic_update_shot.rst @@ -40,8 +40,8 @@ It does not correspond to any field in Flow Production Tracking. Flow Production Tracking will *always* return the ``id`` and ``type`` keys in the dictionary when there are results representing an entity. 
-The Complete Example --------------------- +The Complete Example for updating a Shot +---------------------------------------- :: #!/usr/bin/env python diff --git a/docs/cookbook/examples/svn_integration.rst b/docs/cookbook/examples/svn_integration.rst index 8b0a6ce46..c676fd8ea 100644 --- a/docs/cookbook/examples/svn_integration.rst +++ b/docs/cookbook/examples/svn_integration.rst @@ -130,7 +130,7 @@ Explanation of selected lines: - line ``14``: This should be the URL to your instance of Flow Production Tracking. - lines ``15-16``: Make sure you get these values from the "Scripts" page in the Admin section of - the Flow Production Tracking web application. If you're not sure how to do this, check out :doc:`authentication`. + the Flow Production Tracking web application. If you're not sure how to do this, check out :ref:`authentication`. - line ``17``: This is the address of Trac, our web-based interface that we use with Subversion. You may use a different interface, or none at all, so feel free to adjust this line or ignore it as your case may be. diff --git a/docs/cookbook/tasks/split_tasks.rst b/docs/cookbook/tasks/split_tasks.rst index d16c50e94..96f639037 100644 --- a/docs/cookbook/tasks/split_tasks.rst +++ b/docs/cookbook/tasks/split_tasks.rst @@ -62,8 +62,8 @@ How Do Splits Influence Dates And Dates Influence Splits - In the case of a shorter duration splits, starting with the latest ones, will be either removed or shortened until the new duration is met. -Examples -======== +Examples for splitting Tasks +============================ Throughout the following examples, each successive one will build on the previous. 
start_date, due_date and duration being ignored diff --git a/docs/cookbook/tasks/updating_tasks.rst b/docs/cookbook/tasks/updating_tasks.rst index c7c216e3f..db2433e79 100644 --- a/docs/cookbook/tasks/updating_tasks.rst +++ b/docs/cookbook/tasks/updating_tasks.rst @@ -32,9 +32,9 @@ General Rules first, then ``due_date`` (otherwise setting ``duration`` will change ``due_date`` after it is set). -******** -Examples -******** +*************************** +Examples for updating Tasks +*************************** The following examples show what the resulting Task object will look like after being run on the initial Task object listed under the header of each section. diff --git a/docs/reference.rst b/docs/reference.rst index 77241f052..5f3888a52 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -42,7 +42,6 @@ The documentation for all of the methods you'll need in your scripts lives in he Shotgun.close Shotgun.authenticate_human_user Shotgun.get_session_token - Shotgun.set_up_auth_cookie Shotgun.add_user_agent Shotgun.reset_user_agent Shotgun.set_session_uuid From a1de54efe3880afc3e5ab1aa67b37fb36b741e69 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 22 May 2025 12:17:35 -0500 Subject: [PATCH 23/59] Packaging for v3.8.3 (#384) --- HISTORY.rst | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/HISTORY.rst b/HISTORY.rst index ad4ebda7a..10e34bb55 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,17 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.8.3 (2025 May 22) +==================== + +- Add improvements to Mockgun. + Ensure string comparison are case insensitive. + Ignore duplicate entities when ``multi_entity_update_mode`` is added. + Support for ``None`` in mockgun when using ordering. + Thank you rlessardrodeofx, slingshotsys, and MHendricks for your contributions. 
+- Minor fixes on unit tests and documentation. + + v3.8.2 (2025 Mar 11) ==================== From cfab2b48fc4df378ca7219adbf99b70d96d3f983 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 3 Jun 2025 07:08:22 -0700 Subject: [PATCH 24/59] Better CI Job Name (#388) --- azure-pipelines-templates/run-tests.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/azure-pipelines-templates/run-tests.yml b/azure-pipelines-templates/run-tests.yml index bc402c42c..6c60b39c8 100644 --- a/azure-pipelines-templates/run-tests.yml +++ b/azure-pipelines-templates/run-tests.yml @@ -33,9 +33,9 @@ parameters: jobs: # The job will be named after the OS and Azure will suffix the strategy to make it unique - # so we'll have a job name "Windows Python27" for example. What's a strategy? Strategies are the - # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python27" and - # " Python37". + # so we'll have a job name "Windows Python 2.7" for example. What's a strategy? Strategies are the + # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python 2.7" and + # " Python 3.7". - job: ${{ parameters.name }} pool: vmImage: ${{ parameters.vm_image }} @@ -44,11 +44,11 @@ jobs: strategy: matrix: # We support these versions of Python. 
- Python39: + Python 3.9: python.version: '3.9' - Python310: + Python 3.10: python.version: '3.10' - Python311: + Python 3.11: python.version: '3.11' maxParallel: 4 From 209949404121add444ecd3776c19ba1085d84935 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Tue, 10 Jun 2025 15:25:00 -0500 Subject: [PATCH 25/59] SG-37924 Replace `utcfromtimestamp` for Python 3.12 (#390) * Replace `utcfromtimestamp` for Python 3.12 * Support expected value --- tests/test_client.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/test_client.py b/tests/test_client.py index e29c6158d..4a81996f1 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -587,7 +587,12 @@ def test_transform_data(self): now = datetime.datetime.fromtimestamp(timestamp).replace( microsecond=0, tzinfo=SG_TIMEZONE.local ) - utc_now = datetime.datetime.utcfromtimestamp(timestamp).replace(microsecond=0) + utc_now = ( + datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) + .replace(microsecond=0) + .astimezone(None) + .replace(tzinfo=None) + ) local = {"date": now.strftime("%Y-%m-%d"), "datetime": now, "time": now.time()} # date will still be the local date, because they are not transformed utc = { From 7b392bde6ec033e4a1c5749117fc47214e246380 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Wed, 11 Jun 2025 12:10:10 -0500 Subject: [PATCH 26/59] Packaging for v3.8.4 (#391) --- HISTORY.rst | 5 +++++ setup.py | 2 +- shotgun_api3/shotgun.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 10e34bb55..60e978958 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,11 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. 
+v3.8.4 (2025 Jun 11) +==================== + +- Replace ``utcfromtimestamp`` to prevent breaking changes in Python 3.12. + v3.8.3 (2025 May 22) ==================== diff --git a/setup.py b/setup.py index cf3304f91..9240486b5 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.8.2", + version="3.8.4", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index b682e87eb..fea25deba 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -122,7 +122,7 @@ def _is_mimetypes_broken(): # ---------------------------------------------------------------------------- # Version -__version__ = "3.8.2" +__version__ = "3.8.4" # ---------------------------------------------------------------------------- # Errors From bd4615cd00389110168fcfc155e5612f658fe954 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Mon, 14 Jul 2025 04:45:05 -0700 Subject: [PATCH 27/59] SG-4373 Improve documentation about local file references (#389) * Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> --- docs/cookbook/attachments.rst | 105 ++++++++++++++++++++++++++++------ docs/reference.rst | 7 ++- 2 files changed, 92 insertions(+), 20 deletions(-) diff --git a/docs/cookbook/attachments.rst b/docs/cookbook/attachments.rst index e265071e8..934ed58e8 100644 --- a/docs/cookbook/attachments.rst +++ b/docs/cookbook/attachments.rst @@ -238,7 +238,7 @@ are available: A dictionary representing which LocalStorage entity is applied for this 
local file link. - **url** (:obj:`str`) *read-only*: - A file:// link provided for convenience pointing to the value in the ``local_path`` + A file URI (``file://``) path provided for convenience pointing to the value in the ``local_path`` Reading Local File Fields ========================= @@ -284,40 +284,107 @@ defaults. Any other keys that are provided will be ignored. Optionally set the mime-type of the associated local file. This is assigned automatically using a best-guess based on the file extension. - * **name** :obj:`str`: Optional display name of the local file. This is set to the filename by default. * **local_path** :obj:`str`: - The full local path to the file. Flow Production Tracking will find the LocalStorage + The full local path to the file. Flow Production Tracking will find the ``LocalStorage`` that has the most specific match to this path and automatically assign that LocalStorage to the file. + Alternative to ``relative_path`` + +* **local_storage** :obj:`dict`: + The reference to an existing ``LocalStorage``. + Must contain ``type: LocalStorage`` plus either an ``id`` or a ``name`` + +* **relative_path** :obj:`str`: + The path to the file relative ``local_storage`` root. + Requires ``local_storage`` + Only accepting slash ``/`` separated path. Does not accept Windows path. 
+ Alternative to ``local_path`` + +Example 1: Using ``local_path`` +------------------------------ :: - data = {'sg_uploaded_movie': {'local_path': '/Users/kp/Movies/testing/test_movie_002.mov', - 'name': 'Better Movie'} - result = sg.update('Version', 123, data) + result = sg.update( + 'Version', + 123, + { + 'sg_uploaded_movie': { + 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov', + 'name': 'Better Movie', + } + ) Returns:: - {'id':123, - 'sg_uploaded_movie': { 'content_type': 'video/quicktime', - 'link_type': 'local', - 'name': 'my_test_movie.mov', - 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov' - 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_002.mov' - 'local_path_mac': '/Users/kp/Movies/testing/test_movie_002.mov' - 'local_path_windows': 'M:\\macusers\kp\Movies\testing\test_movie_002.mov' - 'local_storage': {'id': 1, - 'name': 'Dailies Directories', - 'type': 'LocalStorage'}, - 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov'}, - 'type': 'Version'}] + { + 'id':123, + 'sg_uploaded_movie': { + 'content_type': 'video/quicktime', + 'link_type': 'local', + 'name': 'my_test_movie.mov', + 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov' + 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_002.mov' + 'local_path_mac': '/Users/kp/Movies/testing/test_movie_002.mov' + 'local_path_windows': 'M:\\macusers\kp\Movies\testing\test_movie_002.mov' + 'local_storage': { + 'id': 1, + 'name': 'Dailies Directories', + 'type': 'LocalStorage' + }, + 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov' + }, + 'type': 'Version', + } The ``content_type`` was assigned a best-guess value based on the file extension. Flow Production Tracking selected the most appropriate specific LocalStorage match and assigned it to local_storage automatically. 
+ +Example 2: Using ``relative_path`` +--------------------------------- + +:: + + result = sg.update( + 'Version', + 123, + { + 'sg_uploaded_movie': { + 'local_storage': { + 'type': 'LocalStorage', + 'name': 'Dailies Directories', + }, + 'relative_path': 'testing/test_movie_002.mov', + } + ) + +Returns:: + + { + 'id':123, + 'sg_uploaded_movie': { + 'content_type': 'video/quicktime', + 'link_type': 'local', + 'name': 'my_test_movie.mov', + 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov', + 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_002.mov', + 'local_path_mac': '/Users/kp/Movies/testing/test_movie_002.mov', + 'local_path_windows': 'M:\\macusers\kp\Movies\testing\test_movie_002.mov', + 'local_storage': { + 'id': 1, + 'name': 'Dailies Directories', + 'type': 'LocalStorage' + }, + 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov' + }, + 'type': 'Version', + } + + Un-setting local file field values ================================== diff --git a/docs/reference.rst b/docs/reference.rst index 5f3888a52..e2e050e86 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -803,8 +803,13 @@ Additional keys exist for local file links 'local_path_linux': "string" | None, 'local_path_mac': "string" | None, 'local_path_windows': "string" | None, - 'local_storage': {dictionary}, + 'local_storage': { + 'type': 'LocalStorage', + 'id': int | None, + 'name': "string" | None, + }, 'name': "string", + 'relative_path': "string" | None 'url': "string", } API versions < v3.0.3: From 17c66c4191e4a4210e2fa0c7bb30536326927ec2 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio <123113322+carlos-villavicencio-adsk@users.noreply.github.com> Date: Thu, 17 Jul 2025 14:57:51 -0500 Subject: [PATCH 28/59] SG-38213 Prevent unexpected retries on error (#379) * Prevent unexpected retries on error * Fix tests * Update shotgun_api3/shotgun.py Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> * Packaging for v3.8.3-beta1 * 
Fix typo * Update HISTORY.rst Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --------- Co-authored-by: Julien Langlois <16244608+julien-lang@users.noreply.github.com> --- HISTORY.rst | 7 +++++++ shotgun_api3/shotgun.py | 7 +++---- tests/test_api.py | 36 +----------------------------------- tests/test_client.py | 10 +--------- 4 files changed, 12 insertions(+), 48 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 60e978958..c19b61fec 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.8.5 (2025 Xxx X) +=================== + +- We don't want to retry on general exceptions (e.g. timeout or remote disconnection) + because we might send a resource modification request (create, batch create, etc) and + we can end up duplicating things. + v3.8.4 (2025 Jun 11) ==================== diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index fea25deba..98e38d83b 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -3938,11 +3938,10 @@ def _make_call(self, verb, path, body, headers): if attempt == max_rpc_attempts: LOG.debug("Request failed. Giving up after %d attempts." % attempt) raise - except Exception: + except Exception as e: self._close_connection() - if attempt == max_rpc_attempts: - LOG.debug("Request failed. Giving up after %d attempts." % attempt) - raise + LOG.debug(f"Request failed. Reason: {e}", exc_info=True) + raise LOG.debug( "Request failed, attempt %d of %d. Retrying in %.2f seconds..." diff --git a/tests/test_api.py b/tests/test_api.py index 788c9751b..407df92fd 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -2223,42 +2223,8 @@ def test_make_call_retry(self, mock_request): self.assertEqual(cm2.exception.args[0], "not working") log_content = "\n".join(cm1.output) - for i in [1, 2]: - self.assertIn( - f"Request failed, attempt {i} of 3. 
Retrying", - log_content, - ) - self.assertIn( - "Request failed. Giving up after 3 attempts.", - log_content, - ) - - # Then, make the exception happening only once and prove the - # retry works - def my_side_effect(*args, **kwargs): - try: - if my_side_effect.counter < 1: - raise Exception("not working") - - return mock.DEFAULT - finally: - my_side_effect.counter += 1 - - my_side_effect.counter = 0 - mock_request.side_effect = my_side_effect - with self.assertLogs("shotgun_api3", level="DEBUG") as cm: - self.assertIsInstance( - self.sg.info(), - dict, - ) - - log_content = "\n".join(cm.output) self.assertIn( - "Request failed, attempt 1 of 3. Retrying", - log_content, - ) - self.assertNotIn( - "Request failed, attempt 2 of 3. Retrying", + "Request failed. Reason: not working", log_content, ) diff --git a/tests/test_client.py b/tests/test_client.py index 4a81996f1..ea6892137 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -330,17 +330,9 @@ def test_network_retry(self): with mock.patch("time.sleep") as mock_sleep: self.assertRaises(httplib2.HttpLib2Error, self.sg.info) self.assertTrue( - self.sg.config.max_rpc_attempts == self.sg._http_request.call_count, + self.sg._http_request.call_count == 1, "Call is repeated", ) - # Ensure that sleep was called with the retry interval between each attempt - attempt_interval = self.sg.config.rpc_attempt_interval / 1000.0 - calls = [mock.callargs(((attempt_interval,), {}))] - calls *= self.sg.config.max_rpc_attempts - 1 - self.assertTrue( - mock_sleep.call_args_list == calls, - "Call is repeated at correct interval.", - ) def test_set_retry_interval(self): """Setting the retry interval through parameter and environment variable works.""" From f3fa9bc8c9dc9a316301c952a820deeeecce7e3a Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Fri, 18 Jul 2025 01:21:22 -0700 Subject: [PATCH 29/59] SG-38877 Update certifi third party library to version2025.7.14 (#397) * Bump 
certifi to version 2025.7.9 * Fixup file EOL * Update to 2025.07.14 --- shotgun_api3/lib/certifi/__init__.py | 2 +- shotgun_api3/lib/certifi/cacert.pem | 448 +++++++++++---------------- shotgun_api3/lib/certifi/core.py | 33 +- shotgun_api3/lib/requirements.txt | 4 +- 4 files changed, 189 insertions(+), 298 deletions(-) diff --git a/shotgun_api3/lib/certifi/__init__.py b/shotgun_api3/lib/certifi/__init__.py index ee8686bec..e8370493f 100644 --- a/shotgun_api3/lib/certifi/__init__.py +++ b/shotgun_api3/lib/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2024.12.14" +__version__ = "2025.07.14" diff --git a/shotgun_api3/lib/certifi/cacert.pem b/shotgun_api3/lib/certifi/cacert.pem index ef509f865..64c05d7f3 100644 --- a/shotgun_api3/lib/certifi/cacert.pem +++ b/shotgun_api3/lib/certifi/cacert.pem @@ -1,95 +1,4 @@ -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG 
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp 
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu 
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -125,39 +34,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m 0vdXcDazv/wor3ElhVsT/h5/WrQ8 -----END CERTIFICATE----- -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM 
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Label: "QuoVadis Root CA 2" @@ -245,103 +121,6 @@ mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK 4SVhM7JZG+Ju1zdXtg2pEto= -----END CERTIFICATE----- -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- 
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc 
OU=www.digicert.com # Label: "DigiCert Assured ID Root CA" @@ -474,47 +253,6 @@ ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ -----END CERTIFICATE----- -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 
-cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - # Issuer: CN=SecureTrust CA O=SecureTrust Corporation # Subject: CN=SecureTrust CA O=SecureTrust Corporation # Label: "SecureTrust CA" @@ -4853,4 +4591,188 @@ Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT 9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp 4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- \ No newline at end of file +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 2 2023" +# Serial: 153168538924886464690566649552453098598 +# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85 +# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87 +# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1 +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw 
+OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr +i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE +gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8 +k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT +Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl +2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U +cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP +/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS +uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+ +0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N +DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+ +XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61 +GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI +FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n +riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR +VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc +LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn +4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD +hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG +koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46 +ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS +Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80 +knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ +hJ65bvspmZDogNOfJA== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS ECC Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS ECC Root CA" +# Serial: 310892014698942880364840003424242768478804666567 +# MD5 Fingerprint: 09:48:04:77:d2:fc:65:93:71:66:b1:11:95:4f:06:8c +# SHA1 Fingerprint: b5:ec:39:f3:a1:66:37:ae:c3:05:94:57:e2:be:11:be:b7:a1:7f:36 +# SHA256 Fingerprint: c0:07:6b:9e:f0:53:1f:b1:a6:56:d6:7c:4e:be:97:cd:5d:ba:a4:1e:f4:45:98:ac:c2:48:98:78:c9:2d:87:11 +-----BEGIN CERTIFICATE----- +MIICMTCCAbegAwIBAgIUNnThTXxlE8msg1UloD5Sfi9QaMcwCgYIKoZIzj0EAwMw +WDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgRUNDIFJvb3QgQ0EwHhcNMjQw +NTE1MDU0MTU2WhcNNDQwNTE1MDU0MTU1WjBYMQswCQYDVQQGEwJDTjElMCMGA1UE +ChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1c3RB +c2lhIFRMUyBFQ0MgUm9vdCBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLh/pVs/ +AT598IhtrimY4ZtcU5nb9wj/1WrgjstEpvDBjL1P1M7UiFPoXlfXTr4sP/MSpwDp +guMqWzJ8S5sUKZ74LYO1644xST0mYekdcouJtgq7nDM1D9rs3qlKH8kzsaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQULIVTu7FDzTLqnqOH/qKYqKaT6RAw +DgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2gAMGUCMFRH18MtYYZI9HlaVQ01 +L18N9mdsd0AaRuf4aFtOJx24mH1/k78ITcTaRTChD15KeAIxAKORh/IRM4PDwYqR +OkwrULG9IpRdNYlzg8WbGf60oenUoWa2AaU2+dhoYSi3dOGiMQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia TLS RSA Root CA O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia TLS RSA Root CA" +# Serial: 160405846464868906657516898462547310235378010780 +# MD5 Fingerprint: 3b:9e:c3:86:0f:34:3c:6b:c5:46:c4:8e:1d:e7:19:12 +# SHA1 Fingerprint: a5:46:50:c5:62:ea:95:9a:1a:a7:04:6f:17:58:c7:29:53:3d:03:fa +# SHA256 Fingerprint: 06:c0:8d:7d:af:d8:76:97:1e:b1:12:4f:e6:7f:84:7e:c0:c7:a1:58:d3:ea:53:cb:e9:40:e2:ea:97:91:f4:c3 +-----BEGIN CERTIFICATE----- +MIIFgDCCA2igAwIBAgIUHBjYz+VTPyI1RlNUJDxsR9FcSpwwDQYJKoZIhvcNAQEM +BQAwWDELMAkGA1UEBhMCQ04xJTAjBgNVBAoTHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xIjAgBgNVBAMTGVRydXN0QXNpYSBUTFMgUlNBIFJvb3QgQ0EwHhcN +MjQwNTE1MDU0MTU3WhcNNDQwNTE1MDU0MTU2WjBYMQswCQYDVQQGEwJDTjElMCMG +A1UEChMcVHJ1c3RBc2lhIFRlY2hub2xvZ2llcywgSW5jLjEiMCAGA1UEAxMZVHJ1 +c3RBc2lhIFRMUyBSU0EgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC +AgoCggIBAMMWuBtqpERz5dZO9LnPWwvB0ZqB9WOwj0PBuwhaGnrhB3YmH49pVr7+ +NmDQDIPNlOrnxS1cLwUWAp4KqC/lYCZUlviYQB2srp10Zy9U+5RjmOMmSoPGlbYJ +Q1DNDX3eRA5gEk9bNb2/mThtfWza4mhzH/kxpRkQcwUqwzIZheo0qt1CHjCNP561 +HmHVb70AcnKtEj+qpklz8oYVlQwQX1Fkzv93uMltrOXVmPGZLmzjyUT5tUMnCE32 +ft5EebuyjBza00tsLtbDeLdM1aTk2tyKjg7/D8OmYCYozza/+lcK7Fs/6TAWe8Tb +xNRkoDD75f0dcZLdKY9BWN4ArTr9PXwaqLEX8E40eFgl1oUh63kd0Nyrz2I8sMeX +i9bQn9P+PN7F4/w6g3CEIR0JwqH8uyghZVNgepBtljhb//HXeltt08lwSUq6HTrQ +UNoyIBnkiz/r1RYmNzz7dZ6wB3C4FGB33PYPXFIKvF1tjVEK2sUYyJtt3LCDs3+j +TnhMmCWr8n4uIF6CFabW2I+s5c0yhsj55NqJ4js+k8UTav/H9xj8Z7XvGCxUq0DT +bE3txci3OE9kxJRMT6DNrqXGJyV1J23G2pyOsAWZ1SgRxSHUuPzHlqtKZFlhaxP8 +S8ySpg+kUb8OWJDZgoM5pl+z+m6Ss80zDoWo8SnTq1mt1tve1CuBAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFLgHkXlcBvRG/XtZylomkadFK/hT +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQwFAAOCAgEAIZtqBSBdGBanEqT3 +Rz/NyjuujsCCztxIJXgXbODgcMTWltnZ9r96nBO7U5WS/8+S4PPFJzVXqDuiGev4 +iqME3mmL5Dw8veWv0BIb5Ylrc5tvJQJLkIKvQMKtuppgJFqBTQUYo+IzeXoLH5Pt +7DlK9RME7I10nYEKqG/odv6LTytpEoYKNDbdgptvT+Bz3Ul/KD7JO6NXBNiT2Twp +2xIQaOHEibgGIOcberyxk2GaGUARtWqFVwHxtlotJnMnlvm5P1vQiJ3koP26TpUJ +g3933FEFlJ0gcXax7PqJtZwuhfG5WyRasQmr2soaB82G39tp27RIGAAtvKLEiUUj 
+pQ7hRGU+isFqMB3iYPg6qocJQrmBktwliJiJ8Xw18WLK7nn4GS/+X/jbh87qqA8M +pugLoDzga5SYnH+tBuYc6kIQX+ImFTw3OffXvO645e8D7r0i+yiGNFjEWn9hongP +XvPKnbwbPKfILfanIhHKA9jnZwqKDss1jjQ52MjqjZ9k4DewbNfFj8GQYSbbJIwe +SsCI3zWQzj8C9GRh3sfIB5XeMhg6j6JCQCTl1jNdfK7vsU1P1FeQNWrcrgSXSYk0 +ly4wBOeY99sLAZDBHwo/+ML+TvrbmnNzFrwFuHnYWa8G5z9nODmxfKuU4CkUpijy +323imttUQ/hHWKNddBWcwauwxzQ= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 2 2023" +# Serial: 139766439402180512324132425437959641711 +# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6 +# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b +# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce +-----BEGIN CERTIFICATE----- +MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI +MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE +LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw +OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi +MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK +F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE +7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe +EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6 +lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb +RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV +jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc +jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx +TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+ +ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk +hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF 
+NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH +kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG +OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y +XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14 +QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4 +pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q +3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU +t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX +cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8 +ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT +2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs +7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP +gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst +Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh +XBxvWHZks/wCuPWdCg== +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Subject: CN=SwissSign RSA TLS Root CA 2022 - 1 O=SwissSign AG +# Label: "SwissSign RSA TLS Root CA 2022 - 1" +# Serial: 388078645722908516278762308316089881486363258315 +# MD5 Fingerprint: 16:2e:e4:19:76:81:85:ba:8e:91:58:f1:15:ef:72:39 +# SHA1 Fingerprint: 81:34:0a:be:4c:cd:ce:cc:e7:7d:cc:8a:d4:57:e2:45:a0:77:5d:ce +# SHA256 Fingerprint: 19:31:44:f4:31:e0:fd:db:74:07:17:d4:de:92:6a:57:11:33:88:4b:43:60:d3:0e:27:29:13:cb:e6:60:ce:41 +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQ/oMX04bgBhE79G0TzUfRPSA7cswDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzErMCkGA1UE +AxMiU3dpc3NTaWduIFJTQSBUTFMgUm9vdCBDQSAyMDIyIC0gMTAeFw0yMjA2MDgx +MTA4MjJaFw00NzA2MDgxMTA4MjJaMFExCzAJBgNVBAYTAkNIMRUwEwYDVQQKEwxT +d2lzc1NpZ24gQUcxKzApBgNVBAMTIlN3aXNzU2lnbiBSU0EgVExTIFJvb3QgQ0Eg +MjAyMiAtIDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDLKmjiC8NX +vDVjvHClO/OMPE5Xlm7DTjak9gLKHqquuN6orx122ro10JFwB9+zBvKK8i5VUXu7 
+LCTLf5ImgKO0lPaCoaTo+nUdWfMHamFk4saMla+ju45vVs9xzF6BYQ1t8qsCLqSX +5XH8irCRIFucdFJtrhUnWXjyCcplDn/L9Ovn3KlMd/YrFgSVrpxxpT8q2kFC5zyE +EPThPYxr4iuRR1VPuFa+Rd4iUU1OKNlfGUEGjw5NBuBwQCMBauTLE5tzrE0USJIt +/m2n+IdreXXhvhCxqohAWVTXz8TQm0SzOGlkjIHRI36qOTw7D59Ke4LKa2/KIj4x +0LDQKhySio/YGZxH5D4MucLNvkEM+KRHBdvBFzA4OmnczcNpI/2aDwLOEGrOyvi5 +KaM2iYauC8BPY7kGWUleDsFpswrzd34unYyzJ5jSmY0lpx+Gs6ZUcDj8fV3oT4MM +0ZPlEuRU2j7yrTrePjxF8CgPBrnh25d7mUWe3f6VWQQvdT/TromZhqwUtKiE+shd +OxtYk8EXlFXIC+OCeYSf8wCENO7cMdWP8vpPlkwGqnj73mSiI80fPsWMvDdUDrta +clXvyFu1cvh43zcgTFeRc5JzrBh3Q4IgaezprClG5QtO+DdziZaKHG29777YtvTK +wP1H8K4LWCDFyB02rpeNUIMmJCn3nTsPBQIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBRvjmKLk0Ow4UD2p8P98Q+4 +DxU4pTAdBgNVHQ4EFgQUb45ii5NDsOFA9qfD/fEPuA8VOKUwDQYJKoZIhvcNAQEL +BQADggIBAKwsKUF9+lz1GpUYvyypiqkkVHX1uECry6gkUSsYP2OprphWKwVDIqO3 +10aewCoSPY6WlkDfDDOLazeROpW7OSltwAJsipQLBwJNGD77+3v1dj2b9l4wBlgz +Hqp41eZUBDqyggmNzhYzWUUo8aWjlw5DI/0LIICQ/+Mmz7hkkeUFjxOgdg3XNwwQ +iJb0Pr6VvfHDffCjw3lHC1ySFWPtUnWK50Zpy1FVCypM9fJkT6lc/2cyjlUtMoIc +gC9qkfjLvH4YoiaoLqNTKIftV+Vlek4ASltOU8liNr3CjlvrzG4ngRhZi0Rjn9UM +ZfQpZX+RLOV/fuiJz48gy20HQhFRJjKKLjpHE7iNvUcNCfAWpO2Whi4Z2L6MOuhF +LhG6rlrnub+xzI/goP+4s9GFe3lmozm1O2bYQL7Pt2eLSMkZJVX8vY3PXtpOpvJp +zv1/THfQwUY1mFwjmwJFQ5Ra3bxHrSL+ul4vkSkphnsh3m5kt8sNjzdbowhq6/Td +Ao9QAwKxuDdollDruF/UKIqlIgyKhPBZLtU30WHlQnNYKoH3dtvi4k0NX/a3vgW0 +rk4N3hY9A4GzJl5LuEsAz/+MF7psYC0nhzck5npgL7XTgwSqT0N1osGDsieYK7EO +gLrAhV5Cud+xYJHT6xh+cHiudoO+cVrQkOPKwRYlZ0rwtnu64ZzZ +-----END CERTIFICATE----- diff --git a/shotgun_api3/lib/certifi/core.py b/shotgun_api3/lib/certifi/core.py index 91f538bb1..1c9661cc7 100644 --- a/shotgun_api3/lib/certifi/core.py +++ b/shotgun_api3/lib/certifi/core.py @@ -46,7 +46,7 @@ def where() -> str: def contents() -> str: return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") -elif sys.version_info >= (3, 7): +else: from importlib.resources import path as get_path, read_text @@ -81,34 +81,3 @@ def where() -> 
str: def contents() -> str: return read_text("certifi", "cacert.pem", encoding="ascii") - -else: - import os - import types - from typing import Union - - Package = Union[types.ModuleType, str] - Resource = Union[str, "os.PathLike"] - - # This fallback will work for Python versions prior to 3.7 that lack the - # importlib.resources module but relies on the existing `where` function - # so won't address issues with environments like PyOxidizer that don't set - # __file__ on modules. - def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict' - ) -> str: - with open(where(), encoding=encoding) as data: - return data.read() - - # If we don't have importlib.resources, then we will just do the old logic - # of assuming we're on the filesystem and munge the path directly. - def where() -> str: - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/shotgun_api3/lib/requirements.txt b/shotgun_api3/lib/requirements.txt index 34172e948..f91a3ae19 100644 --- a/shotgun_api3/lib/requirements.txt +++ b/shotgun_api3/lib/requirements.txt @@ -30,5 +30,5 @@ # released for our dependencies. 
httplib2==0.22.0 six==1.13.0 -certifi==2024.7.4 -pyparsing==2.4.7 \ No newline at end of file +certifi==2025.7.14 +pyparsing==2.4.7 From 8738890b5108b5157e2d1a42e61de2215235b0fd Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 9 Sep 2025 08:24:51 -0700 Subject: [PATCH 30/59] Fixup minor issues with README (#410) * Fixup badge * Move developer/maintainer part of docs in developer/ folder * Better badges * Update developer/README.md Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- README.md | 49 ++++----------------------------------------- developer/README.md | 44 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 45 deletions(-) create mode 100644 developer/README.md diff --git a/README.md b/README.md index 4c9e464df..33e493821 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ -[![VFX Platform](https://img.shields.io/badge/vfxplatform-2025%20%7C%202024%20%7C%202023%20%7C%202022-blue.svg)](http://www.vfxplatform.com/) -[![Python](https://img.shields.io/badge/python-3.11%20%7C%203.10%20%7C%203.9-blue.svg)](https://www.python.org/)/) -[![Reference Documentation](http://img.shields.io/badge/doc-reference-blue.svg)](http://developer.shotgridsoftware.com/python-api) +[![Supported VFX Platform: 2022 - 2025](https://img.shields.io/badge/VFX_Platform-2022_|_2023_|_2024_|_2025-blue)](http://www.vfxplatform.com/ "Supported VFX Platform") +[![Supported Python versions: 3.9 - 3.11](https://img.shields.io/badge/Python-3.9_|_3.10_|_3.11-blue?logo=python&logoColor=f5f5f5)](https://www.python.org/ "Supported Python versions") +[![Reference Documentation](http://img.shields.io/badge/Reference-documentation-blue.svg?logo=wikibooks&logoColor=f5f5f5)](http://developer.shotgridsoftware.com/python-api) + [![Build 
Status](https://dev.azure.com/shotgun-ecosystem/Python%20API/_apis/build/status/shotgunsoftware.python-api?branchName=master)](https://dev.azure.com/shotgun-ecosystem/Python%20API/_build/latest?definitionId=108&branchName=master) [![Coverage Status](https://coveralls.io/repos/github/shotgunsoftware/python-api/badge.svg?branch=master)](https://coveralls.io/github/shotgunsoftware/python-api?branch=master) @@ -25,13 +26,6 @@ Some useful direct links: You can see the [full history of the Python API on the documentation site](http://developer.shotgridsoftware.com/python-api/changelog.html). -## Updating HTTPLib2 - -The API comes with a copy of the `httplib2` inside the `shotgun_api3/lib` folder. To update the copy to a more recent version of the API, you can run the `update_httplib2.py` script at the root of this repository like this: - - python update_httplib2.py vX.Y.Z - -where `vX.Y.Z` is a release found on `httplib2`'s [release page](https://github.com/httplib2/httplib2/releases). ## Tests @@ -47,38 +41,3 @@ Integration and unit tests are provided. - `test_client` and `tests_unit` use mock server interaction and do not require a Flow Production Tracking instance to be available (no modifications to `tests/config` are necessary). - `test_api` and `test_api_long` *do* require a Flow Production Tracking instance, with a script key available for the tests. The server and script user values must be supplied in the `tests/config` file. The tests will add test data to your server based on information in your config. This data will be manipulated by the tests, and should not be used for other purposes. - To run all of the tests, use the shell script `run-tests`. - -## Release process - -### Packaging up new release - -1) Update the Changelog in the `HISTORY.rst` file - - Add bullet points for any changes that have happened since the previous release. This may include changes you did not make so look at the commit history and make sure we don't miss anything. 
If you notice something was done that wasn't added to the changelog, hunt down that engineer and make them feel guilty for not doing so. This is a required step in making changes to the API. - - Try and match the language of previous change log messages. We want to keep a consistent voice. - - Make sure the date of the release matches today. We try and keep this TBD until we're ready to do a release so it's easy to catch that it needs to be updated. - - Make sure the version number is filled out and correct. We follow semantic versioning. -2) Ensure any changes or additions to public methods are documented - - Ensure that doc strings are updated in the code itself to work with Sphinx and are correctly formatted. - - Examples are always good especially if this a new feature or method. - - Think about a new user to the API trying to figure out how to use the features you're documenting. -3) Update the version value in `python-api/setup.py` to match the version you are packaging. This controls what version users will get when installing via pip. -4) Update the `__version__` value in `shotgun_api3/shotgun.py` to the version you're releasing. This identified the current version within the API itself. -5) Commit these changes in master with a commit message like `packaging for the vx.x.x release`. -6) Create a tag based off of the master branch called `vx.x.x` to match the version number you're releasing. -7) Push master and your tag to Github. -8) Update the Releases page with your new release. - - The release should already be there from your tag but if not, create a new one. - - Add more detailed information regarding the changes in this release. This is a great place to add examples, and reasons for the change! - -### Letting the world know -Post a message in the [Pipeline Community channel](https://community.shotgridsoftware.com/c/pipeline). 
- -### Prepare for the Next Dev Cycle -1) Update the `__version__` value in `shotgun_api3/shotgun.py` to the next version number with `.dev` appended to it. For example, `v3.0.24.dev` -2) Add a new section to the Changelog in the `HISTORY.rst` file with the next version number and a TBD date -``` - **v3.0.24 - TBD** - + TBD -``` -3) Commit the changes to master with a commit message like `Bump version to v3.0.24.dev` -4) Push master to Github diff --git a/developer/README.md b/developer/README.md new file mode 100644 index 000000000..369070655 --- /dev/null +++ b/developer/README.md @@ -0,0 +1,44 @@ + +# Updating HTTPLib2 + +The API comes with a copy of the `httplib2` inside the `shotgun_api3/lib` folder. To update the copy to a more recent version of the API, you can run the `update_httplib2.py` script at the root of this repository like this: + + python update_httplib2.py vX.Y.Z + +where `vX.Y.Z` is a release found on `httplib2`'s [release page](https://github.com/httplib2/httplib2/releases). + + +# Release process + +## Packaging up new release + +1) Update the Changelog in the `HISTORY.rst` file + - Add bullet points for any changes that have happened since the previous release. This may include changes you did not make so look at the commit history and make sure we don't miss anything. If you notice something was done that wasn't added to the changelog, hunt down that engineer and make them feel guilty for not doing so. This is a required step in making changes to the API. + - Try and match the language of previous change log messages. We want to keep a consistent voice. + - Make sure the date of the release matches today. We try and keep this TBD until we're ready to do a release so it's easy to catch that it needs to be updated. + - Make sure the version number is filled out and correct. We follow semantic versioning. 
+2) Ensure any changes or additions to public methods are documented + - Ensure that doc strings are updated in the code itself to work with Sphinx and are correctly formatted. + - Examples are always good especially if this a new feature or method. + - Think about a new user to the API trying to figure out how to use the features you're documenting. +3) Update the version value in `python-api/setup.py` to match the version you are packaging. This controls what version users will get when installing via pip. +4) Update the `__version__` value in `shotgun_api3/shotgun.py` to the version you're releasing. This identifies the current version within the API itself. +5) Commit these changes in master with a commit message like `packaging for the vx.x.x release`. +6) Create a tag based off of the master branch called `vx.x.x` to match the version number you're releasing. +7) Push master and your tag to Github. +8) Update the Releases page with your new release. + - The release should already be there from your tag but if not, create a new one. + - Add more detailed information regarding the changes in this release. This is a great place to add examples, and reasons for the change! + +## Letting the world know +Post a message in the [Pipeline Community channel](https://community.shotgridsoftware.com/c/pipeline). + +## Prepare for the Next Dev Cycle +1) Update the `__version__` value in `shotgun_api3/shotgun.py` to the next version number with `.dev` appended to it. 
For example, `v3.0.24.dev` +2) Add a new section to the Changelog in the `HISTORY.rst` file with the next version number and a TBD date +``` + **v3.0.24 - TBD** + + TBD +``` +3) Commit the changes to master with a commit message like `Bump version to v3.0.24.dev` +4) Push master to Github From efd4d4d61bf9d8c601e024bc0aa6bf6537749b48 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 06:01:48 -0700 Subject: [PATCH 31/59] Import order (#398) --- shotgun_api3/shotgun.py | 24 ++++++++++++------------ tests/test_api.py | 18 ++++++++++-------- tests/test_client.py | 8 ++++---- tests/test_unit.py | 1 + 4 files changed, 27 insertions(+), 24 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 98e38d83b..cc80320ba 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -29,27 +29,27 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ -# Python 2/3 compatibility -from .lib import six -from .lib import sgsix -from .lib import sgutils -from .lib.six import BytesIO # used for attachment upload -from .lib.six.moves import map - -from .lib.six.moves import http_cookiejar # used for attachment upload +import copy import datetime +import json import logging -import uuid # used for attachment upload import os import re -import copy +import shutil # used for attachment download import ssl import stat # used for attachment upload import sys import time -import json +import uuid # used for attachment upload + +# Python 2/3 compatibility +from .lib import six +from .lib import sgsix +from .lib import sgutils +from .lib.six import BytesIO # used for attachment upload +from .lib.six.moves import map +from .lib.six.moves import http_cookiejar # used for attachment upload from .lib.six.moves import urllib -import shutil # used for attachment download from .lib.six.moves import http_client # Used for secure file upload. 
from .lib.httplib2 import Http, ProxyInfo, socks, ssl_error_classes from .lib.sgtimezone import SgTimezone diff --git a/tests/test_api.py b/tests/test_api.py index 407df92fd..99fd0c080 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -16,29 +16,31 @@ from __future__ import print_function import datetime -import sys +import glob import os -from . import mock -from .mock import patch, MagicMock import ssl +import sys import time import types -import uuid import unittest -from shotgun_api3.lib.six.moves import range, urllib +import uuid import warnings -import glob -import shotgun_api3 -from shotgun_api3.lib.httplib2 import Http from shotgun_api3.lib import six +from shotgun_api3.lib.httplib2 import Http # To mock the correct exception when testion on Python 2 and 3, use the # ShotgunSSLError variable from sgsix that contains the appropriate exception # class for the current Python version. from shotgun_api3.lib.sgsix import ShotgunSSLError +from shotgun_api3.lib.six.moves import range, urllib + +import shotgun_api3 + from . import base +from . import mock +from .mock import patch, MagicMock class TestShotgunApi(base.LiveTestBase): diff --git a/tests/test_client.py b/tests/test_client.py index ea6892137..08037ac5e 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -14,7 +14,11 @@ import datetime import os +import platform import re +import sys +import time +import unittest from shotgun_api3.lib.six.moves import urllib from shotgun_api3.lib import six, sgutils @@ -27,10 +31,6 @@ except ImportError: import shotgun_api3.lib.simplejson as json -import platform -import sys -import time -import unittest from . 
import mock import shotgun_api3.lib.httplib2 as httplib2 diff --git a/tests/test_unit.py b/tests/test_unit.py index de996c553..ff78253c2 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -13,6 +13,7 @@ import os import unittest from unittest import mock + from .mock import patch import shotgun_api3 as api from shotgun_api3.shotgun import _is_mimetypes_broken From d3809cd4a8aa3fbfb4b2caabaf75a38d6cfd14cf Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 06:19:32 -0700 Subject: [PATCH 32/59] SG-38306 Python2 Removal - Part 2 - Easy ones (#399) * Import order * Remove __future__ imports * Cleanup super prototype Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Eduardo Chauca Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- docs/reference.rst | 3 -- shotgun_api3/shotgun.py | 84 ++++++++++++++++++++++------------------- tests/base.py | 8 ++-- tests/test_api.py | 39 +++++++++---------- tests/test_api_long.py | 3 +- tests/test_client.py | 16 ++++---- tests/test_mockgun.py | 4 +- tests/test_proxy.py | 2 +- 8 files changed, 78 insertions(+), 81 deletions(-) diff --git a/docs/reference.rst b/docs/reference.rst index e2e050e86..96c917469 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -1034,6 +1034,3 @@ Example for a user whose language preference is set to Japanese: }, ... } - -.. note:: - If needed, the encoding of the returned localized string can be ensured regardless the Python version using shotgun_api3.lib.six.ensure_text(). diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index cc80320ba..a97e44c64 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -712,9 +712,9 @@ def __init__( # the lowercase version of the credentials. 
auth, self.config.server = self._split_url(base_url) if auth: - auth = base64encode( - sgutils.ensure_binary(urllib.parse.unquote(auth)) - ).decode("utf-8") + auth = base64encode(urllib.parse.unquote(auth).encode("utf-8")).decode( + "utf-8" + ) self.config.authorization = "Basic " + auth.strip() # foo:bar@123.456.789.012:3456 @@ -2270,8 +2270,7 @@ def schema_field_update( "type": entity_type, "field_name": field_name, "properties": [ - {"property_name": k, "value": v} - for k, v in six.iteritems((properties or {})) + {"property_name": k, "value": v} for k, v in (properties or {}).items() ], } params = self._add_project_param(params, project_entity) @@ -2966,7 +2965,11 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No url.find("s3.amazonaws.com") != -1 and e.headers["content-type"] == "application/xml" ): - body = [sgutils.ensure_text(line) for line in e.readlines()] + body = [ + line.decode("utf-8") if isinstance(line, bytes) else line + for line in e.readlines() + ] + if body: xml = "".join(body) # Once python 2.4 support is not needed we can think about using @@ -3328,7 +3331,7 @@ def text_search(self, text, entity_types, project_ids=None, limit=None): raise ValueError("entity_types parameter must be a dictionary") api_entity_types = {} - for entity_type, filter_list in six.iteritems(entity_types): + for entity_type, filter_list in entity_types.items(): if isinstance(filter_list, (list, tuple)): resolved_filters = _translate_filters(filter_list, filter_operator=None) @@ -3859,8 +3862,7 @@ def _encode_payload(self, payload): be in a single byte encoding to go over the wire. 
""" - wire = json.dumps(payload, ensure_ascii=False) - return sgutils.ensure_binary(wire) + return json.dumps(payload, ensure_ascii=False).encode("utf-8") def _make_call(self, verb, path, body, headers): """ @@ -3964,7 +3966,7 @@ def _http_request(self, verb, path, body, headers): resp, content = conn.request(url, method=verb, body=body, headers=headers) # http response code is handled else where http_status = (resp.status, resp.reason) - resp_headers = dict((k.lower(), v) for k, v in six.iteritems(resp)) + resp_headers = dict((k.lower(), v) for k, v in resp.items()) resp_body = content LOG.debug("Response status is %s %s" % http_status) @@ -4044,7 +4046,7 @@ def _decode_list(lst): def _decode_dict(dct): newdict = {} - for k, v in six.iteritems(dct): + for k, v in dct.items(): if isinstance(k, str): k = sgutils.ensure_str(k) if isinstance(v, str): @@ -4118,7 +4120,7 @@ def _visit_data(self, data, visitor): return tuple(recursive(i, visitor) for i in data) if isinstance(data, dict): - return dict((k, recursive(v, visitor)) for k, v in six.iteritems(data)) + return dict((k, recursive(v, visitor)) for k, v in data.items()) return visitor(data) @@ -4165,10 +4167,6 @@ def _outbound_visitor(value): value = _change_tz(value) return value.strftime("%Y-%m-%dT%H:%M:%SZ") - # ensure return is six.text_type - if isinstance(value, str): - return sgutils.ensure_text(value) - return value return self._visit_data(data, _outbound_visitor) @@ -4287,7 +4285,7 @@ def _parse_records(self, records): continue # iterate over each item and check each field for possible injection - for k, v in six.iteritems(rec): + for k, v in rec.items(): if not v: continue @@ -4375,7 +4373,7 @@ def _dict_to_list( [{'field_name': 'foo', 'value': 'bar', 'thing1': 'value1'}] """ ret = [] - for k, v in six.iteritems((d or {})): + for k, v in (d or {}).items(): d = {key_name: k, value_name: v} d.update((extra_data or {}).get(k, {})) ret.append(d) @@ -4388,7 +4386,7 @@ def _dict_to_extra_data(self, d, 
key_name="value"): e.g. d {'foo' : 'bar'} changed to {'foo': {"value": 'bar'}] """ - return dict([(k, {key_name: v}) for (k, v) in six.iteritems((d or {}))]) + return dict([(k, {key_name: v}) for (k, v) in (d or {}).items()]) def _upload_file_to_storage(self, path, storage_url): """ @@ -4656,7 +4654,10 @@ def _send_form(self, url, params): else: raise ShotgunError("Unanticipated error occurred %s" % (e)) - return sgutils.ensure_text(result) + if isinstance(result, bytes): + result = result.decode("utf-8") + + return result else: raise ShotgunError("Max attemps limit reached.") @@ -4737,9 +4738,8 @@ def http_request(self, request): else: params.append((key, value)) if not files: - data = sgutils.ensure_binary( - urllib.parse.urlencode(params, True) - ) # sequencing on + data = urllib.parse.urlencode(params, True).encode("utf-8") + # sequencing on else: boundary, data = self.encode(params, files) content_type = "multipart/form-data; boundary=%s" % boundary @@ -4762,42 +4762,48 @@ def encode(self, params, files, boundary=None, buffer=None): if buffer is None: buffer = BytesIO() for key, value in params: - if not isinstance(value, str): + if isinstance(key, bytes): + key = key.decode("utf-8") + + if isinstance(value, bytes): + value = value.decode("utf-8") + elif not isinstance(value, str): # If value is not a string (e.g. int) cast to text value = str(value) - value = sgutils.ensure_text(value) - key = sgutils.ensure_text(key) - buffer.write(sgutils.ensure_binary("--%s\r\n" % boundary)) + buffer.write(f"--{boundary}\r\n".encode("utf-8")) buffer.write( - sgutils.ensure_binary('Content-Disposition: form-data; name="%s"' % key) + f'Content-Disposition: form-data; name="{key}"'.encode("utf-8") ) - buffer.write(sgutils.ensure_binary("\r\n\r\n%s\r\n" % value)) + buffer.write(f"\r\n\r\n{value}\r\n".encode("utf-8")) for key, fd in files: # On Windows, it's possible that we were forced to open a file # with non-ascii characters as unicode. 
In that case, we need to # encode it as a utf-8 string to remove unicode from the equation. # If we don't, the mix of unicode and strings going into the # buffer can cause UnicodeEncodeErrors to be raised. - filename = fd.name - filename = sgutils.ensure_text(filename) + filename = ( + fd.name.decode("utf-8") if isinstance(fd.name, bytes) else fd.name + ) filename = filename.split("/")[-1] - key = sgutils.ensure_text(key) + if isinstance(key, bytes): + key = key.decode("utf-8") + content_type = mimetypes.guess_type(filename)[0] content_type = content_type or "application/octet-stream" file_size = os.fstat(fd.fileno())[stat.ST_SIZE] - buffer.write(sgutils.ensure_binary("--%s\r\n" % boundary)) + buffer.write(f"--{boundary}\r\n".encode("utf-8")) c_dis = 'Content-Disposition: form-data; name="%s"; filename="%s"%s' content_disposition = c_dis % (key, filename, "\r\n") - buffer.write(sgutils.ensure_binary(content_disposition)) - buffer.write(sgutils.ensure_binary("Content-Type: %s\r\n" % content_type)) - buffer.write(sgutils.ensure_binary("Content-Length: %s\r\n" % file_size)) + buffer.write(content_disposition.encode("utf-8")) + buffer.write(f"Content-Type: {content_type}\r\n".encode("utf-8")) + buffer.write(f"Content-Length: {file_size}\r\n".encode("utf-8")) - buffer.write(sgutils.ensure_binary("\r\n")) + buffer.write(b"\r\n") fd.seek(0) shutil.copyfileobj(fd, buffer) - buffer.write(sgutils.ensure_binary("\r\n")) - buffer.write(sgutils.ensure_binary("--%s--\r\n\r\n" % boundary)) + buffer.write(b"\r\n") + buffer.write(f"--{boundary}--\r\n\r\n".encode("utf-8")) buffer = buffer.getvalue() return boundary, buffer diff --git a/tests/base.py b/tests/base.py index 2820d495d..cc8634996 100644 --- a/tests/base.py +++ b/tests/base.py @@ -135,7 +135,7 @@ class MockTestBase(TestBase): """Test base for tests mocking server interactions.""" def setUp(self): - super(MockTestBase, self).setUp() + super().setUp() # TODO see if there is another way to stop sg connecting 
self._setup_mock() self._setup_mock_data() @@ -252,7 +252,7 @@ class LiveTestBase(TestBase): def setUp(self, auth_mode=None): if not auth_mode: auth_mode = "HumanUser" if self.config.jenkins else "ApiUser" - super(LiveTestBase, self).setUp(auth_mode) + super().setUp(auth_mode) if ( self.sg.server_caps.version and self.sg.server_caps.version >= (3, 3, 0) @@ -410,7 +410,7 @@ class HumanUserAuthLiveTestBase(LiveTestBase): """ def setUp(self): - super(HumanUserAuthLiveTestBase, self).setUp("HumanUser") + super().setUp("HumanUser") class SessionTokenAuthLiveTestBase(LiveTestBase): @@ -420,7 +420,7 @@ class SessionTokenAuthLiveTestBase(LiveTestBase): """ def setUp(self): - super(SessionTokenAuthLiveTestBase, self).setUp("SessionToken") + super().setUp("SessionToken") class SgTestConfig(object): diff --git a/tests/test_api.py b/tests/test_api.py index 99fd0c080..11cd8e8cd 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -14,7 +14,6 @@ test_api_long for other tests. """ -from __future__ import print_function import datetime import glob import os @@ -45,7 +44,7 @@ class TestShotgunApi(base.LiveTestBase): def setUp(self): - super(TestShotgunApi, self).setUp() + super().setUp() # give note unicode content self.sg.update("Note", self.note["id"], {"content": "La Pe\xf1a"}) @@ -247,9 +246,7 @@ def test_upload_download(self): # test upload of non-ascii, unicode path u_path = os.path.abspath( - os.path.expanduser( - glob.glob(os.path.join(six.text_type(this_dir), "Noëlご.jpg"))[0] - ) + os.path.expanduser(glob.glob(os.path.join(this_dir, "Noëlご.jpg"))[0]) ) # If this is a problem, it'll raise with a UnicodeEncodeError. 
We @@ -327,9 +324,7 @@ def test_upload_to_sg(self, mock_send_form): mock_send_form.return_value = "1\n:123\nasd" this_dir, _ = os.path.split(__file__) u_path = os.path.abspath( - os.path.expanduser( - glob.glob(os.path.join(six.text_type(this_dir), "Noëlご.jpg"))[0] - ) + os.path.expanduser(glob.glob(os.path.join(this_dir, "Noëlご.jpg"))[0]) ) upload_id = self.sg.upload( "Version", @@ -419,7 +414,7 @@ def test_upload_thumbnail_in_create(self): url = new_version.get("filmstrip_image") data = self.sg.download_attachment({"url": url}) - self.assertTrue(isinstance(data, six.binary_type)) + self.assertTrue(isinstance(data, bytes)) self.sg.delete("Version", new_version["id"]) @@ -1061,7 +1056,7 @@ class TestDataTypes(base.LiveTestBase): """ def setUp(self): - super(TestDataTypes, self).setUp() + super().setUp() def test_set_checkbox(self): entity = "HumanUser" @@ -1271,7 +1266,7 @@ class TestUtc(base.LiveTestBase): """Test utc options""" def setUp(self): - super(TestUtc, self).setUp() + super().setUp() utc = shotgun_api3.shotgun.SG_TIMEZONE.utc self.datetime_utc = datetime.datetime(2008, 10, 13, 23, 10, tzinfo=utc) local = shotgun_api3.shotgun.SG_TIMEZONE.local @@ -1313,7 +1308,7 @@ def _assert_expected(self, sg, date_time, expected): class TestFind(base.LiveTestBase): def setUp(self): - super(TestFind, self).setUp() + super().setUp() # We will need the created_at field for the shot fields = list(self.shot.keys())[:] fields.append("created_at") @@ -2109,7 +2104,7 @@ def test_following(self): class TestErrors(base.TestBase): def setUp(self): auth_mode = "HumanUser" if self.config.jenkins else "ApiUser" - super(TestErrors, self).setUp(auth_mode) + super().setUp(auth_mode) def test_bad_auth(self): """test_bad_auth invalid script name or api key raises fault""" @@ -2401,7 +2396,7 @@ def test_upload_missing_file(self): class TestScriptUserSudoAuth(base.LiveTestBase): def setUp(self): - super(TestScriptUserSudoAuth, self).setUp() + super().setUp() self.sg.update( "HumanUser", @@ 
-2442,7 +2437,7 @@ def test_user_is_creator(self): class TestHumanUserSudoAuth(base.TestBase): def setUp(self): - super(TestHumanUserSudoAuth, self).setUp("HumanUser") + super().setUp("HumanUser") def test_human_user_sudo_auth_fails(self): """ @@ -2713,7 +2708,7 @@ class TestActivityStream(base.LiveTestBase): """ def setUp(self): - super(TestActivityStream, self).setUp() + super().setUp() self._prefix = uuid.uuid4().hex self._shot = self.sg.create( @@ -2763,7 +2758,7 @@ def tearDown(self): ) self.sg.batch(batch_data) - super(TestActivityStream, self).tearDown() + super().tearDown() def test_simple(self): """ @@ -2836,7 +2831,7 @@ class TestNoteThreadRead(base.LiveTestBase): """ def setUp(self): - super(TestNoteThreadRead, self).setUp() + super().setUp() # get path to our std attahcment this_dir, _ = os.path.split(__file__) @@ -3047,7 +3042,7 @@ class TestTextSearch(base.LiveTestBase): """ def setUp(self): - super(TestTextSearch, self).setUp() + super().setUp() # create 5 shots and 5 assets to search for self._prefix = uuid.uuid4().hex @@ -3087,7 +3082,7 @@ def tearDown(self): ) self.sg.batch(batch_data) - super(TestTextSearch, self).tearDown() + super().tearDown() def test_simple(self): """ @@ -3471,9 +3466,9 @@ def test_import_httplib(self): def _has_unicode(data): for k, v in data.items(): - if isinstance(k, six.text_type): + if isinstance(k, str): return True - if isinstance(v, six.text_type): + if isinstance(v, str): return True return False diff --git a/tests/test_api_long.py b/tests/test_api_long.py index 0bf509b3c..29a34e991 100644 --- a/tests/test_api_long.py +++ b/tests/test_api_long.py @@ -13,7 +13,6 @@ Includes the schema functions and the automated searching for all entity types """ -from __future__ import print_function from . import base import random import shotgun_api3 @@ -56,7 +55,7 @@ def test_automated_find(self): # pivot_column fields aren't valid for sorting so ensure we're # not using one. 
order_field = None - for field_name, field in six.iteritems(fields): + for field_name, field in fields.items(): # Restrict sorting to only types we know will always be sortable # Since no_sorting is not exposed to us, we'll have to rely on # this as a safeguard against trying to sort by a field with diff --git a/tests/test_client.py b/tests/test_client.py index 08037ac5e..9c826322e 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -45,14 +45,17 @@ def b64encode(val): - return base64encode(sgutils.ensure_binary(val)).decode("utf-8") + if isinstance(val, str): + val = val.encode("utf-8") + + return base64encode(val).decode("utf-8") class TestShotgunClient(base.MockTestBase): """Test case for shotgun api with server interactions mocked.""" def setUp(self): - super(TestShotgunClient, self).setUp() + super().setUp() # get domain and uri scheme match = re.search("(https?://)(.*)", self.server_url) self.uri_prefix = match.group(1) @@ -422,11 +425,10 @@ def test_call_rpc(self): expected = "rpc response with list result, first item" self.assertEqual(d["results"][0], rv, expected) - # Test unicode mixed with utf-8 as reported in Ticket #17959 + # Test payload encoding with non-ascii characters (using utf-8 literal) d = {"results": ["foo", "bar"]} a = { - "utf_str": "\xe2\x88\x9a", - "unicode_str": sgutils.ensure_text("\xe2\x88\x9a"), + "utf_literal": "\xe2\x88\x9a", } self._mock_http(d) rv = self.sg._call_rpc("list", a) @@ -640,9 +642,7 @@ def test_encode_payload(self): self.assertTrue(isinstance(j, bytes)) def test_decode_response_ascii(self): - self._assert_decode_resonse( - True, sgutils.ensure_str("my data \u00e0", encoding="utf8") - ) + self._assert_decode_resonse(True, "my data \u00e0") def test_decode_response_unicode(self): self._assert_decode_resonse(False, "my data \u00e0") diff --git a/tests/test_mockgun.py b/tests/test_mockgun.py index e7e4295e4..ad478304a 100644 --- a/tests/test_mockgun.py +++ b/tests/test_mockgun.py @@ -79,7 +79,7 @@ def 
setUp(self): """ Creates test data. """ - super(TestValidateFilterSyntax, self).setUp() + super().setUp() self._mockgun = Mockgun( "https://test.shotgunstudio.com", login="user", password="1234" @@ -578,7 +578,7 @@ def setUp(self): """ Creates tests data. """ - super(TestFilterOperator, self).setUp() + super().setUp() self._mockgun = Mockgun( "https://test.shotgunstudio.com", login="user", password="1234" diff --git a/tests/test_proxy.py b/tests/test_proxy.py index cb713cd9d..7bf0d7006 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -18,7 +18,7 @@ class ServerConnectionTest(base.TestBase): """Tests for server connection""" def setUp(self): - super(ServerConnectionTest, self).setUp() + super().setUp() def test_connection(self): """Tests server connects and returns nothing""" From 517f65f0540f39f5d007ed352524678691f98ab2 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 07:08:22 -0700 Subject: [PATCH 33/59] SG-38306 Python2 Removal - Part 3 - Cleanup imports (#400) * six.moves imports * Cleanup BytesIO import from six * simple json * Cleanup Py2-3 compat with ImportError * Simplify Base64 --------- Co-authored-by: Eduardo Chauca --- docs/cookbook/examples/ami_handler.rst | 2 +- shotgun_api3/lib/mockgun/schema.py | 2 +- shotgun_api3/shotgun.py | 45 ++++++++++++-------------- tests/base.py | 19 +++-------- tests/test_api.py | 11 ++++--- tests/test_client.py | 23 ++++--------- tests/test_unit.py | 3 +- 7 files changed, 42 insertions(+), 63 deletions(-) diff --git a/docs/cookbook/examples/ami_handler.rst b/docs/cookbook/examples/ami_handler.rst index 3fb5e3571..6b8f3384b 100644 --- a/docs/cookbook/examples/ami_handler.rst +++ b/docs/cookbook/examples/ami_handler.rst @@ -218,7 +218,7 @@ via ``POST``. If you're using a custom protocol the data is sent via ``GET``. 
params = params.split("&") p = {"column_display_names": [], "cols": []} for arg in params: - key, value = map(six.moves.urllib.parse.unquote, arg.split("=", 1)) + key, value = map(urllib.parse.unquote, arg.split("=", 1)) if key == "column_display_names" or key == "cols": p[key].append(value) else: diff --git a/shotgun_api3/lib/mockgun/schema.py b/shotgun_api3/lib/mockgun/schema.py index 5d5019df4..ab671629d 100644 --- a/shotgun_api3/lib/mockgun/schema.py +++ b/shotgun_api3/lib/mockgun/schema.py @@ -30,8 +30,8 @@ ----------------------------------------------------------------------------- """ -from ..six.moves import cPickle as pickle import os +import pickle from .errors import MockgunError diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index a97e44c64..3a2c9b8eb 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -29,9 +29,13 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ +import base64 import copy import datetime import json +import http.client # Used for secure file upload +import http.cookiejar # used for attachment upload +import io # used for attachment upload import logging import os import re @@ -40,29 +44,22 @@ import stat # used for attachment upload import sys import time +import urllib.error +import urllib.parse +import urllib.request import uuid # used for attachment upload +# Import Error and ResponseError (even though they're unused in this file) since they need +# to be exposed as part of the API. +from xmlrpc.client import Error, ProtocolError, ResponseError # noqa + # Python 2/3 compatibility from .lib import six from .lib import sgsix from .lib import sgutils -from .lib.six import BytesIO # used for attachment upload -from .lib.six.moves import map -from .lib.six.moves import http_cookiejar # used for attachment upload -from .lib.six.moves import urllib -from .lib.six.moves import http_client # Used for secure file upload. 
from .lib.httplib2 import Http, ProxyInfo, socks, ssl_error_classes from .lib.sgtimezone import SgTimezone -# Import Error and ResponseError (even though they're unused in this file) since they need -# to be exposed as part of the API. -from .lib.six.moves.xmlrpc_client import Error, ProtocolError, ResponseError # noqa - -if six.PY3: - from base64 import encodebytes as base64encode -else: - from base64 import encodestring as base64encode - LOG = logging.getLogger("shotgun_api3") """ @@ -708,13 +705,13 @@ def __init__( # and auth header # Do NOT self._split_url(self.base_url) here, as it contains the lower - # case version of the base_url argument. Doing so would base64encode + # case version of the base_url argument. Doing so would base64.encodebytes # the lowercase version of the credentials. auth, self.config.server = self._split_url(base_url) if auth: - auth = base64encode(urllib.parse.unquote(auth).encode("utf-8")).decode( - "utf-8" - ) + auth = base64.encodebytes( + urllib.parse.unquote(auth).encode("utf-8") + ).decode("utf-8") self.config.authorization = "Basic " + auth.strip() # foo:bar@123.456.789.012:3456 @@ -3003,8 +3000,8 @@ def get_auth_cookie_handler(self): This is used internally for downloading attachments from FPTR. """ sid = self.get_session_token() - cj = http_cookiejar.LWPCookieJar() - c = http_cookiejar.Cookie( + cj = http.cookiejar.LWPCookieJar() + c = http.cookiejar.Cookie( "0", "_session_id", sid, @@ -4432,7 +4429,7 @@ def _multipart_upload_file_to_storage(self, path, upload_info): data_size = len(data) # keep data as a stream so that we don't need to worry how it was # encoded. 
- data = BytesIO(data) + data = io.BytesIO(data) bytes_read += data_size part_url = self._get_upload_part_link( upload_info, filename, part_number @@ -4662,13 +4659,13 @@ def _send_form(self, url, params): raise ShotgunError("Max attemps limit reached.") -class CACertsHTTPSConnection(http_client.HTTPConnection): +class CACertsHTTPSConnection(http.client.HTTPConnection): """ " This class allows to create an HTTPS connection that uses the custom certificates passed in. """ - default_port = http_client.HTTPS_PORT + default_port = http.client.HTTPS_PORT def __init__(self, *args, **kwargs): """ @@ -4760,7 +4757,7 @@ def encode(self, params, files, boundary=None, buffer=None): # We'll do this across both python 2/3 rather than add more branching. boundary = uuid.uuid4() if buffer is None: - buffer = BytesIO() + buffer = io.BytesIO() for key, value in params: if isinstance(key, bytes): key = key.decode("utf-8") diff --git a/tests/base.py b/tests/base.py index cc8634996..e30ec01a4 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,31 +1,20 @@ """Base class for Flow Production Tracking API tests.""" +import configparser import contextlib +import json import os import random import re import time import unittest +import urllib.error from . import mock import shotgun_api3 as api -from shotgun_api3.shotgun import json from shotgun_api3.shotgun import ServerCapabilities from shotgun_api3.lib import six -from shotgun_api3.lib.six.moves import urllib -from shotgun_api3.lib.six.moves.configparser import ConfigParser - -try: - # Attempt to import skip from unittest. Since this was added in Python 2.7 - # in the case that we're running on Python 2.6 we'll need a decorator to - # provide some equivalent functionality. - from unittest import skip -except ImportError: - # On Python 2.6 we'll just have to ignore tests that are skipped -- we won't - # mark them as skipped, but we will not fail on them. 
- def skip(f): - return lambda self: None THUMBNAIL_MAX_ATTEMPTS = 30 @@ -456,7 +445,7 @@ def config_keys(self): ] def read_config(self, config_path): - config_parser = ConfigParser() + config_parser = configparser.ConfigParser() config_parser.read(config_path) for section in config_parser.sections(): for option in config_parser.options(section): diff --git a/tests/test_api.py b/tests/test_api.py index 11cd8e8cd..cd4dcbeef 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,6 +22,9 @@ import time import types import unittest +import urllib.parse +import urllib.request +import urllib.error import uuid import warnings @@ -33,8 +36,6 @@ # class for the current Python version. from shotgun_api3.lib.sgsix import ShotgunSSLError -from shotgun_api3.lib.six.moves import range, urllib - import shotgun_api3 from . import base @@ -629,7 +630,7 @@ def test_linked_thumbnail_url(self): # For now skip tests that are erroneously failling on some sites to # allow CI to pass until the known issue causing this is resolved. - @base.skip("Skipping test that erroneously fails on some sites.") + @unittest.skip("Skipping test that erroneously fails on some sites.") def test_share_thumbnail(self): """share thumbnail between two entities""" @@ -2899,7 +2900,7 @@ def _check_attachment(self, data, attachment_id, additional_fields): # For now skip tests that are erroneously failling on some sites to # allow CI to pass until the known issue causing this is resolved. - @base.skip("Skipping test that erroneously fails on some sites.") + @unittest.skip("Skipping test that erroneously fails on some sites.") def test_simple(self): """ Test note reply thread API call @@ -2978,7 +2979,7 @@ def test_simple(self): # For now skip tests that are erroneously failling on some sites to # allow CI to pass until the known issue causing this is resolved. 
- @base.skip("Skipping test that erroneously fails on some sites.") + @unittest.skip("Skipping test that erroneously fails on some sites.") def test_complex(self): """ Test note reply thread API call with additional params diff --git a/tests/test_client.py b/tests/test_client.py index 9c826322e..164731908 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -12,25 +12,21 @@ CRUD functions. These tests always use a mock http connection so not not need a live server to run against.""" +import configparser +import base64 import datetime +import json import os import platform import re import sys import time import unittest +import urllib.parse +import urllib.error -from shotgun_api3.lib.six.moves import urllib from shotgun_api3.lib import six, sgutils -try: - import simplejson as json -except ImportError: - try: - import json as json - except ImportError: - import shotgun_api3.lib.simplejson as json - from . import mock import shotgun_api3.lib.httplib2 as httplib2 @@ -38,17 +34,12 @@ from shotgun_api3.shotgun import ServerCapabilities, SG_TIMEZONE from . 
import base -if six.PY3: - from base64 import encodebytes as base64encode -else: - from base64 import encodestring as base64encode - def b64encode(val): if isinstance(val, str): val = val.encode("utf-8") - return base64encode(val).decode("utf-8") + return base64.encodebytes(val).decode("utf-8") class TestShotgunClient(base.MockTestBase): @@ -190,7 +181,7 @@ def test_read_config(self): """Validate that config values are properly coerced.""" this_dir = os.path.dirname(os.path.realpath(__file__)) config_path = os.path.join(this_dir, "test_config_file") - config = base.ConfigParser() + config = configparser.ConfigParser() config.read(config_path) result = config.get("SERVER_INFO", "api_key") expected = "%abce" diff --git a/tests/test_unit.py b/tests/test_unit.py index ff78253c2..d2853e5df 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -13,11 +13,12 @@ import os import unittest from unittest import mock +import urllib.request +import urllib.error from .mock import patch import shotgun_api3 as api from shotgun_api3.shotgun import _is_mimetypes_broken -from shotgun_api3.lib.six.moves import range, urllib from shotgun_api3.lib.httplib2 import Http, ssl_error_classes From 8c5ef90ea9fa39440974d672788133889fc614c7 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 07:54:24 -0700 Subject: [PATCH 34/59] SG-38306 Python2 Removal - Part 4 - Mimetypes module (#401) * Remove deprecated custom mimetype module --------- Co-authored-by: Eduardo Chauca --- shotgun_api3/lib/README.md | 13 - shotgun_api3/lib/mimetypes.py | 598 ---------------------------------- shotgun_api3/shotgun.py | 29 +- tests/test_unit.py | 19 -- 4 files changed, 1 insertion(+), 658 deletions(-) delete mode 100644 shotgun_api3/lib/mimetypes.py diff --git a/shotgun_api3/lib/README.md b/shotgun_api3/lib/README.md index 1bdec6f78..7097b6c62 100644 --- a/shotgun_api3/lib/README.md +++ b/shotgun_api3/lib/README.md @@ -10,19 +10,6 @@ Some 
third-party modules are bundled with `python-api` inside lib. The version of `httplib2` bundled should be updated manually, however its version is included in the unused `shotgun_api3/lib/requirements.txt` to allow Github's automated CVE notifications to work. -### mimetypes - -The `mimetypes` module is broken on Windows only for Python 2.7.0 to 2.7.9 inclusively. -We bundle the version from 2.7.10 - -See bugs: - - * [9291](http://bugs.python.org/issue9291) (Fixed in 2.7.7) - * [21652](http://bugs.python.org/issue21652) (Fixed in 2.7.8) - * [22028](http://bugs.python.org/issue22028) (Fixed in 2.7.10) - -The version of `mimetypes` bundled should be updated manually if necessary, however it is unlikely this will be needed, as it is only used for Python versions 2.7.0 - 2.7.9, and newer Python versions simply use the native `mimetypes` module. - ### six Six is a Python 2/3 compatibility library. In python-api, it's used to make simultaneous support for Python on 2 and 3 easier to maintain and more readable, but allowing the use of common helper functions, unified interfaces for modules that changed, and variables to ease type comparisons. For more on six, see the [documentation](https://six.readthedocs.io/). diff --git a/shotgun_api3/lib/mimetypes.py b/shotgun_api3/lib/mimetypes.py deleted file mode 100644 index bc8488535..000000000 --- a/shotgun_api3/lib/mimetypes.py +++ /dev/null @@ -1,598 +0,0 @@ -"""Guess the MIME type of a file. - -This module defines two useful functions: - -guess_type(url, strict=1) -- guess the MIME type and encoding of a URL. - -guess_extension(type, strict=1) -- guess the extension for a given MIME type. 
- -It also contains the following, for tuning the behavior: - -Data: - -knownfiles -- list of files to parse -inited -- flag set when init() has been called -suffix_map -- dictionary mapping suffixes to suffixes -encodings_map -- dictionary mapping suffixes to encodings -types_map -- dictionary mapping suffixes to types - -Functions: - -init([files]) -- parse a list of files, default knownfiles (on Windows, the - default values are taken from the registry) -read_mime_types(file) -- parse one file, return a dictionary or None - -Note that this code has not been updated for python 3 compatibility, as it is -a patched version of the native mimetypes module and is used only in Python -versions 2.7.0 - 2.7.9, which included a broken version of the mimetypes module. -""" - -import os -import sys -import posixpath -import urllib -try: - import _winreg -except ImportError: - _winreg = None - -__all__ = [ - "guess_type","guess_extension","guess_all_extensions", - "add_type","read_mime_types","init" -] - -knownfiles = [ - "/etc/mime.types", - "/etc/httpd/mime.types", # Mac OS X - "/etc/httpd/conf/mime.types", # Apache - "/etc/apache/mime.types", # Apache 1 - "/etc/apache2/mime.types", # Apache 2 - "/usr/local/etc/httpd/conf/mime.types", - "/usr/local/lib/netscape/mime.types", - "/usr/local/etc/httpd/conf/mime.types", # Apache 1.2 - "/usr/local/etc/mime.types", # Apache 1.3 - ] - -inited = False -_db = None - - -class MimeTypes: - """MIME-types datastore. - - This datastore can handle information from mime.types-style files - and supports basic determination of MIME type from a filename or - URL, and can guess a reasonable extension given a MIME type. 
- """ - - def __init__(self, filenames=(), strict=True): - if not inited: - init() - self.encodings_map = encodings_map.copy() - self.suffix_map = suffix_map.copy() - self.types_map = ({}, {}) # dict for (non-strict, strict) - self.types_map_inv = ({}, {}) - for (ext, type) in types_map.items(): - self.add_type(type, ext, True) - for (ext, type) in common_types.items(): - self.add_type(type, ext, False) - for name in filenames: - self.read(name, strict) - - def add_type(self, type, ext, strict=True): - """Add a mapping between a type and an extension. - - When the extension is already known, the new - type will replace the old one. When the type - is already known the extension will be added - to the list of known extensions. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ - self.types_map[strict][ext] = type - exts = self.types_map_inv[strict].setdefault(type, []) - if ext not in exts: - exts.append(ext) - - def guess_type(self, url, strict=True): - """Guess the type of a file based on its URL. - - Return value is a tuple (type, encoding) where type is None if - the type can't be guessed (no or unknown suffix) or a string - of the form type/subtype, usable for a MIME Content-type - header; and encoding is None for no encoding or the name of - the program used to encode (e.g. compress or gzip). The - mappings are table driven. Encoding suffixes are case - sensitive; type suffixes are first tried case sensitive, then - case insensitive. - - The suffixes .tgz, .taz and .tz (case sensitive!) are all - mapped to '.tar.gz'. (This is table-driven too, using the - dictionary suffix_map.) - - Optional `strict' argument when False adds a bunch of commonly found, - but non-standard types. 
- """ - scheme, url = urllib.splittype(url) - if scheme == 'data': - # syntax of data URLs: - # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data - # mediatype := [ type "/" subtype ] *( ";" parameter ) - # data := *urlchar - # parameter := attribute "=" value - # type/subtype defaults to "text/plain" - comma = url.find(',') - if comma < 0: - # bad data URL - return None, None - semi = url.find(';', 0, comma) - if semi >= 0: - type = url[:semi] - else: - type = url[:comma] - if '=' in type or '/' not in type: - type = 'text/plain' - return type, None # never compressed, so encoding is None - base, ext = posixpath.splitext(url) - while ext in self.suffix_map: - base, ext = posixpath.splitext(base + self.suffix_map[ext]) - if ext in self.encodings_map: - encoding = self.encodings_map[ext] - base, ext = posixpath.splitext(base) - else: - encoding = None - types_map = self.types_map[True] - if ext in types_map: - return types_map[ext], encoding - elif ext.lower() in types_map: - return types_map[ext.lower()], encoding - elif strict: - return None, encoding - types_map = self.types_map[False] - if ext in types_map: - return types_map[ext], encoding - elif ext.lower() in types_map: - return types_map[ext.lower()], encoding - else: - return None, encoding - - def guess_all_extensions(self, type, strict=True): - """Guess the extensions for a file based on its MIME type. - - Return value is a list of strings giving the possible filename - extensions, including the leading dot ('.'). The extension is not - guaranteed to have been associated with any particular data stream, - but would be mapped to the MIME type `type' by guess_type(). - - Optional `strict' argument when false adds a bunch of commonly found, - but non-standard types. 
- """ - type = type.lower() - extensions = self.types_map_inv[True].get(type, []) - if not strict: - for ext in self.types_map_inv[False].get(type, []): - if ext not in extensions: - extensions.append(ext) - return extensions - - def guess_extension(self, type, strict=True): - """Guess the extension for a file based on its MIME type. - - Return value is a string giving a filename extension, - including the leading dot ('.'). The extension is not - guaranteed to have been associated with any particular data - stream, but would be mapped to the MIME type `type' by - guess_type(). If no extension can be guessed for `type', None - is returned. - - Optional `strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - extensions = self.guess_all_extensions(type, strict) - if not extensions: - return None - return extensions[0] - - def read(self, filename, strict=True): - """ - Read a single mime.types-format file, specified by pathname. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ - with open(filename) as fp: - self.readfp(fp, strict) - - def readfp(self, fp, strict=True): - """ - Read a single mime.types-format file. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. - """ - while 1: - line = fp.readline() - if not line: - break - words = line.split() - for i in range(len(words)): - if words[i][0] == '#': - del words[i:] - break - if not words: - continue - type, suffixes = words[0], words[1:] - for suff in suffixes: - self.add_type(type, '.' + suff, strict) - - def read_windows_registry(self, strict=True): - """ - Load the MIME types database from Windows registry. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. 
- """ - - # Windows only - if not _winreg: - return - - def enum_types(mimedb): - i = 0 - while True: - try: - ctype = _winreg.EnumKey(mimedb, i) - except EnvironmentError: - break - else: - if '\0' not in ctype: - yield ctype - i += 1 - - default_encoding = sys.getdefaultencoding() - with _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT, '') as hkcr: - for subkeyname in enum_types(hkcr): - try: - with _winreg.OpenKey(hkcr, subkeyname) as subkey: - # Only check file extensions - if not subkeyname.startswith("."): - continue - # raises EnvironmentError if no 'Content Type' value - mimetype, datatype = _winreg.QueryValueEx( - subkey, 'Content Type') - if datatype != _winreg.REG_SZ: - continue - try: - mimetype = mimetype.encode(default_encoding) - except UnicodeEncodeError: - continue - self.add_type(mimetype, subkeyname, strict) - except EnvironmentError: - continue - -def guess_type(url, strict=True): - """Guess the type of a file based on its URL. - - Return value is a tuple (type, encoding) where type is None if the - type can't be guessed (no or unknown suffix) or a string of the - form type/subtype, usable for a MIME Content-type header; and - encoding is None for no encoding or the name of the program used - to encode (e.g. compress or gzip). The mappings are table - driven. Encoding suffixes are case sensitive; type suffixes are - first tried case sensitive, then case insensitive. - - The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped - to ".tar.gz". (This is table-driven too, using the dictionary - suffix_map). - - Optional `strict' argument when false adds a bunch of commonly found, but - non-standard types. - """ - if _db is None: - init() - return _db.guess_type(url, strict) - - -def guess_all_extensions(type, strict=True): - """Guess the extensions for a file based on its MIME type. - - Return value is a list of strings giving the possible filename - extensions, including the leading dot ('.'). 
The extension is not - guaranteed to have been associated with any particular data - stream, but would be mapped to the MIME type `type' by - guess_type(). If no extension can be guessed for `type', None - is returned. - - Optional `strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - if _db is None: - init() - return _db.guess_all_extensions(type, strict) - -def guess_extension(type, strict=True): - """Guess the extension for a file based on its MIME type. - - Return value is a string giving a filename extension, including the - leading dot ('.'). The extension is not guaranteed to have been - associated with any particular data stream, but would be mapped to the - MIME type `type' by guess_type(). If no extension can be guessed for - `type', None is returned. - - Optional `strict' argument when false adds a bunch of commonly found, - but non-standard types. - """ - if _db is None: - init() - return _db.guess_extension(type, strict) - -def add_type(type, ext, strict=True): - """Add a mapping between a type and an extension. - - When the extension is already known, the new - type will replace the old one. When the type - is already known the extension will be added - to the list of known extensions. - - If strict is true, information will be added to - list of standard types, else to the list of non-standard - types. 
- """ - if _db is None: - init() - return _db.add_type(type, ext, strict) - - -def init(files=None): - global suffix_map, types_map, encodings_map, common_types - global inited, _db - inited = True # so that MimeTypes.__init__() doesn't call us again - db = MimeTypes() - if files is None: - if _winreg: - db.read_windows_registry() - files = knownfiles - for file in files: - if os.path.isfile(file): - db.read(file) - encodings_map = db.encodings_map - suffix_map = db.suffix_map - types_map = db.types_map[True] - common_types = db.types_map[False] - # Make the DB a global variable now that it is fully initialized - _db = db - - -def read_mime_types(file): - try: - f = open(file) - except IOError: - return None - with f: - db = MimeTypes() - db.readfp(f, True) - return db.types_map[True] - - -def _default_mime_types(): - global suffix_map - global encodings_map - global types_map - global common_types - - suffix_map = { - '.tgz': '.tar.gz', - '.taz': '.tar.gz', - '.tz': '.tar.gz', - '.tbz2': '.tar.bz2', - '.txz': '.tar.xz', - } - - encodings_map = { - '.gz': 'gzip', - '.Z': 'compress', - '.bz2': 'bzip2', - '.xz': 'xz', - } - - # Before adding new types, make sure they are either registered with IANA, - # at http://www.isi.edu/in-notes/iana/assignments/media-types - # or extensions, i.e. using the x- prefix - - # If you add to these, please keep them sorted! 
- types_map = { - '.a' : 'application/octet-stream', - '.ai' : 'application/postscript', - '.aif' : 'audio/x-aiff', - '.aifc' : 'audio/x-aiff', - '.aiff' : 'audio/x-aiff', - '.au' : 'audio/basic', - '.avi' : 'video/x-msvideo', - '.bat' : 'text/plain', - '.bcpio' : 'application/x-bcpio', - '.bin' : 'application/octet-stream', - '.bmp' : 'image/x-ms-bmp', - '.c' : 'text/plain', - # Duplicates :( - '.cdf' : 'application/x-cdf', - '.cdf' : 'application/x-netcdf', - '.cpio' : 'application/x-cpio', - '.csh' : 'application/x-csh', - '.css' : 'text/css', - '.dll' : 'application/octet-stream', - '.doc' : 'application/msword', - '.dot' : 'application/msword', - '.dvi' : 'application/x-dvi', - '.eml' : 'message/rfc822', - '.eps' : 'application/postscript', - '.etx' : 'text/x-setext', - '.exe' : 'application/octet-stream', - '.gif' : 'image/gif', - '.gtar' : 'application/x-gtar', - '.h' : 'text/plain', - '.hdf' : 'application/x-hdf', - '.htm' : 'text/html', - '.html' : 'text/html', - '.ico' : 'image/vnd.microsoft.icon', - '.ief' : 'image/ief', - '.jpe' : 'image/jpeg', - '.jpeg' : 'image/jpeg', - '.jpg' : 'image/jpeg', - '.js' : 'application/javascript', - '.ksh' : 'text/plain', - '.latex' : 'application/x-latex', - '.m1v' : 'video/mpeg', - '.man' : 'application/x-troff-man', - '.me' : 'application/x-troff-me', - '.mht' : 'message/rfc822', - '.mhtml' : 'message/rfc822', - '.mif' : 'application/x-mif', - '.mov' : 'video/quicktime', - '.movie' : 'video/x-sgi-movie', - '.mp2' : 'audio/mpeg', - '.mp3' : 'audio/mpeg', - '.mp4' : 'video/mp4', - '.mpa' : 'video/mpeg', - '.mpe' : 'video/mpeg', - '.mpeg' : 'video/mpeg', - '.mpg' : 'video/mpeg', - '.ms' : 'application/x-troff-ms', - '.nc' : 'application/x-netcdf', - '.nws' : 'message/rfc822', - '.o' : 'application/octet-stream', - '.obj' : 'application/octet-stream', - '.oda' : 'application/oda', - '.p12' : 'application/x-pkcs12', - '.p7c' : 'application/pkcs7-mime', - '.pbm' : 'image/x-portable-bitmap', - '.pdf' : 'application/pdf', - 
'.pfx' : 'application/x-pkcs12', - '.pgm' : 'image/x-portable-graymap', - '.pl' : 'text/plain', - '.png' : 'image/png', - '.pnm' : 'image/x-portable-anymap', - '.pot' : 'application/vnd.ms-powerpoint', - '.ppa' : 'application/vnd.ms-powerpoint', - '.ppm' : 'image/x-portable-pixmap', - '.pps' : 'application/vnd.ms-powerpoint', - '.ppt' : 'application/vnd.ms-powerpoint', - '.ps' : 'application/postscript', - '.pwz' : 'application/vnd.ms-powerpoint', - '.py' : 'text/x-python', - '.pyc' : 'application/x-python-code', - '.pyo' : 'application/x-python-code', - '.qt' : 'video/quicktime', - '.ra' : 'audio/x-pn-realaudio', - '.ram' : 'application/x-pn-realaudio', - '.ras' : 'image/x-cmu-raster', - '.rdf' : 'application/xml', - '.rgb' : 'image/x-rgb', - '.roff' : 'application/x-troff', - '.rtx' : 'text/richtext', - '.sgm' : 'text/x-sgml', - '.sgml' : 'text/x-sgml', - '.sh' : 'application/x-sh', - '.shar' : 'application/x-shar', - '.snd' : 'audio/basic', - '.so' : 'application/octet-stream', - '.src' : 'application/x-wais-source', - '.sv4cpio': 'application/x-sv4cpio', - '.sv4crc' : 'application/x-sv4crc', - '.swf' : 'application/x-shockwave-flash', - '.t' : 'application/x-troff', - '.tar' : 'application/x-tar', - '.tcl' : 'application/x-tcl', - '.tex' : 'application/x-tex', - '.texi' : 'application/x-texinfo', - '.texinfo': 'application/x-texinfo', - '.tif' : 'image/tiff', - '.tiff' : 'image/tiff', - '.tr' : 'application/x-troff', - '.tsv' : 'text/tab-separated-values', - '.txt' : 'text/plain', - '.ustar' : 'application/x-ustar', - '.vcf' : 'text/x-vcard', - '.wav' : 'audio/x-wav', - '.wiz' : 'application/msword', - '.wsdl' : 'application/xml', - '.xbm' : 'image/x-xbitmap', - '.xlb' : 'application/vnd.ms-excel', - # Duplicates :( - '.xls' : 'application/excel', - '.xls' : 'application/vnd.ms-excel', - '.xml' : 'text/xml', - '.xpdl' : 'application/xml', - '.xpm' : 'image/x-xpixmap', - '.xsl' : 'application/xml', - '.xwd' : 'image/x-xwindowdump', - '.zip' : 'application/zip', 
- } - - # These are non-standard types, commonly found in the wild. They will - # only match if strict=0 flag is given to the API methods. - - # Please sort these too - common_types = { - '.jpg' : 'image/jpg', - '.mid' : 'audio/midi', - '.midi': 'audio/midi', - '.pct' : 'image/pict', - '.pic' : 'image/pict', - '.pict': 'image/pict', - '.rtf' : 'application/rtf', - '.xul' : 'text/xul' - } - - -_default_mime_types() - - -if __name__ == '__main__': - import getopt - - USAGE = """\ -Usage: mimetypes.py [options] type - -Options: - --help / -h -- print this message and exit - --lenient / -l -- additionally search of some common, but non-standard - types. - --extension / -e -- guess extension instead of type - -More than one type argument may be given. -""" - - def usage(code, msg=''): - print USAGE - if msg: print msg - sys.exit(code) - - try: - opts, args = getopt.getopt(sys.argv[1:], 'hle', - ['help', 'lenient', 'extension']) - except getopt.error, msg: - usage(1, msg) - - strict = 1 - extension = 0 - for opt, arg in opts: - if opt in ('-h', '--help'): - usage(0) - elif opt in ('-l', '--lenient'): - strict = 0 - elif opt in ('-e', '--extension'): - extension = 1 - for gtype in args: - if extension: - guess = guess_extension(gtype, strict) - if not guess: print "I don't know anything about type", gtype - else: print guess - else: - guess, encoding = guess_type(gtype, strict) - if not guess: print "I don't know anything about type", gtype - else: print 'type:', guess, 'encoding:', encoding \ No newline at end of file diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 3a2c9b8eb..778393309 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -37,6 +37,7 @@ import http.cookiejar # used for attachment upload import io # used for attachment upload import logging +import mimetypes import os import re import shutil # used for attachment download @@ -72,34 +73,6 @@ """ LOG.setLevel(logging.WARN) - -def _is_mimetypes_broken(): - """ - Checks if 
this version of Python ships with a broken version of mimetypes - - :returns: True if the version of mimetypes is broken, False otherwise. - """ - # mimetypes is broken on Windows only and for Python 2.7.0 to 2.7.9 inclusively. - # We're bundling the version from 2.7.10. - # See bugs : - # http://bugs.python.org/issue9291 <- Fixed in 2.7.7 - # http://bugs.python.org/issue21652 <- Fixed in 2.7.8 - # http://bugs.python.org/issue22028 <- Fixed in 2.7.10 - return ( - sys.platform == "win32" - and sys.version_info[0] == 2 - and sys.version_info[1] == 7 - and sys.version_info[2] >= 0 - and sys.version_info[2] <= 9 - ) - - -if _is_mimetypes_broken(): - from .lib import mimetypes as mimetypes -else: - import mimetypes - - # mimetypes imported in version specific imports mimetypes.add_type("video/webm", ".webm") # webm and mp4 seem to be missing mimetypes.add_type("video/mp4", ".mp4") # from some OS/distros diff --git a/tests/test_unit.py b/tests/test_unit.py index d2853e5df..58e46d366 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -18,7 +18,6 @@ from .mock import patch import shotgun_api3 as api -from shotgun_api3.shotgun import _is_mimetypes_broken from shotgun_api3.lib.httplib2 import Http, ssl_error_classes @@ -800,23 +799,5 @@ def test_urlib(self): assert response is not None -class TestMimetypesFix(unittest.TestCase): - """ - Makes sure that the mimetypes fix will be imported. - """ - - @patch("shotgun_api3.shotgun.sys") - def _test_mimetypes_import( - self, platform, major, minor, patch_number, result, mock - ): - """ - Mocks sys.platform and sys.version_info to test the mimetypes import code. 
- """ - - mock.version_info = [major, minor, patch_number] - mock.platform = platform - self.assertEqual(_is_mimetypes_broken(), result) - - if __name__ == "__main__": unittest.main() From a15e61778e2a2fa8c56420b05c52c05d37dda298 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 10:27:15 -0700 Subject: [PATCH 35/59] SG-38306 Python2 Removal - Part 5 - Remove deprecated backported mock module (#402) * Remove deprecated backported mock module --- .flake8 | 2 +- tests/base.py | 13 +- tests/mock.py | 993 ------------------------------------------- tests/test_api.py | 36 +- tests/test_client.py | 11 +- tests/test_unit.py | 5 +- 6 files changed, 33 insertions(+), 1027 deletions(-) delete mode 100644 tests/mock.py diff --git a/.flake8 b/.flake8 index 343f01039..4fc6605a0 100644 --- a/.flake8 +++ b/.flake8 @@ -10,4 +10,4 @@ [flake8] max-line-length = 120 -exclude = shotgun_api3/lib/httplib2/*,shotgun_api3/lib/six.py,tests/httplib2test.py,tests/mock.py +exclude = shotgun_api3/lib/httplib2/*,shotgun_api3/lib/six.py,tests/httplib2test.py diff --git a/tests/base.py b/tests/base.py index e30ec01a4..d1f138f47 100644 --- a/tests/base.py +++ b/tests/base.py @@ -8,10 +8,9 @@ import re import time import unittest +import unittest.mock import urllib.error -from . 
import mock - import shotgun_api3 as api from shotgun_api3.shotgun import ServerCapabilities from shotgun_api3.lib import six @@ -133,12 +132,12 @@ def _setup_mock(self, s3_status_code_error=503): """Setup mocking on the ShotgunClient to stop it calling a live server""" # Replace the function used to make the final call to the server # eaiser than mocking the http connection + response - self.sg._http_request = mock.Mock( + self.sg._http_request = unittest.mock.Mock( spec=api.Shotgun._http_request, return_value=((200, "OK"), {}, None) ) # Replace the function used to make the final call to the S3 server, and simulate # the exception HTTPError raised with 503 status errors - self.sg._make_upload_request = mock.Mock( + self.sg._make_upload_request = unittest.mock.Mock( spec=api.Shotgun._make_upload_request, side_effect=urllib.error.HTTPError( "url", @@ -152,12 +151,12 @@ def _setup_mock(self, s3_status_code_error=503): # also replace the function that is called to get the http connection # to avoid calling the server. 
OK to return a mock as we will not use # it - self.mock_conn = mock.Mock(spec=api.lib.httplib2.Http) + self.mock_conn = unittest.mock.Mock(spec=api.lib.httplib2.Http) # The Http objects connection property is a dict of connections # it is holding self.mock_conn.connections = dict() self.sg._connection = self.mock_conn - self.sg._get_connection = mock.Mock(return_value=self.mock_conn) + self.sg._get_connection = unittest.mock.Mock(return_value=self.mock_conn) # create the server caps directly to say we have the correct version self.sg._server_caps = ServerCapabilities( @@ -173,7 +172,7 @@ def _mock_http(self, data, headers=None, status=None): """ # test for a mock object rather than config.mock as some tests # force the mock to be created - if not isinstance(self.sg._http_request, mock.Mock): + if not isinstance(self.sg._http_request, unittest.mock.Mock): return if not isinstance(data, str): diff --git a/tests/mock.py b/tests/mock.py deleted file mode 100644 index 736571c64..000000000 --- a/tests/mock.py +++ /dev/null @@ -1,993 +0,0 @@ -# mock.py -# Test tools for mocking and patching. -# Copyright (C) 2007-2011 Michael Foord & the mock team -# E-mail: fuzzyman AT voidspace DOT org DOT uk - -# mock 0.7.0 -# http://www.voidspace.org.uk/python/mock/ - -# Released subject to the BSD License -# Please see http://www.voidspace.org.uk/python/license.shtml - -# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml -# Comments, suggestions and bug reports welcome. 
- - -__all__ = ( - "Mock", - "MagicMock", - "mocksignature", - "patch", - "patch_object", - "sentinel", - "DEFAULT", -) - -__version__ = "0.7.0" - -__unittest = True - - -import sys -import warnings - -try: - import inspect -except ImportError: - # for alternative platforms that - # may not have inspect - inspect = None - -try: - from functools import wraps -except ImportError: - # Python 2.4 compatibility - def wraps(original): - def inner(f): - f.__name__ = original.__name__ - f.__doc__ = original.__doc__ - f.__module__ = original.__module__ - return f - - return inner - - -try: - unicode -except NameError: - # Python 3 - basestring = unicode = str - -try: - long -except NameError: - # Python 3 - long = int - -inPy3k = sys.version_info[0] == 3 - -if inPy3k: - self = "__self__" -else: - self = "im_self" - - -# getsignature and mocksignature heavily "inspired" by -# the decorator module: http://pypi.python.org/pypi/decorator/ -# by Michele Simionato - - -def _getsignature(func, skipfirst): - if inspect is None: - raise ImportError("inspect module not available") - - if inspect.isclass(func): - func = func.__init__ - # will have a self arg - skipfirst = True - elif not (inspect.ismethod(func) or inspect.isfunction(func)): - func = func.__call__ - - regargs, varargs, varkwargs, defaults = inspect.getargspec(func) - - # instance methods need to lose the self argument - if getattr(func, self, None) is not None: - regargs = regargs[1:] - - _msg = "_mock_ is a reserved argument name, can't mock signatures using _mock_" - assert "_mock_" not in regargs, _msg - if varargs is not None: - assert "_mock_" not in varargs, _msg - if varkwargs is not None: - assert "_mock_" not in varkwargs, _msg - if skipfirst: - regargs = regargs[1:] - signature = inspect.formatargspec( - regargs, varargs, varkwargs, defaults, formatvalue=lambda value: "" - ) - return signature[1:-1], func - - -def _copy_func_details(func, funcopy): - funcopy.__name__ = func.__name__ - funcopy.__doc__ = 
func.__doc__ - funcopy.__dict__.update(func.__dict__) - funcopy.__module__ = func.__module__ - if not inPy3k: - funcopy.func_defaults = func.func_defaults - else: - funcopy.__defaults__ = func.__defaults__ - funcopy.__kwdefaults__ = func.__kwdefaults__ - - -def mocksignature(func, mock=None, skipfirst=False): - """ - mocksignature(func, mock=None, skipfirst=False) - - Create a new function with the same signature as `func` that delegates - to `mock`. If `skipfirst` is True the first argument is skipped, useful - for methods where `self` needs to be omitted from the new function. - - If you don't pass in a `mock` then one will be created for you. - - The mock is set as the `mock` attribute of the returned function for easy - access. - - `mocksignature` can also be used with classes. It copies the signature of - the `__init__` method. - - When used with callable objects (instances) it copies the signature of the - `__call__` method. - """ - if mock is None: - mock = Mock() - signature, func = _getsignature(func, skipfirst) - src = "lambda %(signature)s: _mock_(%(signature)s)" % {"signature": signature} - - funcopy = eval(src, dict(_mock_=mock)) - _copy_func_details(func, funcopy) - funcopy.mock = mock - return funcopy - - -def _is_magic(name): - return "__%s__" % name[2:-2] == name - - -class SentinelObject(object): - "A unique, named, sentinel object." 
- - def __init__(self, name): - self.name = name - - def __repr__(self): - return '' % self.name - - -class Sentinel(object): - """Access attributes to return a named object, usable as a sentinel.""" - - def __init__(self): - self._sentinels = {} - - def __getattr__(self, name): - if name == "__bases__": - # Without this help(mock) raises an exception - raise AttributeError - return self._sentinels.setdefault(name, SentinelObject(name)) - - -sentinel = Sentinel() - -DEFAULT = sentinel.DEFAULT - - -class OldStyleClass: - pass - - -ClassType = type(OldStyleClass) - - -def _copy(value): - if type(value) in (dict, list, tuple, set): - return type(value)(value) - return value - - -if inPy3k: - class_types = type -else: - class_types = (type, ClassType) - - -class Mock(object): - """ - Create a new ``Mock`` object. ``Mock`` takes several optional arguments - that specify the behaviour of the Mock object: - - * ``spec``: This can be either a list of strings or an existing object (a - class or instance) that acts as the specification for the mock object. If - you pass in an object then a list of strings is formed by calling dir on - the object (excluding unsupported magic attributes and methods). Accessing - any attribute not in this list will raise an ``AttributeError``. - - If ``spec`` is an object (rather than a list of strings) then - `mock.__class__` returns the class of the spec object. This allows mocks - to pass `isinstance` tests. - - * ``spec_set``: A stricter variant of ``spec``. If used, attempting to *set* - or get an attribute on the mock that isn't on the object passed as - ``spec_set`` will raise an ``AttributeError``. - - * ``side_effect``: A function to be called whenever the Mock is called. See - the :attr:`Mock.side_effect` attribute. Useful for raising exceptions or - dynamically changing return values. 
The function is called with the same - arguments as the mock, and unless it returns :data:`DEFAULT`, the return - value of this function is used as the return value. - - Alternatively ``side_effect`` can be an exception class or instance. In - this case the exception will be raised when the mock is called. - - * ``return_value``: The value returned when the mock is called. By default - this is a new Mock (created on first access). See the - :attr:`Mock.return_value` attribute. - - * ``wraps``: Item for the mock object to wrap. If ``wraps`` is not None - then calling the Mock will pass the call through to the wrapped object - (returning the real result and ignoring ``return_value``). Attribute - access on the mock will return a Mock object that wraps the corresponding - attribute of the wrapped object (so attempting to access an attribute that - doesn't exist will raise an ``AttributeError``). - - If the mock has an explicit ``return_value`` set then calls are not passed - to the wrapped object and the ``return_value`` is returned instead. - - * ``name``: If the mock has a name then it will be used in the repr of the - mock. This can be useful for debugging. The name is propagated to child - mocks. 
- """ - - def __new__(cls, *args, **kw): - # every instance has its own class - # so we can create magic methods on the - # class without stomping on other mocks - new = type(cls.__name__, (cls,), {"__doc__": cls.__doc__}) - return object.__new__(new) - - def __init__( - self, - spec=None, - side_effect=None, - return_value=DEFAULT, - wraps=None, - name=None, - spec_set=None, - parent=None, - ): - self._parent = parent - self._name = name - _spec_class = None - if spec_set is not None: - spec = spec_set - spec_set = True - - if spec is not None and not isinstance(spec, list): - if isinstance(spec, class_types): - _spec_class = spec - else: - _spec_class = spec.__class__ - spec = dir(spec) - - self._spec_class = _spec_class - self._spec_set = spec_set - self._methods = spec - self._children = {} - self._return_value = return_value - self.side_effect = side_effect - self._wraps = wraps - - self.reset_mock() - - @property - def __class__(self): - if self._spec_class is None: - return type(self) - return self._spec_class - - def reset_mock(self): - "Restore the mock object to its initial state." - self.called = False - self.call_args = None - self.call_count = 0 - self.call_args_list = [] - self.method_calls = [] - for child in self._children.values(): - child.reset_mock() - if isinstance(self._return_value, Mock): - if not self._return_value is self: - self._return_value.reset_mock() - - def __get_return_value(self): - if self._return_value is DEFAULT: - self._return_value = self._get_child_mock() - return self._return_value - - def __set_return_value(self, value): - self._return_value = value - - __return_value_doc = "The value to be returned when the mock is called." 
- return_value = property(__get_return_value, __set_return_value, __return_value_doc) - - def __call__(self, *args, **kwargs): - self.called = True - self.call_count += 1 - self.call_args = callargs((args, kwargs)) - self.call_args_list.append(callargs((args, kwargs))) - - parent = self._parent - name = self._name - while parent is not None: - parent.method_calls.append(callargs((name, args, kwargs))) - if parent._parent is None: - break - name = parent._name + "." + name - parent = parent._parent - - ret_val = DEFAULT - if self.side_effect is not None: - if ( - isinstance(self.side_effect, BaseException) - or isinstance(self.side_effect, class_types) - and issubclass(self.side_effect, BaseException) - ): - raise self.side_effect - - ret_val = self.side_effect(*args, **kwargs) - if ret_val is DEFAULT: - ret_val = self.return_value - - if self._wraps is not None and self._return_value is DEFAULT: - return self._wraps(*args, **kwargs) - if ret_val is DEFAULT: - ret_val = self.return_value - return ret_val - - def __getattr__(self, name): - if name == "_methods": - raise AttributeError(name) - elif self._methods is not None: - if name not in self._methods or name in _all_magics: - raise AttributeError("Mock object has no attribute '%s'" % name) - elif _is_magic(name): - raise AttributeError(name) - - if name not in self._children: - wraps = None - if self._wraps is not None: - wraps = getattr(self._wraps, name) - self._children[name] = self._get_child_mock( - parent=self, name=name, wraps=wraps - ) - - return self._children[name] - - def __repr__(self): - if self._name is None and self._spec_class is None: - return object.__repr__(self) - - name_string = "" - spec_string = "" - if self._name is not None: - - def get_name(name): - if name is None: - return "mock" - return name - - parent = self._parent - name = self._name - while parent is not None: - name = get_name(parent._name) + "." 
+ name - parent = parent._parent - name_string = " name=%r" % name - if self._spec_class is not None: - spec_string = " spec=%r" - if self._spec_set: - spec_string = " spec_set=%r" - spec_string = spec_string % self._spec_class.__name__ - return "<%s%s%s id='%s'>" % ( - type(self).__name__, - name_string, - spec_string, - id(self), - ) - - def __setattr__(self, name, value): - if not "method_calls" in self.__dict__: - # allow all attribute setting until initialisation is complete - return object.__setattr__(self, name, value) - if ( - self._spec_set - and self._methods is not None - and name not in self._methods - and name not in self.__dict__ - and name != "return_value" - ): - raise AttributeError("Mock object has no attribute '%s'" % name) - if name in _unsupported_magics: - msg = "Attempting to set unsupported magic method %r." % name - raise AttributeError(msg) - elif name in _all_magics: - if self._methods is not None and name not in self._methods: - raise AttributeError("Mock object has no attribute '%s'" % name) - - if not isinstance(value, Mock): - setattr(type(self), name, _get_method(name, value)) - original = value - real = lambda *args, **kw: original(self, *args, **kw) - value = mocksignature(value, real, skipfirst=True) - else: - setattr(type(self), name, value) - return object.__setattr__(self, name, value) - - def __delattr__(self, name): - if name in _all_magics and name in type(self).__dict__: - delattr(type(self), name) - return object.__delattr__(self, name) - - def assert_called_with(self, *args, **kwargs): - """ - assert that the mock was called with the specified arguments. - - Raises an AssertionError if the args and keyword args passed in are - different to the last call to the mock. 
- """ - if self.call_args is None: - raise AssertionError("Expected: %s\nNot called" % ((args, kwargs),)) - if not self.call_args == (args, kwargs): - raise AssertionError( - "Expected: %s\nCalled with: %s" % ((args, kwargs), self.call_args) - ) - - def assert_called_once_with(self, *args, **kwargs): - """ - assert that the mock was called exactly once and with the specified - arguments. - """ - if not self.call_count == 1: - msg = "Expected to be called once. Called %s times." % self.call_count - raise AssertionError(msg) - return self.assert_called_with(*args, **kwargs) - - def _get_child_mock(self, **kw): - klass = type(self).__mro__[1] - return klass(**kw) - - -class callargs(tuple): - """ - A tuple for holding the results of a call to a mock, either in the form - `(args, kwargs)` or `(name, args, kwargs)`. - - If args or kwargs are empty then a callargs tuple will compare equal to - a tuple without those values. This makes comparisons less verbose:: - - callargs('name', (), {}) == ('name',) - callargs('name', (1,), {}) == ('name', (1,)) - callargs((), {'a': 'b'}) == ({'a': 'b'},) - """ - - def __eq__(self, other): - if len(self) == 3: - if other[0] != self[0]: - return False - args_kwargs = self[1:] - other_args_kwargs = other[1:] - else: - args_kwargs = tuple(self) - other_args_kwargs = other - - if len(other_args_kwargs) == 0: - other_args, other_kwargs = (), {} - elif len(other_args_kwargs) == 1: - if isinstance(other_args_kwargs[0], tuple): - other_args = other_args_kwargs[0] - other_kwargs = {} - else: - other_args = () - other_kwargs = other_args_kwargs[0] - else: - other_args, other_kwargs = other_args_kwargs - - return tuple(args_kwargs) == (other_args, other_kwargs) - - -def _dot_lookup(thing, comp, import_path): - try: - return getattr(thing, comp) - except AttributeError: - __import__(import_path) - return getattr(thing, comp) - - -def _importer(target): - components = target.split(".") - import_path = components.pop(0) - thing = 
__import__(import_path) - - for comp in components: - import_path += ".%s" % comp - thing = _dot_lookup(thing, comp, import_path) - return thing - - -class _patch(object): - def __init__(self, target, attribute, new, spec, create, mocksignature, spec_set): - self.target = target - self.attribute = attribute - self.new = new - self.spec = spec - self.create = create - self.has_local = False - self.mocksignature = mocksignature - self.spec_set = spec_set - - def copy(self): - return _patch( - self.target, - self.attribute, - self.new, - self.spec, - self.create, - self.mocksignature, - self.spec_set, - ) - - def __call__(self, func): - if isinstance(func, class_types): - return self.decorate_class(func) - else: - return self.decorate_callable(func) - - def decorate_class(self, klass): - for attr in dir(klass): - attr_value = getattr(klass, attr) - if attr.startswith("test") and hasattr(attr_value, "__call__"): - setattr(klass, attr, self.copy()(attr_value)) - return klass - - def decorate_callable(self, func): - if hasattr(func, "patchings"): - func.patchings.append(self) - return func - - @wraps(func) - def patched(*args, **keywargs): - # don't use a with here (backwards compatability with 2.5) - extra_args = [] - for patching in patched.patchings: - arg = patching.__enter__() - if patching.new is DEFAULT: - extra_args.append(arg) - args += tuple(extra_args) - try: - return func(*args, **keywargs) - finally: - for patching in reversed(getattr(patched, "patchings", [])): - patching.__exit__() - - patched.patchings = [self] - if hasattr(func, "func_code"): - # not in Python 3 - patched.compat_co_firstlineno = getattr( - func, "compat_co_firstlineno", func.func_code.co_firstlineno - ) - return patched - - def get_original(self): - target = self.target - name = self.attribute - - original = DEFAULT - local = False - - try: - original = target.__dict__[name] - except (AttributeError, KeyError): - original = getattr(target, name, DEFAULT) - else: - local = True - - if not 
self.create and original is DEFAULT: - raise AttributeError("%s does not have the attribute %r" % (target, name)) - return original, local - - def __enter__(self): - """Perform the patch.""" - new, spec, spec_set = self.new, self.spec, self.spec_set - original, local = self.get_original() - if new is DEFAULT: - # XXXX what if original is DEFAULT - shouldn't use it as a spec - inherit = False - if spec_set == True: - spec_set = original - if isinstance(spec_set, class_types): - inherit = True - elif spec == True: - # set spec to the object we are replacing - spec = original - if isinstance(spec, class_types): - inherit = True - new = Mock(spec=spec, spec_set=spec_set) - if inherit: - new.return_value = Mock(spec=spec, spec_set=spec_set) - new_attr = new - if self.mocksignature: - new_attr = mocksignature(original, new) - - self.temp_original = original - self.is_local = local - setattr(self.target, self.attribute, new_attr) - return new - - def __exit__(self, *_): - """Undo the patch.""" - if self.is_local and self.temp_original is not DEFAULT: - setattr(self.target, self.attribute, self.temp_original) - else: - delattr(self.target, self.attribute) - if not self.create and not hasattr(self.target, self.attribute): - # needed for proxy objects like django settings - setattr(self.target, self.attribute, self.temp_original) - - del self.temp_original - del self.is_local - - start = __enter__ - stop = __exit__ - - -def _patch_object( - target, - attribute, - new=DEFAULT, - spec=None, - create=False, - mocksignature=False, - spec_set=None, -): - """ - patch.object(target, attribute, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None) - - patch the named member (`attribute`) on an object (`target`) with a mock - object. - - Arguments new, spec, create, mocksignature and spec_set have the same - meaning as for patch. 
- """ - return _patch(target, attribute, new, spec, create, mocksignature, spec_set) - - -def patch_object(*args, **kwargs): - "A deprecated form of patch.object(...)" - warnings.warn(("Please use patch.object instead."), DeprecationWarning, 2) - return _patch_object(*args, **kwargs) - - -def patch( - target, new=DEFAULT, spec=None, create=False, mocksignature=False, spec_set=None -): - """ - ``patch`` acts as a function decorator, class decorator or a context - manager. Inside the body of the function or with statement, the ``target`` - (specified in the form `'PackageName.ModuleName.ClassName'`) is patched - with a ``new`` object. When the function/with statement exits the patch is - undone. - - The target is imported and the specified attribute patched with the new - object, so it must be importable from the environment you are calling the - decorator from. - - If ``new`` is omitted, then a new ``Mock`` is created and passed in as an - extra argument to the decorated function. - - The ``spec`` and ``spec_set`` keyword arguments are passed to the ``Mock`` - if patch is creating one for you. - - In addition you can pass ``spec=True`` or ``spec_set=True``, which causes - patch to pass in the object being mocked as the spec/spec_set object. - - If ``mocksignature`` is True then the patch will be done with a function - created by mocking the one being replaced. If the object being replaced is - a class then the signature of `__init__` will be copied. If the object - being replaced is a callable object then the signature of `__call__` will - be copied. - - By default ``patch`` will fail to replace attributes that don't exist. If - you pass in 'create=True' and the attribute doesn't exist, patch will - create the attribute for you when the patched function is called, and - delete it again afterwards. This is useful for writing tests against - attributes that your production code creates at runtime. It is off by by - default because it can be dangerous. 
With it switched on you can write - passing tests against APIs that don't actually exist! - - Patch can be used as a TestCase class decorator. It works by - decorating each test method in the class. This reduces the boilerplate - code when your test methods share a common patchings set. - - Patch can be used with the with statement, if this is available in your - version of Python. Here the patching applies to the indented block after - the with statement. If you use "as" then the patched object will be bound - to the name after the "as"; very useful if `patch` is creating a mock - object for you. - - `patch.dict(...)` and `patch.object(...)` are available for alternate - use-cases. - """ - try: - target, attribute = target.rsplit(".", 1) - except (TypeError, ValueError): - raise TypeError("Need a valid target to patch. You supplied: %r" % (target,)) - target = _importer(target) - return _patch(target, attribute, new, spec, create, mocksignature, spec_set) - - -class _patch_dict(object): - """ - Patch a dictionary and restore the dictionary to its original state after - the test. - - `in_dict` can be a dictionary or a mapping like container. If it is a - mapping then it must at least support getting, setting and deleting items - plus iterating over keys. - - `in_dict` can also be a string specifying the name of the dictionary, which - will then be fetched by importing it. - - `values` can be a dictionary of values to set in the dictionary. `values` - can also be an iterable of ``(key, value)`` pairs. - - If `clear` is True then the dictionary will be cleared before the new - values are set. - """ - - def __init__(self, in_dict, values=(), clear=False): - if isinstance(in_dict, basestring): - in_dict = _importer(in_dict) - self.in_dict = in_dict - # support any argument supported by dict(...) 
constructor - self.values = dict(values) - self.clear = clear - self._original = None - - def __call__(self, f): - if isinstance(f, class_types): - return self.decorate_class(f) - - @wraps(f) - def _inner(*args, **kw): - self._patch_dict() - try: - return f(*args, **kw) - finally: - self._unpatch_dict() - - return _inner - - def decorate_class(self, klass): - for attr in dir(klass): - attr_value = getattr(klass, attr) - if attr.startswith("test") and hasattr(attr_value, "__call__"): - decorator = _patch_dict(self.in_dict, self.values, self.clear) - decorated = decorator(attr_value) - setattr(klass, attr, decorated) - return klass - - def __enter__(self): - """Patch the dict.""" - self._patch_dict() - - def _patch_dict(self): - """Unpatch the dict.""" - values = self.values - in_dict = self.in_dict - clear = self.clear - - try: - original = in_dict.copy() - except AttributeError: - # dict like object with no copy method - # must support iteration over keys - original = {} - for key in in_dict: - original[key] = in_dict[key] - self._original = original - - if clear: - _clear_dict(in_dict) - - try: - in_dict.update(values) - except AttributeError: - # dict like object with no update method - for key in values: - in_dict[key] = values[key] - - def _unpatch_dict(self): - in_dict = self.in_dict - original = self._original - - _clear_dict(in_dict) - - try: - in_dict.update(original) - except AttributeError: - for key in original: - in_dict[key] = original[key] - - def __exit__(self, *args): - self._unpatch_dict() - return False - - start = __enter__ - stop = __exit__ - - -def _clear_dict(in_dict): - try: - in_dict.clear() - except AttributeError: - keys = list(in_dict) - for key in keys: - del in_dict[key] - - -patch.object = _patch_object -patch.dict = _patch_dict - - -magic_methods = ( - "lt le gt ge eq ne " - "getitem setitem delitem " - "len contains iter " - "hash str sizeof " - "enter exit " - "divmod neg pos abs invert " - "complex int float index " - "trunc floor 
ceil " -) - -numerics = "add sub mul div truediv floordiv mod lshift rshift and xor or pow " -inplace = " ".join("i%s" % n for n in numerics.split()) -right = " ".join("r%s" % n for n in numerics.split()) -extra = "" -if inPy3k: - extra = "bool next " -else: - extra = "unicode long nonzero oct hex " -# __truediv__ and __rtruediv__ not available in Python 3 either - -# not including __prepare__, __instancecheck__, __subclasscheck__ -# (as they are metaclass methods) -# __del__ is not supported at all as it causes problems if it exists - -_non_defaults = set( - "__%s__" % method - for method in [ - "cmp", - "getslice", - "setslice", - "coerce", - "subclasses", - "dir", - "format", - "get", - "set", - "delete", - "reversed", - "missing", - "reduce", - "reduce_ex", - "getinitargs", - "getnewargs", - "getstate", - "setstate", - "getformat", - "setformat", - "repr", - ] -) - - -def _get_method(name, func): - "Turns a callable object (like a mock) into a real function" - - def method(self, *args, **kw): - return func(self, *args, **kw) - - method.__name__ = name - return method - - -_magics = set( - "__%s__" % method - for method in " ".join([magic_methods, numerics, inplace, right, extra]).split() -) - -_all_magics = _magics | _non_defaults - -_unsupported_magics = set( - [ - "__getattr__", - "__setattr__", - "__init__", - "__new__", - "__prepare__" "__instancecheck__", - "__subclasscheck__", - "__del__", - ] -) - -_calculate_return_value = { - "__hash__": lambda self: object.__hash__(self), - "__str__": lambda self: object.__str__(self), - "__sizeof__": lambda self: object.__sizeof__(self), - "__unicode__": lambda self: unicode(object.__str__(self)), -} - -_return_values = { - "__int__": 1, - "__contains__": False, - "__len__": 0, - "__iter__": iter([]), - "__exit__": False, - "__complex__": 1j, - "__float__": 1.0, - "__bool__": True, - "__nonzero__": True, - "__oct__": "1", - "__hex__": "0x1", - "__long__": long(1), - "__index__": 1, -} - - -def _set_return_value(mock, 
method, name): - return_value = DEFAULT - if name in _return_values: - return_value = _return_values[name] - elif name in _calculate_return_value: - try: - return_value = _calculate_return_value[name](mock) - except AttributeError: - return_value = AttributeError(name) - if return_value is not DEFAULT: - method.return_value = return_value - - -class MagicMock(Mock): - """ - MagicMock is a subclass of :Mock with default implementations - of most of the magic methods. You can use MagicMock without having to - configure the magic methods yourself. - - If you use the ``spec`` or ``spec_set`` arguments then *only* magic - methods that exist in the spec will be created. - - Attributes and the return value of a `MagicMock` will also be `MagicMocks`. - """ - - def __init__(self, *args, **kw): - Mock.__init__(self, *args, **kw) - - these_magics = _magics - if self._methods is not None: - these_magics = _magics.intersection(self._methods) - - for entry in these_magics: - # could specify parent? - m = Mock() - setattr(self, entry, m) - _set_return_value(self, m, entry) diff --git a/tests/test_api.py b/tests/test_api.py index cd4dcbeef..d42328f3c 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -22,6 +22,7 @@ import time import types import unittest +import unittest.mock import urllib.parse import urllib.request import urllib.error @@ -39,8 +40,6 @@ import shotgun_api3 from . import base -from . 
import mock -from .mock import patch, MagicMock class TestShotgunApi(base.LiveTestBase): @@ -315,13 +314,12 @@ def test_upload_download(self): # cleanup os.remove(file_path) - @patch("shotgun_api3.Shotgun._send_form") + @unittest.mock.patch("shotgun_api3.Shotgun._send_form") def test_upload_to_sg(self, mock_send_form): """ Upload an attachment tests for _upload_to_sg() """ self.sg.server_info["s3_direct_uploads_enabled"] = False - mock_send_form.method.assert_called_once() mock_send_form.return_value = "1\n:123\nasd" this_dir, _ = os.path.split(__file__) u_path = os.path.abspath( @@ -334,6 +332,7 @@ def test_upload_to_sg(self, mock_send_form): "attachments", tag_list="monkeys, everywhere, send, help", ) + mock_send_form.assert_called_once() mock_send_form_args, _ = mock_send_form.call_args display_name_to_send = mock_send_form_args[1].get("display_name", "") self.assertTrue(isinstance(upload_id, int)) @@ -356,7 +355,7 @@ def test_upload_to_sg(self, mock_send_form): display_name_to_send.startswith("b'") and display_name_to_send.endswith("'") ) - mock_send_form.method.assert_called_once() + mock_send_form.reset_mock() mock_send_form.return_value = "2\nIt can't be upload" self.assertRaises( shotgun_api3.ShotgunError, @@ -367,6 +366,7 @@ def test_upload_to_sg(self, mock_send_form): "attachments", tag_list="monkeys, everywhere, send, help", ) + mock_send_form.assert_called_once() self.sg.server_info["s3_direct_uploads_enabled"] = True def test_upload_thumbnail_in_create(self): @@ -714,11 +714,10 @@ def share_thumbnail_retry(*args, **kwargs): shotgun_api3.ShotgunError, self.sg.share_thumbnail, [self.shot, self.asset] ) - @patch("shotgun_api3.Shotgun._send_form") + @unittest.mock.patch("shotgun_api3.Shotgun._send_form") def test_share_thumbnail_not_ready(self, mock_send_form): """throw an exception if trying to share a transient thumbnail""" - mock_send_form.method.assert_called_once() mock_send_form.return_value = ( "2" "\nsource_entity image is a transient thumbnail 
that cannot be shared. " @@ -732,11 +731,12 @@ def test_share_thumbnail_not_ready(self, mock_send_form): source_entity=self.asset, ) - @patch("shotgun_api3.Shotgun._send_form") + mock_send_form.assert_called_once() + + @unittest.mock.patch("shotgun_api3.Shotgun._send_form") def test_share_thumbnail_returns_error(self, mock_send_form): """throw an exception if server returns an error code""" - mock_send_form.method.assert_called_once() mock_send_form.return_value = "1\nerror message.\n" self.assertRaises( @@ -746,6 +746,8 @@ def test_share_thumbnail_returns_error(self, mock_send_form): source_entity=self.asset, ) + mock_send_form.assert_called_once() + def test_deprecated_functions(self): """Deprecated functions raise errors""" self.assertRaises(shotgun_api3.ShotgunError, self.sg.schema, "foo") @@ -2194,17 +2196,17 @@ def test_bad_auth(self): user = self.sg.find_one("HumanUser", [["login", "is", login]]) self.sg.update("HumanUser", user["id"], {"locked_until": None}) - @patch("shotgun_api3.shotgun.Http.request") + @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_status_not_200(self, mock_request): - response = MagicMock(name="response mock", spec=dict) + response = unittest.mock.MagicMock(name="response mock", spec=dict) response.status = 300 response.reason = "reason" mock_request.return_value = (response, {}) self.assertRaises(shotgun_api3.ProtocolError, self.sg.find_one, "Shot", []) - @patch("shotgun_api3.shotgun.Http.request") + @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_make_call_retry(self, mock_request): - response = MagicMock(name="response mock", spec=dict) + response = unittest.mock.MagicMock(name="response mock", spec=dict) response.status = 200 response.reason = "reason" mock_request.return_value = (response, {}) @@ -2234,7 +2236,7 @@ def my_side_effect2(*args, **kwargs): "EOF occurred in violation of protocol (_ssl.c:2426)" ) - return mock.DEFAULT + return unittest.mock.DEFAULT finally: my_side_effect2.counter 
+= 1 @@ -2260,7 +2262,7 @@ def my_side_effect2(*args, **kwargs): finally: self.sg.config.rpc_attempt_interval = bak_rpc_attempt_interval - @patch("shotgun_api3.shotgun.Http.request") + @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_sha2_error(self, mock_request): # Simulate the exception raised with SHA-2 errors mock_request.side_effect = ShotgunSSLError( @@ -2300,7 +2302,7 @@ def test_sha2_error(self, mock_request): if original_env_val is not None: os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - @patch("shotgun_api3.shotgun.Http.request") + @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_sha2_error_with_strict(self, mock_request): # Simulate the exception raised with SHA-2 errors mock_request.side_effect = ShotgunSSLError( @@ -2331,7 +2333,7 @@ def test_sha2_error_with_strict(self, mock_request): if original_env_val is not None: os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - @patch.object(urllib.request.OpenerDirector, "open") + @unittest.mock.patch.object(urllib.request.OpenerDirector, "open") def test_sanitized_auth_params(self, mock_open): # Simulate the server blowing up and giving us a 500 error mock_open.side_effect = urllib.error.HTTPError("url", 500, "message", {}, None) diff --git a/tests/test_client.py b/tests/test_client.py index 164731908..f99a79806 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -22,13 +22,12 @@ import sys import time import unittest +import unittest.mock import urllib.parse import urllib.error from shotgun_api3.lib import six, sgutils -from . 
import mock - import shotgun_api3.lib.httplib2 as httplib2 import shotgun_api3 as api from shotgun_api3.shotgun import ServerCapabilities, SG_TIMEZONE @@ -321,7 +320,7 @@ def test_network_retry(self): """Network failure is retried, with a sleep call between retries.""" self.sg._http_request.side_effect = httplib2.HttpLib2Error - with mock.patch("time.sleep") as mock_sleep: + with unittest.mock.patch("time.sleep") as mock_sleep: self.assertRaises(httplib2.HttpLib2Error, self.sg.info) self.assertTrue( self.sg._http_request.call_count == 1, @@ -507,7 +506,7 @@ def test_upload_s3_urlerror__get_attachment_upload_info(self): """ Test URLError response is retried when invoking _send_form """ - mock_opener = mock.Mock() + mock_opener = unittest.mock.Mock() mock_opener.return_value.open.side_effect = urllib.error.URLError( "[WinError 10054] An existing connection was forcibly closed by the remote host" ) @@ -535,7 +534,7 @@ def test_upload_s3_urlerror__upload_to_storage(self): """ Test URLError response is retried when uploading to S3. 
""" - self.sg._make_upload_request = mock.Mock( + self.sg._make_upload_request = unittest.mock.Mock( spec=api.Shotgun._make_upload_request, side_effect=urllib.error.URLError( "[Errno 104] Connection reset by peer", @@ -684,7 +683,7 @@ def test_parse_records(self): }, } url = "http://foo/files/0000/0000/0012/232/shot_thumb.jpg" - self.sg._build_thumb_url = mock.Mock(return_value=url) + self.sg._build_thumb_url = unittest.mock.Mock(return_value=url) modified, txt = self.sg._parse_records([orig, "plain text"]) self.assertEqual("plain text", txt, "non dict value is left as is") diff --git a/tests/test_unit.py b/tests/test_unit.py index 58e46d366..445d1fe07 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -16,7 +16,6 @@ import urllib.request import urllib.error -from .mock import patch import shotgun_api3 as api from shotgun_api3.lib.httplib2 import Http, ssl_error_classes @@ -188,7 +187,7 @@ def test_filters(self): actual_condition = result["filters"]["conditions"][0] self.assertEqual(expected_condition, actual_condition) - @patch("shotgun_api3.Shotgun._call_rpc") + @mock.patch("shotgun_api3.Shotgun._call_rpc") def get_call_rpc_params(self, args, kws, call_rpc): """Return params sent to _call_rpc from summarize.""" if not args: @@ -301,7 +300,7 @@ def test_no_platform(self): finally: api.shotgun.sys.platform = platform - @patch("shotgun_api3.shotgun.sys") + @mock.patch("shotgun_api3.shotgun.sys") def test_py_version(self, mock_sys): major = 2 minor = 7 From c3888df486bf1a536ef0e9634df44d6f1746a868 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 10:54:15 -0700 Subject: [PATCH 36/59] SG-38306 Python2 Removal - Part 6 - Remove python2 from httplib2 module (#403) * Remove python2 from httplib2 module --------- Co-authored-by: Eduardo Chauca --- shotgun_api3/lib/httplib2/__init__.py | 1838 +++++++++++++- shotgun_api3/lib/httplib2/auth.py | 2 +- shotgun_api3/lib/httplib2/python2/__init__.py | 
1993 --------------- shotgun_api3/lib/httplib2/python2/auth.py | 63 - shotgun_api3/lib/httplib2/python2/cacerts.txt | 2225 ----------------- shotgun_api3/lib/httplib2/python2/certs.py | 42 - shotgun_api3/lib/httplib2/python2/error.py | 48 - shotgun_api3/lib/httplib2/python2/iri2uri.py | 123 - shotgun_api3/lib/httplib2/python2/socks.py | 518 ---- shotgun_api3/lib/httplib2/python3/__init__.py | 1799 ------------- shotgun_api3/lib/httplib2/python3/auth.py | 69 - shotgun_api3/lib/httplib2/python3/cacerts.txt | 2225 ----------------- shotgun_api3/lib/httplib2/python3/certs.py | 42 - shotgun_api3/lib/httplib2/python3/error.py | 48 - shotgun_api3/lib/httplib2/python3/iri2uri.py | 124 - shotgun_api3/lib/httplib2/python3/socks.py | 518 ---- shotgun_api3/shotgun.py | 4 +- tests/test_api.py | 15 - tests/test_unit.py | 5 +- update_httplib2.py | 19 +- 20 files changed, 1811 insertions(+), 9909 deletions(-) delete mode 100644 shotgun_api3/lib/httplib2/python2/__init__.py delete mode 100644 shotgun_api3/lib/httplib2/python2/auth.py delete mode 100644 shotgun_api3/lib/httplib2/python2/cacerts.txt delete mode 100644 shotgun_api3/lib/httplib2/python2/certs.py delete mode 100644 shotgun_api3/lib/httplib2/python2/error.py delete mode 100644 shotgun_api3/lib/httplib2/python2/iri2uri.py delete mode 100644 shotgun_api3/lib/httplib2/python2/socks.py delete mode 100644 shotgun_api3/lib/httplib2/python3/__init__.py delete mode 100644 shotgun_api3/lib/httplib2/python3/auth.py delete mode 100644 shotgun_api3/lib/httplib2/python3/cacerts.txt delete mode 100644 shotgun_api3/lib/httplib2/python3/certs.py delete mode 100644 shotgun_api3/lib/httplib2/python3/error.py delete mode 100644 shotgun_api3/lib/httplib2/python3/iri2uri.py delete mode 100644 shotgun_api3/lib/httplib2/python3/socks.py diff --git a/shotgun_api3/lib/httplib2/__init__.py b/shotgun_api3/lib/httplib2/__init__.py index 42c9916d1..ba5fa2f23 100644 --- a/shotgun_api3/lib/httplib2/__init__.py +++ 
b/shotgun_api3/lib/httplib2/__init__.py @@ -1,39 +1,1799 @@ -from .. import six - -# Define all here to keep linters happy. It should be overwritten by the code -# below, but if in the future __all__ is not defined in httplib2 this will keep -# things from breaking. -__all__ = [] - -# Import the proper implementation into the module namespace depending on the -# current python version. httplib2 supports python 2/3 by forking the code rather -# than with a single cross-compatible module. Rather than modify third party code, -# we'll just import the appropriate branch here. -if six.PY3: - # Generate ssl_error_classes - import ssl as __ssl - ssl_error_classes = (__ssl.SSLError, __ssl.CertificateError) - del __ssl - - # get the python3 fork of httplib2 - from . import python3 as __httplib2_compat - - -else: - # Generate ssl_error_classes - from .python2 import SSLHandshakeError as __SSLHandshakeError # TODO: shouldn't rely on this. not public - ssl_error_classes = (__SSLHandshakeError,) - del __SSLHandshakeError - - # get the python2 fork of httplib2 - from . import python2 as __httplib2_compat - -# Import all of the httplib2 module. Note that we can't use a star import because -# we need to import *everything*, not just what exists in __all__. 
-for __name in dir(__httplib2_compat): - globals()[__name] = getattr(__httplib2_compat, __name) -del __httplib2_compat -del __name - -# Add ssl_error_classes to __all__ -__all__.append("ssl_error_classes") +# -*- coding: utf-8 -*- +"""Small, fast HTTP client library for Python.""" + +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [ + "Thomas Broyer (t.broyer@ltgt.net)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger", + "Mark Pilgrim", + "Alex Yu", + "Lai Han", +] +__license__ = "MIT" +__version__ = "0.22.0" + +import base64 +import calendar +import copy +import email +import email.feedparser +from email import header +import email.message +import email.utils +import errno +from gettext import gettext as _ +import gzip +from hashlib import md5 as _md5 +from hashlib import sha1 as _sha +import hmac +import http.client +import io +import os +import random +import re +import socket +import ssl +import sys +import time +import urllib.parse +import zlib + +try: + import socks +except ImportError: + # TODO: remove this fallback and copypasted socksipy module upon py2/3 merge, + # idea is to have soft-dependency on any compatible module called socks + from . import socks +from . 
import auth +from .error import * +from .iri2uri import iri2uri + + +def has_timeout(timeout): + if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"): + return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT + return timeout is not None + + +__all__ = [ + "debuglevel", + "FailedToDecompressContent", + "Http", + "HttpLib2Error", + "ProxyInfo", + "RedirectLimit", + "RedirectMissingLocation", + "Response", + "RETRIES", + "UnimplementedDigestAuthOptionError", + "UnimplementedHmacDigestAuthOptionError", +] + +# The httplib debug level, set to a non-zero value to get debug output +debuglevel = 0 + +# A request will be tried 'RETRIES' times if it fails at the socket/connection level. +RETRIES = 2 + + +# Open Items: +# ----------- + +# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) + +# Pluggable cache storage (supports storing the cache in +# flat files by default. We need a plug-in architecture +# that can support Berkeley DB and Squid) + +# == Known Issues == +# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. +# Does not handle Cache-Control: max-stale +# Does not use Age: headers when calculating cache freshness. + +# The number of redirections to follow before giving up. +# Note that only GET redirects are automatically followed. +# Will also honor 301 requests by saving that info and never +# requesting that URI again. +DEFAULT_MAX_REDIRECTS = 5 + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = [ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailers", + "transfer-encoding", + "upgrade", +] + +# https://tools.ietf.org/html/rfc7231#section-8.1.3 +SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE") + +# To change, assign to `Http().redirect_codes` +REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308)) + + +from . import certs + +CA_CERTS = certs.where() + +# PROTOCOL_TLS is python 3.5.3+. 
PROTOCOL_SSLv23 is deprecated. +# Both PROTOCOL_TLS and PROTOCOL_SSLv23 are equivalent and means: +# > Selects the highest protocol version that both the client and server support. +# > Despite the name, this option can select “TLS” protocols as well as “SSL”. +# source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_SSLv23 + +# PROTOCOL_TLS_CLIENT is python 3.10.0+. PROTOCOL_TLS is deprecated. +# > Auto-negotiate the highest protocol version that both the client and server support, and configure the context client-side connections. +# > The protocol enables CERT_REQUIRED and check_hostname by default. +# source: https://docs.python.org/3.10/library/ssl.html#ssl.PROTOCOL_TLS + +DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS_CLIENT", None) or getattr(ssl, "PROTOCOL_TLS", None) or getattr(ssl, "PROTOCOL_SSLv23") + + +def _build_ssl_context( + disable_ssl_certificate_validation, + ca_certs, + cert_file=None, + key_file=None, + maximum_version=None, + minimum_version=None, + key_password=None, +): + if not hasattr(ssl, "SSLContext"): + raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext") + + context = ssl.SSLContext(DEFAULT_TLS_VERSION) + # check_hostname and verify_mode should be set in opposite order during disable + # https://bugs.python.org/issue31431 + if disable_ssl_certificate_validation and hasattr(context, "check_hostname"): + context.check_hostname = not disable_ssl_certificate_validation + context.verify_mode = ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED + + # SSLContext.maximum_version and SSLContext.minimum_version are python 3.7+. 
+ # source: https://docs.python.org/3/library/ssl.html#ssl.SSLContext.maximum_version + if maximum_version is not None: + if hasattr(context, "maximum_version"): + if isinstance(maximum_version, str): + maximum_version = getattr(ssl.TLSVersion, maximum_version) + context.maximum_version = maximum_version + else: + raise RuntimeError("setting tls_maximum_version requires Python 3.7 and OpenSSL 1.1 or newer") + if minimum_version is not None: + if hasattr(context, "minimum_version"): + if isinstance(minimum_version, str): + minimum_version = getattr(ssl.TLSVersion, minimum_version) + context.minimum_version = minimum_version + else: + raise RuntimeError("setting tls_minimum_version requires Python 3.7 and OpenSSL 1.1 or newer") + # check_hostname requires python 3.4+ + # we will perform the equivalent in HTTPSConnectionWithTimeout.connect() by calling ssl.match_hostname + # if check_hostname is not supported. + if hasattr(context, "check_hostname"): + context.check_hostname = not disable_ssl_certificate_validation + + context.load_verify_locations(ca_certs) + + if cert_file: + context.load_cert_chain(cert_file, key_file, key_password) + + return context + + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")]) + return [header for header in list(response.keys()) if header not in hopbyhop] + + +_missing = object() + + +def _errno_from_exception(e): + # TODO python 3.11+ cheap try: return e.errno except AttributeError: pass + errno = getattr(e, "errno", _missing) + if errno is not _missing: + return errno + + # socket.error and common wrap in .args + args = getattr(e, "args", None) + if args: + return _errno_from_exception(args[0]) + + # pysocks.ProxyError wraps in .socket_err + # https://github.com/httplib2/httplib2/pull/202 + socket_err = getattr(e, "socket_err", None) + if socket_err: + return _errno_from_exception(socket_err) + + return None + + +URI = 
re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r"^\w+://") +re_unsafe = re.compile(r"[^\w\-_.()=!]+", re.ASCII) + + +def safename(filename): + """Return a filename suitable for the cache. + Strips dangerous and common characters to create a filename we + can use to store the cache in. + """ + if isinstance(filename, bytes): + filename_bytes = filename + filename = filename.decode("utf-8") + else: + filename_bytes = filename.encode("utf-8") + filemd5 = _md5(filename_bytes).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_unsafe.sub("", filename) + + # limit length of filename (vital for Windows) + # https://github.com/httplib2/httplib2/pull/74 + # C:\Users\ \AppData\Local\Temp\ , + # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars + # Thus max safe filename x = 93 chars. 
Let it be 90 to make a round sum: + filename = filename[:90] + + return ",".join((filename, filemd5)) + + +NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+") + + +def _normalize_headers(headers): + return dict( + [ + (_convert_byte_str(key).lower(), NORMALIZE_SPACE.sub(_convert_byte_str(value), " ").strip(),) + for (key, value) in headers.items() + ] + ) + + +def _convert_byte_str(s): + if not isinstance(s, str): + return str(s, "utf-8") + return s + + +def _parse_cache_control(headers): + retval = {} + if "cache-control" in headers: + parts = headers["cache-control"].split(",") + parts_with_args = [ + tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") + ] + parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, useful for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + + +def _entry_disposition(response_headers, request_headers): + """Determine freshness from the Date, Expires and Cache-Control headers. + + We don't handle the following: + + 1. Cache-Control: max-stale + 2. Age: headers are not used in the calculations. + + Not that this algorithm is simpler than you might think + because we are operating as a private (non-shared) cache. + This lets us ignore 's-maxage'. We can also ignore + 'proxy-invalidate' since we aren't a proxy. + We will never return a stale document as + fresh as a design decision, and thus the non-implementation + of 'max-stale'. This also lets us safely ignore 'must-revalidate' + since we operate as if every server has sent 'must-revalidate'. + Since we are private we get to ignore both 'public' and + 'private' parameters. 
We also ignore 'no-transform' since + we don't do any transformations. + The 'no-store' parameter is handled at a higher level. + So the only Cache-Control parameters we look at are: + + no-cache + only-if-cached + max-age + min-fresh + """ + + retval = "STALE" + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + + if "pragma" in request_headers and request_headers["pragma"].lower().find("no-cache") != -1: + retval = "TRANSPARENT" + if "cache-control" not in request_headers: + request_headers["cache-control"] = "no-cache" + elif "no-cache" in cc: + retval = "TRANSPARENT" + elif "no-cache" in cc_response: + retval = "STALE" + elif "only-if-cached" in cc: + retval = "FRESH" + elif "date" in response_headers: + date = calendar.timegm(email.utils.parsedate_tz(response_headers["date"])) + now = time.time() + current_age = max(0, now - date) + if "max-age" in cc_response: + try: + freshness_lifetime = int(cc_response["max-age"]) + except ValueError: + freshness_lifetime = 0 + elif "expires" in response_headers: + expires = email.utils.parsedate_tz(response_headers["expires"]) + if None == expires: + freshness_lifetime = 0 + else: + freshness_lifetime = max(0, calendar.timegm(expires) - date) + else: + freshness_lifetime = 0 + if "max-age" in cc: + try: + freshness_lifetime = int(cc["max-age"]) + except ValueError: + freshness_lifetime = 0 + if "min-fresh" in cc: + try: + min_fresh = int(cc["min-fresh"]) + except ValueError: + min_fresh = 0 + current_age += min_fresh + if freshness_lifetime > current_age: + retval = "FRESH" + return retval + + +def _decompressContent(response, new_content): + content = new_content + try: + encoding = response.get("content-encoding", None) + if encoding in ["gzip", "deflate"]: + if encoding == "gzip": + content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read() + if encoding == "deflate": + try: + content = zlib.decompress(content, zlib.MAX_WBITS) + except (IOError, zlib.error): + 
content = zlib.decompress(content, -zlib.MAX_WBITS) + response["content-length"] = str(len(content)) + # Record the historical presence of the encoding in a way the won't interfere. + response["-content-encoding"] = response["content-encoding"] + del response["content-encoding"] + except (IOError, zlib.error): + content = "" + raise FailedToDecompressContent( + _("Content purported to be compressed with %s but failed to decompress.") % response.get("content-encoding"), + response, + content, + ) + return content + + +def _bind_write_headers(msg): + def _write_headers(self): + # Self refers to the Generator object. + for h, v in msg.items(): + print("%s:" % h, end=" ", file=self._fp) + if isinstance(v, header.Header): + print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp) + else: + # email.Header got lots of smarts, so use it. + headers = header.Header(v, maxlinelen=self._maxheaderlen, charset="utf-8", header_name=h) + print(headers.encode(), file=self._fp) + # A blank line always separates headers from body. + print(file=self._fp) + + return _write_headers + + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if "no-store" in cc or "no-store" in cc_response: + cache.delete(cachekey) + else: + info = email.message.Message() + for key, value in response_headers.items(): + if key not in ["status", "content-encoding", "transfer-encoding"]: + info[key] = value + + # Add annotations to the cache to indicate what headers + # are variant for this request. 
+ vary = response_headers.get("vary", None) + if vary: + vary_headers = vary.lower().replace(" ", "").split(",") + for header in vary_headers: + key = "-varied-%s" % header + try: + info[key] = request_headers[header] + except KeyError: + pass + + status = response_headers.status + if status == 304: + status = 200 + + status_header = "status: %d\r\n" % status + + try: + header_str = info.as_string() + except UnicodeEncodeError: + setattr(info, "_write_headers", _bind_write_headers(info)) + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(? 0: + service = "cl" + # No point in guessing Base or Spreadsheet + # elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers["user-agent"],) + resp, content = self.http.request( + "https://www.google.com/accounts/ClientLogin", + method="POST", + body=urlencode(auth), + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ) + lines = content.split("\n") + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d["Auth"] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers["authorization"] = "GoogleLogin Auth=" + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication, +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = open(cacheFullPath, "rb") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = open(cacheFullPath, "wb") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + + def add(self, key, cert, domain, password): + self.credentials.append((domain.lower(), key, cert, password)) + + def iter(self, domain): + for (cdomain, key, cert, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (key, cert, password) + + +class AllHosts(object): + pass + + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + + bypass_hosts = () + + def __init__( + self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None, + ): + """Args: + + proxy_type: The type of proxy server. This must be set to one of + socks.PROXY_TYPE_XXX constants. 
For example: p = + ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', + proxy_port=8000) + proxy_host: The hostname or IP address of the proxy server. + proxy_port: The port that the proxy server is running on. + proxy_rdns: If True (default), DNS queries will not be performed + locally, and instead, handed to the proxy to resolve. This is useful + if the network does not allow resolution of non-local names. In + httplib2 0.9 and earlier, this defaulted to False. + proxy_user: The username used to authenticate with the proxy server. + proxy_pass: The password used to authenticate with the proxy server. + proxy_headers: Additional or modified headers for the proxy connect + request. + """ + if isinstance(proxy_user, bytes): + proxy_user = proxy_user.decode() + if isinstance(proxy_pass, bytes): + proxy_pass = proxy_pass.decode() + ( + self.proxy_type, + self.proxy_host, + self.proxy_port, + self.proxy_rdns, + self.proxy_user, + self.proxy_pass, + self.proxy_headers, + ) = ( + proxy_type, + proxy_host, + proxy_port, + proxy_rdns, + proxy_user, + proxy_pass, + proxy_headers, + ) + + def astuple(self): + return ( + self.proxy_type, + self.proxy_host, + self.proxy_port, + self.proxy_rdns, + self.proxy_user, + self.proxy_pass, + self.proxy_headers, + ) + + def isgood(self): + return socks and (self.proxy_host != None) and (self.proxy_port != None) + + def applies_to(self, hostname): + return not self.bypass_host(hostname) + + def bypass_host(self, hostname): + """Has this host been excluded from the proxy config""" + if self.bypass_hosts is AllHosts: + return True + + hostname = "." + hostname.lstrip(".") + for skip_name in self.bypass_hosts: + # *.suffix + if skip_name.startswith(".") and hostname.endswith(skip_name): + return True + # exact match + if hostname == "." 
+ skip_name: + return True + return False + + def __repr__(self): + return ( + "" + ).format(p=self) + + +def proxy_info_from_environment(method="http"): + """Read proxy info from the environment variables. + """ + if method not in ("http", "https"): + return + + env_var = method + "_proxy" + url = os.environ.get(env_var, os.environ.get(env_var.upper())) + if not url: + return + return proxy_info_from_url(url, method, noproxy=None) + + +def proxy_info_from_url(url, method="http", noproxy=None): + """Construct a ProxyInfo from a URL (such as http_proxy env var) + """ + url = urllib.parse.urlparse(url) + + proxy_type = 3 # socks.PROXY_TYPE_HTTP + pi = ProxyInfo( + proxy_type=proxy_type, + proxy_host=url.hostname, + proxy_port=url.port or dict(https=443, http=80)[method], + proxy_user=url.username or None, + proxy_pass=url.password or None, + proxy_headers=None, + ) + + bypass_hosts = [] + # If not given an explicit noproxy value, respect values in env vars. + if noproxy is None: + noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", "")) + # Special case: A single '*' character means all hosts should be bypassed. + if noproxy == "*": + bypass_hosts = AllHosts + elif noproxy.strip(): + bypass_hosts = noproxy.split(",") + bypass_hosts = tuple(filter(bool, bypass_hosts)) # To exclude empty string. + + pi.bypass_hosts = bypass_hosts + return pi + + +class HTTPConnectionWithTimeout(http.client.HTTPConnection): + """HTTPConnection subclass that supports timeouts + + HTTPConnection subclass that supports timeouts + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. 
See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + + def __init__(self, host, port=None, timeout=None, proxy_info=None): + http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout) + + self.proxy_info = proxy_info + if proxy_info and not isinstance(proxy_info, ProxyInfo): + self.proxy_info = proxy_info("http") + + def connect(self): + """Connect to the host and port specified in __init__.""" + if self.proxy_info and socks is None: + raise ProxiesUnavailableError("Proxy support missing but proxy use was requested!") + if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): + use_proxy = True + ( + proxy_type, + proxy_host, + proxy_port, + proxy_rdns, + proxy_user, + proxy_pass, + proxy_headers, + ) = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port + else: + use_proxy = False + + host = self.host + port = self.port + proxy_type = None + + socket_err = None + + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if use_proxy: + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy( + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, + ) + else: + self.sock = socket.socket(af, socktype, proto) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if has_timeout(self.timeout): + self.sock.settimeout(self.timeout) + if self.debuglevel > 0: + print("connect: ({0}, {1}) ************".format(self.host, self.port)) + if use_proxy: + print( + "proxy: {0} ************".format( + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) + ) + ) + + self.sock.connect((self.host, self.port) + sa[2:]) + except socket.error as e: + socket_err = e + if self.debuglevel > 0: + print("connect fail: ({0}, {1})".format(self.host, self.port)) + if use_proxy: + print( + "proxy: {0}".format( + 
str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) + ) + ) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket_err + + +class HTTPSConnectionWithTimeout(http.client.HTTPSConnection): + """This class allows communication via SSL. + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + timeout=None, + proxy_info=None, + ca_certs=None, + disable_ssl_certificate_validation=False, + tls_maximum_version=None, + tls_minimum_version=None, + key_password=None, + ): + + self.disable_ssl_certificate_validation = disable_ssl_certificate_validation + self.ca_certs = ca_certs if ca_certs else CA_CERTS + + self.proxy_info = proxy_info + if proxy_info and not isinstance(proxy_info, ProxyInfo): + self.proxy_info = proxy_info("https") + + context = _build_ssl_context( + self.disable_ssl_certificate_validation, + self.ca_certs, + cert_file, + key_file, + maximum_version=tls_maximum_version, + minimum_version=tls_minimum_version, + key_password=key_password, + ) + super(HTTPSConnectionWithTimeout, self).__init__( + host, port=port, timeout=timeout, context=context, + ) + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + + def connect(self): + """Connect to a host on a given (SSL) port.""" + if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): + use_proxy = True + ( + proxy_type, + proxy_host, + proxy_port, + proxy_rdns, + proxy_user, + proxy_pass, + proxy_headers, + ) = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port + else: + use_proxy = False + + host = self.host + port = self.port + proxy_type = None + proxy_headers = 
None + + socket_err = None + + address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) + for family, socktype, proto, canonname, sockaddr in address_info: + try: + if use_proxy: + sock = socks.socksocket(family, socktype, proto) + + sock.setproxy( + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, + ) + else: + sock = socket.socket(family, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if has_timeout(self.timeout): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + + self.sock = self._context.wrap_socket(sock, server_hostname=self.host) + + # Python 3.3 compatibility: emulate the check_hostname behavior + if not hasattr(self._context, "check_hostname") and not self.disable_ssl_certificate_validation: + try: + ssl.match_hostname(self.sock.getpeercert(), self.host) + except Exception: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + if self.debuglevel > 0: + print("connect: ({0}, {1})".format(self.host, self.port)) + if use_proxy: + print( + "proxy: {0}".format( + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) + ) + ) + except (ssl.SSLError, ssl.CertificateError) as e: + if sock: + sock.close() + if self.sock: + self.sock.close() + self.sock = None + raise + except (socket.timeout, socket.gaierror): + raise + except socket.error as e: + socket_err = e + if self.debuglevel > 0: + print("connect fail: ({0}, {1})".format(self.host, self.port)) + if use_proxy: + print( + "proxy: {0}".format( + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) + ) + ) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket_err + + +SCHEME_TO_CONNECTION = { + "http": HTTPConnectionWithTimeout, + "https": HTTPSConnectionWithTimeout, +} + + +class Http(object): + """An HTTP client that handles: + + - all methods + - caching + - ETags + - compression, + - HTTPS + 
- Basic + - Digest + - WSSE + + and more. + """ + + def __init__( + self, + cache=None, + timeout=None, + proxy_info=proxy_info_from_environment, + ca_certs=None, + disable_ssl_certificate_validation=False, + tls_maximum_version=None, + tls_minimum_version=None, + ): + """If 'cache' is a string then it is used as a directory name for + a disk cache. Otherwise it must be an object that supports the + same interface as FileCache. + + All timeouts are in seconds. If None is passed for timeout + then Python's default timeout for sockets will be used. See + for example the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + + `proxy_info` may be: + - a callable that takes the http scheme ('http' or 'https') and + returns a ProxyInfo instance per request. By default, uses + proxy_info_from_environment. + - a ProxyInfo instance (static proxy config). + - None (proxy disabled). + + ca_certs is the path of a file containing root CA certificates for SSL + server certificate validation. By default, a CA cert file bundled with + httplib2 is used. + + If disable_ssl_certificate_validation is true, SSL cert validation will + not be performed. + + tls_maximum_version / tls_minimum_version require Python 3.7+ / + OpenSSL 1.1.0g+. A value of "TLSv1_3" requires OpenSSL 1.1.1+. + """ + self.proxy_info = proxy_info + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = disable_ssl_certificate_validation + self.tls_maximum_version = tls_maximum_version + self.tls_minimum_version = tls_minimum_version + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. 
+ if cache and isinstance(cache, str): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + self.redirect_codes = REDIRECT_CODES + + # Which HTTP methods do we apply optimistic concurrency to, i.e. + # which methods get an "if-match:" etag header added to them. + self.optimistic_concurrency_methods = ["PUT", "PATCH"] + + self.safe_methods = list(SAFE_METHODS) + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. + self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + # Keep Authorization: headers on a redirect. + self.forward_authorization_headers = False + + def close(self): + """Close persistent connections, clear sensitive data. + Not thread-safe, requires external synchronization against concurrent requests. + """ + existing, self.connections = self.connections, {} + for _, c in existing.items(): + c.close() + self.certificates.clear() + self.clear_credentials() + + def __getstate__(self): + state_dict = copy.copy(self.__dict__) + # In case request is augmented by some foreign object such as + # credentials which handle auth + if "request" in state_dict: + del state_dict["request"] + if "connections" in state_dict: + del state_dict["connections"] + return state_dict + + def __setstate__(self, state): + self.__dict__.update(state) + self.connections = {} + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. 
+ """ + challenges = auth._parse_www_authenticate(response, "www-authenticate") + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if scheme in challenges: + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain, password=None): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain, password) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + i = 0 + seen_bad_status_line = False + while i < RETRIES: + i += 1 + try: + if conn.sock is None: + conn.connect() + conn.request(method, request_uri, body, headers) + except socket.timeout: + conn.close() + raise + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except socket.error as e: + errno_ = _errno_from_exception(e) + if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: + continue # retry on potentially transient errors + raise + except http.client.HTTPException: + if conn.sock is None: + if i < RETRIES - 1: + conn.close() + conn.connect() + continue + else: + conn.close() + raise + if i < RETRIES - 1: + conn.close() + conn.connect() + continue + # Just because the server closed the connection doesn't apparently mean + # that the server didn't send a response. 
+ pass + try: + response = conn.getresponse() + except (http.client.BadStatusLine, http.client.ResponseNotReady): + # If we get a BadStatusLine on the first try then that means + # the connection just went stale, so retry regardless of the + # number of RETRIES set. + if not seen_bad_status_line and i == 1: + i = 0 + seen_bad_status_line = True + conn.close() + conn.connect() + continue + else: + conn.close() + raise + except socket.timeout: + raise + except (socket.error, http.client.HTTPException): + conn.close() + if i == 0: + conn.close() + conn.connect() + continue + else: + raise + else: + content = b"" + if method == "HEAD": + conn.close() + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + + break + return (response, content) + + def _request( + self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey, + ): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if 
self.follow_all_redirects or method in self.safe_methods or response.status in (303, 308): + if self.follow_redirects and response.status in self.redirect_codes: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if "location" not in response and response.status != 300: + raise RedirectMissingLocation( + _("Redirected but the response is missing a Location: header."), response, content, + ) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if "location" in response: + location = response["location"] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response["location"] = urllib.parse.urljoin(absolute_uri, location) + if response.status == 308 or (response.status == 301 and (method in self.safe_methods)): + response["-x-permanent-redirect-url"] = response["location"] + if "content-location" not in response: + response["content-location"] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if "if-none-match" in headers: + del headers["if-none-match"] + if "if-modified-since" in headers: + del headers["if-modified-since"] + if "authorization" in headers and not self.forward_authorization_headers: + del headers["authorization"] + if "location" in response: + location = response["location"] + old_response = copy.deepcopy(response) + if "content-location" not in old_response: + old_response["content-location"] = absolute_uri + redirect_method = method + if response.status in [302, 303]: + redirect_method = "GET" + body = None + (response, content) = self.request( + location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1, + ) + response.previous = old_response + else: + raise RedirectLimit( + "Redirected more times than redirection_limit allows.", response, content, + ) + elif response.status in [200, 203] and method in 
self.safe_methods: + # Don't cache 206's since we aren't going to handle byte range requests + if "content-location" not in response: + response["content-location"] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + def _normalize_headers(self, headers): + return _normalize_headers(headers) + + # Need to catch and rebrand some exceptions + # Then need to optionally turn all exceptions into status codes + # including all socket.* and httplib.* exceptions. + + def request( + self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, + ): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. + +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + conn_key = "" + + try: + if headers is None: + headers = {} + else: + headers = self._normalize_headers(headers) + + if "user-agent" not in headers: + headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__ + + uri = iri2uri(uri) + # Prevent CWE-75 space injection to manipulate request via part of uri. + # Prevent CWE-93 CRLF injection to modify headers via part of uri. 
+ uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A") + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + + conn_key = scheme + ":" + authority + conn = self.connections.get(conn_key) + if conn is None: + if not connection_type: + connection_type = SCHEME_TO_CONNECTION[scheme] + certs = list(self.certificates.iter(authority)) + if issubclass(connection_type, HTTPSConnectionWithTimeout): + if certs: + conn = self.connections[conn_key] = connection_type( + authority, + key_file=certs[0][0], + cert_file=certs[0][1], + timeout=self.timeout, + proxy_info=self.proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, + tls_maximum_version=self.tls_maximum_version, + tls_minimum_version=self.tls_minimum_version, + key_password=certs[0][2], + ) + else: + conn = self.connections[conn_key] = connection_type( + authority, + timeout=self.timeout, + proxy_info=self.proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, + tls_maximum_version=self.tls_maximum_version, + tls_minimum_version=self.tls_minimum_version, + ) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, proxy_info=self.proxy_info + ) + conn.set_debuglevel(debuglevel) + + if "range" not in headers and "accept-encoding" not in headers: + headers["accept-encoding"] = "gzip, deflate" + + info = email.message.Message() + cachekey = None + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + try: + info, content = cached_value.split(b"\r\n\r\n", 1) + info = email.message_from_bytes(info) + for k, v in info.items(): + if v.startswith("=?") and v.endswith("?="): + info.replace_header(k, str(*email.header.decode_header(v)[0])) + except (IndexError, ValueError): + self.cache.delete(cachekey) + cachekey = None + cached_value = None + + if ( + method in 
self.optimistic_concurrency_methods + and self.cache + and "etag" in info + and not self.ignore_etag + and "if-match" not in headers + ): + # http://www.w3.org/1999/04/Editing/ + headers["if-match"] = info["etag"] + + # https://tools.ietf.org/html/rfc7234 + # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location + # when a non-error status code is received in response to an unsafe request method. + if self.cache and cachekey and method not in self.safe_methods: + self.cache.delete(cachekey) + + # Check the vary header in the cache to see if this request + # matches what varies in the cache. + if method in self.safe_methods and "vary" in info: + vary = info["vary"] + vary_headers = vary.lower().replace(" ", "").split(",") + for header in vary_headers: + key = "-varied-%s" % header + value = info[key] + if headers.get(header, None) != value: + cached_value = None + break + + if ( + self.cache + and cached_value + and (method in self.safe_methods or info["status"] == "308") + and "range" not in headers + ): + redirect_method = method + if info["status"] not in ("307", "308"): + redirect_method = "GET" + if "-x-permanent-redirect-url" in info: + # Should cached permanent redirects be counted in our redirection count? For now, yes. + if redirections <= 0: + raise RedirectLimit( + "Redirected more times than redirection_limit allows.", {}, "", + ) + (response, new_content) = self.request( + info["-x-permanent-redirect-url"], + method=redirect_method, + headers=headers, + redirections=redirections - 1, + ) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + response = Response(info) + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if "etag" in info and not self.ignore_etag and not "if-none-match" in headers: + headers["if-none-match"] = info["etag"] + if "last-modified" in info and not "last-modified" in headers: + headers["if-modified-since"] = info["last-modified"] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request( + conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, + ) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. + + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + cc = _parse_cache_control(headers) + if "only-if-cached" in cc: + info["status"] = "504" + response = Response(info) + content = b"" + else: + (response, content) = self._request( + conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, + ) + except Exception as e: + is_timeout = isinstance(e, socket.timeout) + if is_timeout: + conn = self.connections.pop(conn_key, None) + if conn: + conn.close() + + if self.force_exception_to_status_code: + if isinstance(e, 
HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = b"Request Timeout" + response = Response({"content-type": "text/plain", "status": "408", "content-length": len(content),}) + response.reason = "Request Timeout" + else: + content = str(e).encode("utf-8") + response = Response({"content-type": "text/plain", "status": "400", "content-length": len(content),}) + response.reason = "Bad Request" + else: + raise + + return (response, content) + + +class Response(dict): + """An object more like email.message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + """HTTP protocol version used by server. + + 10 for HTTP/1.0, 11 for HTTP/1.1. + """ + version = 11 + + "Status code returned by server. " + status = 200 + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.message or + # an httplib.HTTPResponse object. 
+ if isinstance(info, http.client.HTTPResponse): + for key, value in info.getheaders(): + key = key.lower() + prev = self.get(key) + if prev is not None: + value = ", ".join((prev, value)) + self[key] = value + self.status = info.status + self["status"] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.message.Message): + for key, value in list(info.items()): + self[key.lower()] = value + self.status = int(self["status"]) + else: + for key, value in info.items(): + self[key.lower()] = value + self.status = int(self.get("status", self.status)) + + def __getattr__(self, name): + if name == "dict": + return self + else: + raise AttributeError(name) diff --git a/shotgun_api3/lib/httplib2/auth.py b/shotgun_api3/lib/httplib2/auth.py index 53f427be1..fd75e61bd 100644 --- a/shotgun_api3/lib/httplib2/auth.py +++ b/shotgun_api3/lib/httplib2/auth.py @@ -1,7 +1,7 @@ import base64 import re -from ... import pyparsing as pp +from .. import pyparsing as pp from .error import * diff --git a/shotgun_api3/lib/httplib2/python2/__init__.py b/shotgun_api3/lib/httplib2/python2/__init__.py deleted file mode 100644 index cbd8f382e..000000000 --- a/shotgun_api3/lib/httplib2/python2/__init__.py +++ /dev/null @@ -1,1993 +0,0 @@ -"""Small, fast HTTP client library for Python. - -Features persistent connections, cache, and Google App Engine Standard -Environment support. 
-""" - -from __future__ import print_function - -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = [ - "Thomas Broyer (t.broyer@ltgt.net)", - "James Antill", - "Xavier Verges Farrero", - "Jonathan Feinberg", - "Blair Zajac", - "Sam Ruby", - "Louis Nyffenegger", - "Alex Yu", - "Lai Han", -] -__license__ = "MIT" -__version__ = "0.22.0" - -import base64 -import calendar -import copy -import email -import email.FeedParser -import email.Message -import email.Utils -import errno -import gzip -import httplib -import os -import random -import re -import StringIO -import sys -import time -import urllib -import urlparse -import zlib - -try: - from hashlib import sha1 as _sha, md5 as _md5 -except ImportError: - # prior to Python 2.5, these were separate modules - import sha - import md5 - - _sha = sha.new - _md5 = md5.new -import hmac -from gettext import gettext as _ -import socket - -try: - from . import socks -except ImportError: - try: - import socks - except (ImportError, AttributeError): - socks = None -from . 
import auth -from .error import * - -# Build the appropriate socket wrapper for ssl -ssl = None -ssl_SSLError = None -ssl_CertificateError = None -try: - import ssl # python 2.6 -except ImportError: - pass -if ssl is not None: - ssl_SSLError = getattr(ssl, "SSLError", None) - ssl_CertificateError = getattr(ssl, "CertificateError", None) - - -def _ssl_wrap_socket(sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname, key_password): - if disable_validation: - cert_reqs = ssl.CERT_NONE - else: - cert_reqs = ssl.CERT_REQUIRED - if ssl_version is None: - ssl_version = ssl.PROTOCOL_SSLv23 - - if hasattr(ssl, "SSLContext"): # Python 2.7.9 - context = ssl.SSLContext(ssl_version) - context.verify_mode = cert_reqs - context.check_hostname = cert_reqs != ssl.CERT_NONE - if cert_file: - if key_password: - context.load_cert_chain(cert_file, key_file, key_password) - else: - context.load_cert_chain(cert_file, key_file) - if ca_certs: - context.load_verify_locations(ca_certs) - return context.wrap_socket(sock, server_hostname=hostname) - else: - if key_password: - raise NotSupportedOnThisPlatform("Certificate with password is not supported.") - return ssl.wrap_socket( - sock, keyfile=key_file, certfile=cert_file, cert_reqs=cert_reqs, ca_certs=ca_certs, ssl_version=ssl_version, - ) - - -def _ssl_wrap_socket_unsupported( - sock, key_file, cert_file, disable_validation, ca_certs, ssl_version, hostname, key_password -): - if not disable_validation: - raise CertificateValidationUnsupported( - "SSL certificate validation is not supported without " - "the ssl module installed. To avoid this error, install " - "the ssl module, or explicity disable validation." 
- ) - if key_password: - raise NotSupportedOnThisPlatform("Certificate with password is not supported.") - ssl_sock = socket.ssl(sock, key_file, cert_file) - return httplib.FakeSocket(sock, ssl_sock) - - -if ssl is None: - _ssl_wrap_socket = _ssl_wrap_socket_unsupported - -if sys.version_info >= (2, 3): - from .iri2uri import iri2uri -else: - - def iri2uri(uri): - return uri - - -def has_timeout(timeout): # python 2.6 - if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"): - return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT - return timeout is not None - - -__all__ = [ - "Http", - "Response", - "ProxyInfo", - "HttpLib2Error", - "RedirectMissingLocation", - "RedirectLimit", - "FailedToDecompressContent", - "UnimplementedDigestAuthOptionError", - "UnimplementedHmacDigestAuthOptionError", - "debuglevel", - "ProxiesUnavailableError", -] - -# The httplib debug level, set to a non-zero value to get debug output -debuglevel = 0 - -# A request will be tried 'RETRIES' times if it fails at the socket/connection level. -RETRIES = 2 - -# Python 2.3 support -if sys.version_info < (2, 4): - - def sorted(seq): - seq.sort() - return seq - - -# Python 2.3 support -def HTTPResponse__getheaders(self): - """Return list of (header, value) tuples.""" - if self.msg is None: - raise httplib.ResponseNotReady() - return self.msg.items() - - -if not hasattr(httplib.HTTPResponse, "getheaders"): - httplib.HTTPResponse.getheaders = HTTPResponse__getheaders - - -# All exceptions raised here derive from HttpLib2Error -class HttpLib2Error(Exception): - pass - - -# Some exceptions can be caught and optionally -# be turned back into responses. 
-class HttpLib2ErrorWithResponse(HttpLib2Error): - def __init__(self, desc, response, content): - self.response = response - self.content = content - HttpLib2Error.__init__(self, desc) - - -class RedirectMissingLocation(HttpLib2ErrorWithResponse): - pass - - -class RedirectLimit(HttpLib2ErrorWithResponse): - pass - - -class FailedToDecompressContent(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class MalformedHeader(HttpLib2Error): - pass - - -class RelativeURIError(HttpLib2Error): - pass - - -class ServerNotFoundError(HttpLib2Error): - pass - - -class ProxiesUnavailableError(HttpLib2Error): - pass - - -class CertificateValidationUnsupported(HttpLib2Error): - pass - - -class SSLHandshakeError(HttpLib2Error): - pass - - -class NotSupportedOnThisPlatform(HttpLib2Error): - pass - - -class CertificateHostnameMismatch(SSLHandshakeError): - def __init__(self, desc, host, cert): - HttpLib2Error.__init__(self, desc) - self.host = host - self.cert = cert - - -class NotRunningAppEngineEnvironment(HttpLib2Error): - pass - - -# Open Items: -# ----------- -# Proxy support - -# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) - -# Pluggable cache storage (supports storing the cache in -# flat files by default. We need a plug-in architecture -# that can support Berkeley DB and Squid) - -# == Known Issues == -# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. -# Does not handle Cache-Control: max-stale -# Does not use Age: headers when calculating cache freshness. - -# The number of redirections to follow before giving up. -# Note that only GET redirects are automatically followed. -# Will also honor 301 requests by saving that info and never -# requesting that URI again. -DEFAULT_MAX_REDIRECTS = 5 - -from . 
import certs - -CA_CERTS = certs.where() - -# Which headers are hop-by-hop headers by default -HOP_BY_HOP = [ - "connection", - "keep-alive", - "proxy-authenticate", - "proxy-authorization", - "te", - "trailers", - "transfer-encoding", - "upgrade", -] - -# https://tools.ietf.org/html/rfc7231#section-8.1.3 -SAFE_METHODS = ("GET", "HEAD") # TODO add "OPTIONS", "TRACE" - -# To change, assign to `Http().redirect_codes` -REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308)) - - -def _get_end2end_headers(response): - hopbyhop = list(HOP_BY_HOP) - hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")]) - return [header for header in response.keys() if header not in hopbyhop] - - -URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") - - -def parse_uri(uri): - """Parses a URI using the regex given in Appendix B of RFC 3986. - - (scheme, authority, path, query, fragment) = parse_uri(uri) - """ - groups = URI.match(uri).groups() - return (groups[1], groups[3], groups[4], groups[6], groups[8]) - - -def urlnorm(uri): - (scheme, authority, path, query, fragment) = parse_uri(uri) - if not scheme or not authority: - raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) - authority = authority.lower() - scheme = scheme.lower() - if not path: - path = "/" - # Could do syntax based normalization of the URI before - # computing the digest. See Section 6.2.2 of Std 66. - request_uri = query and "?".join([path, query]) or path - scheme = scheme.lower() - defrag_uri = scheme + "://" + authority + request_uri - return scheme, authority, request_uri, defrag_uri - - -# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) -re_url_scheme = re.compile(r"^\w+://") -re_unsafe = re.compile(r"[^\w\-_.()=!]+") - - -def safename(filename): - """Return a filename suitable for the cache. - Strips dangerous and common characters to create a filename we - can use to store the cache in. 
- """ - if isinstance(filename, str): - filename_bytes = filename - filename = filename.decode("utf-8") - else: - filename_bytes = filename.encode("utf-8") - filemd5 = _md5(filename_bytes).hexdigest() - filename = re_url_scheme.sub("", filename) - filename = re_unsafe.sub("", filename) - - # limit length of filename (vital for Windows) - # https://github.com/httplib2/httplib2/pull/74 - # C:\Users\ \AppData\Local\Temp\ , - # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars - # Thus max safe filename x = 93 chars. Let it be 90 to make a round sum: - filename = filename[:90] - - return ",".join((filename, filemd5)) - - -NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+") - - -def _normalize_headers(headers): - return dict([(key.lower(), NORMALIZE_SPACE.sub(value, " ").strip()) for (key, value) in headers.iteritems()]) - - -def _parse_cache_control(headers): - retval = {} - if "cache-control" in headers: - parts = headers["cache-control"].split(",") - parts_with_args = [ - tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") - ] - parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] - retval = dict(parts_with_args + parts_wo_args) - return retval - - -# Whether to use a strict mode to parse WWW-Authenticate headers -# Might lead to bad results in case of ill-formed header value, -# so disabled by default, falling back to relaxed parsing. -# Set to true to turn on, usefull for testing servers. -USE_WWW_AUTH_STRICT_PARSING = 0 - - -# TODO: add current time as _entry_disposition argument to avoid sleep in tests -def _entry_disposition(response_headers, request_headers): - """Determine freshness from the Date, Expires and Cache-Control headers. - - We don't handle the following: - - 1. Cache-Control: max-stale - 2. Age: headers are not used in the calculations. - - Not that this algorithm is simpler than you might think - because we are operating as a private (non-shared) cache. 
- This lets us ignore 's-maxage'. We can also ignore - 'proxy-invalidate' since we aren't a proxy. - We will never return a stale document as - fresh as a design decision, and thus the non-implementation - of 'max-stale'. This also lets us safely ignore 'must-revalidate' - since we operate as if every server has sent 'must-revalidate'. - Since we are private we get to ignore both 'public' and - 'private' parameters. We also ignore 'no-transform' since - we don't do any transformations. - The 'no-store' parameter is handled at a higher level. - So the only Cache-Control parameters we look at are: - - no-cache - only-if-cached - max-age - min-fresh - """ - - retval = "STALE" - cc = _parse_cache_control(request_headers) - cc_response = _parse_cache_control(response_headers) - - if "pragma" in request_headers and request_headers["pragma"].lower().find("no-cache") != -1: - retval = "TRANSPARENT" - if "cache-control" not in request_headers: - request_headers["cache-control"] = "no-cache" - elif "no-cache" in cc: - retval = "TRANSPARENT" - elif "no-cache" in cc_response: - retval = "STALE" - elif "only-if-cached" in cc: - retval = "FRESH" - elif "date" in response_headers: - date = calendar.timegm(email.Utils.parsedate_tz(response_headers["date"])) - now = time.time() - current_age = max(0, now - date) - if "max-age" in cc_response: - try: - freshness_lifetime = int(cc_response["max-age"]) - except ValueError: - freshness_lifetime = 0 - elif "expires" in response_headers: - expires = email.Utils.parsedate_tz(response_headers["expires"]) - if None == expires: - freshness_lifetime = 0 - else: - freshness_lifetime = max(0, calendar.timegm(expires) - date) - else: - freshness_lifetime = 0 - if "max-age" in cc: - try: - freshness_lifetime = int(cc["max-age"]) - except ValueError: - freshness_lifetime = 0 - if "min-fresh" in cc: - try: - min_fresh = int(cc["min-fresh"]) - except ValueError: - min_fresh = 0 - current_age += min_fresh - if freshness_lifetime > current_age: - 
retval = "FRESH" - return retval - - -def _decompressContent(response, new_content): - content = new_content - try: - encoding = response.get("content-encoding", None) - if encoding in ["gzip", "deflate"]: - if encoding == "gzip": - content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() - if encoding == "deflate": - try: - content = zlib.decompress(content, zlib.MAX_WBITS) - except (IOError, zlib.error): - content = zlib.decompress(content, -zlib.MAX_WBITS) - response["content-length"] = str(len(content)) - # Record the historical presence of the encoding in a way the won't interfere. - response["-content-encoding"] = response["content-encoding"] - del response["content-encoding"] - except (IOError, zlib.error): - content = "" - raise FailedToDecompressContent( - _("Content purported to be compressed with %s but failed to decompress.") % response.get("content-encoding"), - response, - content, - ) - return content - - -def _updateCache(request_headers, response_headers, content, cache, cachekey): - if cachekey: - cc = _parse_cache_control(request_headers) - cc_response = _parse_cache_control(response_headers) - if "no-store" in cc or "no-store" in cc_response: - cache.delete(cachekey) - else: - info = email.Message.Message() - for key, value in response_headers.iteritems(): - if key not in ["status", "content-encoding", "transfer-encoding"]: - info[key] = value - - # Add annotations to the cache to indicate what headers - # are variant for this request. - vary = response_headers.get("vary", None) - if vary: - vary_headers = vary.lower().replace(" ", "").split(",") - for header in vary_headers: - key = "-varied-%s" % header - try: - info[key] = request_headers[header] - except KeyError: - pass - - status = response_headers.status - if status == 304: - status = 200 - - status_header = "status: %d\r\n" % status - - header_str = info.as_string() - - header_str = re.sub("\r(?!\n)|(? 
0: - service = "cl" - # No point in guessing Base or Spreadsheet - # elif request_uri.find("spreadsheets") > 0: - # service = "wise" - - auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers["user-agent"],) - resp, content = self.http.request( - "https://www.google.com/accounts/ClientLogin", - method="POST", - body=urlencode(auth), - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) - lines = content.split("\n") - d = dict([tuple(line.split("=", 1)) for line in lines if line]) - if resp.status == 403: - self.Auth = "" - else: - self.Auth = d["Auth"] - - def request(self, method, request_uri, headers, content): - """Modify the request headers to add the appropriate - Authorization header.""" - headers["authorization"] = "GoogleLogin Auth=" + self.Auth - - -AUTH_SCHEME_CLASSES = { - "basic": BasicAuthentication, - "wsse": WsseAuthentication, - "digest": DigestAuthentication, - "hmacdigest": HmacDigestAuthentication, - "googlelogin": GoogleLoginAuthentication, -} - -AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] - - -class FileCache(object): - """Uses a local directory as a store for cached files. - Not really safe to use if multiple threads or processes are going to - be running on the same cache. 
- """ - - def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior - self.cache = cache - self.safe = safe - if not os.path.exists(cache): - os.makedirs(self.cache) - - def get(self, key): - retval = None - cacheFullPath = os.path.join(self.cache, self.safe(key)) - try: - f = file(cacheFullPath, "rb") - retval = f.read() - f.close() - except IOError: - pass - return retval - - def set(self, key, value): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - f = file(cacheFullPath, "wb") - f.write(value) - f.close() - - def delete(self, key): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - if os.path.exists(cacheFullPath): - os.remove(cacheFullPath) - - -class Credentials(object): - def __init__(self): - self.credentials = [] - - def add(self, name, password, domain=""): - self.credentials.append((domain.lower(), name, password)) - - def clear(self): - self.credentials = [] - - def iter(self, domain): - for (cdomain, name, password) in self.credentials: - if cdomain == "" or domain == cdomain: - yield (name, password) - - -class KeyCerts(Credentials): - """Identical to Credentials except that - name/password are mapped to key/cert.""" - - def add(self, key, cert, domain, password): - self.credentials.append((domain.lower(), key, cert, password)) - - def iter(self, domain): - for (cdomain, key, cert, password) in self.credentials: - if cdomain == "" or domain == cdomain: - yield (key, cert, password) - - -class AllHosts(object): - pass - - -class ProxyInfo(object): - """Collect information required to use a proxy.""" - - bypass_hosts = () - - def __init__( - self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None, - ): - """Args: - - proxy_type: The type of proxy server. This must be set to one of - socks.PROXY_TYPE_XXX constants. 
For example: p = - ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', - proxy_port=8000) - proxy_host: The hostname or IP address of the proxy server. - proxy_port: The port that the proxy server is running on. - proxy_rdns: If True (default), DNS queries will not be performed - locally, and instead, handed to the proxy to resolve. This is useful - if the network does not allow resolution of non-local names. In - httplib2 0.9 and earlier, this defaulted to False. - proxy_user: The username used to authenticate with the proxy server. - proxy_pass: The password used to authenticate with the proxy server. - proxy_headers: Additional or modified headers for the proxy connect - request. - """ - self.proxy_type = proxy_type - self.proxy_host = proxy_host - self.proxy_port = proxy_port - self.proxy_rdns = proxy_rdns - self.proxy_user = proxy_user - self.proxy_pass = proxy_pass - self.proxy_headers = proxy_headers - - def astuple(self): - return ( - self.proxy_type, - self.proxy_host, - self.proxy_port, - self.proxy_rdns, - self.proxy_user, - self.proxy_pass, - self.proxy_headers, - ) - - def isgood(self): - return (self.proxy_host != None) and (self.proxy_port != None) - - def applies_to(self, hostname): - return not self.bypass_host(hostname) - - def bypass_host(self, hostname): - """Has this host been excluded from the proxy config""" - if self.bypass_hosts is AllHosts: - return True - - hostname = "." + hostname.lstrip(".") - for skip_name in self.bypass_hosts: - # *.suffix - if skip_name.startswith(".") and hostname.endswith(skip_name): - return True - # exact match - if hostname == "." + skip_name: - return True - return False - - def __repr__(self): - return ( - "" - ).format(p=self) - - -def proxy_info_from_environment(method="http"): - """Read proxy info from the environment variables. 
- """ - if method not in ["http", "https"]: - return - - env_var = method + "_proxy" - url = os.environ.get(env_var, os.environ.get(env_var.upper())) - if not url: - return - return proxy_info_from_url(url, method, None) - - -def proxy_info_from_url(url, method="http", noproxy=None): - """Construct a ProxyInfo from a URL (such as http_proxy env var) - """ - url = urlparse.urlparse(url) - - proxy_type = 3 # socks.PROXY_TYPE_HTTP - pi = ProxyInfo( - proxy_type=proxy_type, - proxy_host=url.hostname, - proxy_port=url.port or dict(https=443, http=80)[method], - proxy_user=url.username or None, - proxy_pass=url.password or None, - proxy_headers=None, - ) - - bypass_hosts = [] - # If not given an explicit noproxy value, respect values in env vars. - if noproxy is None: - noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", "")) - # Special case: A single '*' character means all hosts should be bypassed. - if noproxy == "*": - bypass_hosts = AllHosts - elif noproxy.strip(): - bypass_hosts = noproxy.split(",") - bypass_hosts = filter(bool, bypass_hosts) # To exclude empty string. - - pi.bypass_hosts = bypass_hosts - return pi - - -class HTTPConnectionWithTimeout(httplib.HTTPConnection): - """HTTPConnection subclass that supports timeouts - - All timeouts are in seconds. If None is passed for timeout then - Python's default timeout for sockets will be used. See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - - def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): - httplib.HTTPConnection.__init__(self, host, port, strict) - self.timeout = timeout - self.proxy_info = proxy_info - - def connect(self): - """Connect to the host and port specified in __init__.""" - # Mostly verbatim from httplib.py. 
- if self.proxy_info and socks is None: - raise ProxiesUnavailableError("Proxy support missing but proxy use was requested!") - if self.proxy_info and self.proxy_info.isgood(): - use_proxy = True - ( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) = self.proxy_info.astuple() - - host = proxy_host - port = proxy_port - else: - use_proxy = False - - host = self.host - port = self.port - - socket_err = None - - for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - try: - if use_proxy: - self.sock = socks.socksocket(af, socktype, proto) - self.sock.setproxy( - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers, - ) - else: - self.sock = socket.socket(af, socktype, proto) - self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - # Different from httplib: support timeouts. - if has_timeout(self.timeout): - self.sock.settimeout(self.timeout) - # End of difference from httplib. - if self.debuglevel > 0: - print("connect: (%s, %s) ************" % (self.host, self.port)) - if use_proxy: - print( - "proxy: %s ************" - % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - if use_proxy: - self.sock.connect((self.host, self.port) + sa[2:]) - else: - self.sock.connect(sa) - except socket.error as e: - socket_err = e - if self.debuglevel > 0: - print("connect fail: (%s, %s)" % (self.host, self.port)) - if use_proxy: - print( - "proxy: %s" - % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket_err or socket.error("getaddrinfo returns an empty list") - - -class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): - """This class allows communication via SSL. - - All timeouts are in seconds. 
If None is passed for timeout then - Python's default timeout for sockets will be used. See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - - def __init__( - self, - host, - port=None, - key_file=None, - cert_file=None, - strict=None, - timeout=None, - proxy_info=None, - ca_certs=None, - disable_ssl_certificate_validation=False, - ssl_version=None, - key_password=None, - ): - if key_password: - httplib.HTTPSConnection.__init__(self, host, port=port, strict=strict) - self._context.load_cert_chain(cert_file, key_file, key_password) - self.key_file = key_file - self.cert_file = cert_file - self.key_password = key_password - else: - httplib.HTTPSConnection.__init__( - self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict - ) - self.key_password = None - self.timeout = timeout - self.proxy_info = proxy_info - if ca_certs is None: - ca_certs = CA_CERTS - self.ca_certs = ca_certs - self.disable_ssl_certificate_validation = disable_ssl_certificate_validation - self.ssl_version = ssl_version - - # The following two methods were adapted from https_wrapper.py, released - # with the Google Appengine SDK at - # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py - # under the following license: - # - # Copyright 2007 Google Inc. - # - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. 
- # - - def _GetValidHostsForCert(self, cert): - """Returns a list of valid host globs for an SSL certificate. - - Args: - cert: A dictionary representing an SSL certificate. - Returns: - list: A list of valid host globs. - """ - if "subjectAltName" in cert: - return [x[1] for x in cert["subjectAltName"] if x[0].lower() == "dns"] - else: - return [x[0][1] for x in cert["subject"] if x[0][0].lower() == "commonname"] - - def _ValidateCertificateHostname(self, cert, hostname): - """Validates that a given hostname is valid for an SSL certificate. - - Args: - cert: A dictionary representing an SSL certificate. - hostname: The hostname to test. - Returns: - bool: Whether or not the hostname is valid for this certificate. - """ - hosts = self._GetValidHostsForCert(cert) - for host in hosts: - host_re = host.replace(".", "\.").replace("*", "[^.]*") - if re.search("^%s$" % (host_re,), hostname, re.I): - return True - return False - - def connect(self): - "Connect to a host on a given (SSL) port." 
- - if self.proxy_info and self.proxy_info.isgood(): - use_proxy = True - ( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) = self.proxy_info.astuple() - - host = proxy_host - port = proxy_port - else: - use_proxy = False - - host = self.host - port = self.port - - socket_err = None - - address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) - for family, socktype, proto, canonname, sockaddr in address_info: - try: - if use_proxy: - sock = socks.socksocket(family, socktype, proto) - - sock.setproxy( - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers, - ) - else: - sock = socket.socket(family, socktype, proto) - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - if has_timeout(self.timeout): - sock.settimeout(self.timeout) - - if use_proxy: - sock.connect((self.host, self.port) + sockaddr[:2]) - else: - sock.connect(sockaddr) - self.sock = _ssl_wrap_socket( - sock, - self.key_file, - self.cert_file, - self.disable_ssl_certificate_validation, - self.ca_certs, - self.ssl_version, - self.host, - self.key_password, - ) - if self.debuglevel > 0: - print("connect: (%s, %s)" % (self.host, self.port)) - if use_proxy: - print( - "proxy: %s" - % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - if not self.disable_ssl_certificate_validation: - cert = self.sock.getpeercert() - hostname = self.host.split(":", 0)[0] - if not self._ValidateCertificateHostname(cert, hostname): - raise CertificateHostnameMismatch( - "Server presented certificate that does not match " "host %s: %s" % (hostname, cert), - hostname, - cert, - ) - except (ssl_SSLError, ssl_CertificateError, CertificateHostnameMismatch,) as e: - if sock: - sock.close() - if self.sock: - self.sock.close() - self.sock = None - # Unfortunately the ssl module doesn't seem to provide any way - # to get at more detailed error information, in particular - # whether the 
error is due to certificate validation or - # something else (such as SSL protocol mismatch). - if getattr(e, "errno", None) == ssl.SSL_ERROR_SSL: - raise SSLHandshakeError(e) - else: - raise - except (socket.timeout, socket.gaierror): - raise - except socket.error as e: - socket_err = e - if self.debuglevel > 0: - print("connect fail: (%s, %s)" % (self.host, self.port)) - if use_proxy: - print( - "proxy: %s" - % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket_err or socket.error("getaddrinfo returns an empty list") - - -SCHEME_TO_CONNECTION = { - "http": HTTPConnectionWithTimeout, - "https": HTTPSConnectionWithTimeout, -} - - -def _new_fixed_fetch(validate_certificate): - def fixed_fetch( - url, payload=None, method="GET", headers={}, allow_truncated=False, follow_redirects=True, deadline=None, - ): - return fetch( - url, - payload=payload, - method=method, - headers=headers, - allow_truncated=allow_truncated, - follow_redirects=follow_redirects, - deadline=deadline, - validate_certificate=validate_certificate, - ) - - return fixed_fetch - - -class AppEngineHttpConnection(httplib.HTTPConnection): - """Use httplib on App Engine, but compensate for its weirdness. - - The parameters key_file, cert_file, proxy_info, ca_certs, - disable_ssl_certificate_validation, and ssl_version are all dropped on - the ground. - """ - - def __init__( - self, - host, - port=None, - key_file=None, - cert_file=None, - strict=None, - timeout=None, - proxy_info=None, - ca_certs=None, - disable_ssl_certificate_validation=False, - ssl_version=None, - ): - httplib.HTTPConnection.__init__(self, host, port=port, strict=strict, timeout=timeout) - - -class AppEngineHttpsConnection(httplib.HTTPSConnection): - """Same as AppEngineHttpConnection, but for HTTPS URIs. 
- - The parameters proxy_info, ca_certs, disable_ssl_certificate_validation, - and ssl_version are all dropped on the ground. - """ - - def __init__( - self, - host, - port=None, - key_file=None, - cert_file=None, - strict=None, - timeout=None, - proxy_info=None, - ca_certs=None, - disable_ssl_certificate_validation=False, - ssl_version=None, - key_password=None, - ): - if key_password: - raise NotSupportedOnThisPlatform("Certificate with password is not supported.") - httplib.HTTPSConnection.__init__( - self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict, timeout=timeout, - ) - self._fetch = _new_fixed_fetch(not disable_ssl_certificate_validation) - - -# Use a different connection object for Google App Engine Standard Environment. -def is_gae_instance(): - server_software = os.environ.get("SERVER_SOFTWARE", "") - if ( - server_software.startswith("Google App Engine/") - or server_software.startswith("Development/") - or server_software.startswith("testutil/") - ): - return True - return False - - -try: - if not is_gae_instance(): - raise NotRunningAppEngineEnvironment() - - from google.appengine.api import apiproxy_stub_map - - if apiproxy_stub_map.apiproxy.GetStub("urlfetch") is None: - raise ImportError - - from google.appengine.api.urlfetch import fetch - - # Update the connection classes to use the Googel App Engine specific ones. - SCHEME_TO_CONNECTION = { - "http": AppEngineHttpConnection, - "https": AppEngineHttpsConnection, - } -except (ImportError, NotRunningAppEngineEnvironment): - pass - - -class Http(object): - """An HTTP client that handles: - - - all methods - - caching - - ETags - - compression, - - HTTPS - - Basic - - Digest - - WSSE - - and more. - """ - - def __init__( - self, - cache=None, - timeout=None, - proxy_info=proxy_info_from_environment, - ca_certs=None, - disable_ssl_certificate_validation=False, - ssl_version=None, - ): - """If 'cache' is a string then it is used as a directory name for - a disk cache. 
Otherwise it must be an object that supports the - same interface as FileCache. - - All timeouts are in seconds. If None is passed for timeout - then Python's default timeout for sockets will be used. See - for example the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - - `proxy_info` may be: - - a callable that takes the http scheme ('http' or 'https') and - returns a ProxyInfo instance per request. By default, uses - proxy_nfo_from_environment. - - a ProxyInfo instance (static proxy config). - - None (proxy disabled). - - ca_certs is the path of a file containing root CA certificates for SSL - server certificate validation. By default, a CA cert file bundled with - httplib2 is used. - - If disable_ssl_certificate_validation is true, SSL cert validation will - not be performed. - - By default, ssl.PROTOCOL_SSLv23 will be used for the ssl version. - """ - self.proxy_info = proxy_info - self.ca_certs = ca_certs - self.disable_ssl_certificate_validation = disable_ssl_certificate_validation - self.ssl_version = ssl_version - - # Map domain name to an httplib connection - self.connections = {} - # The location of the cache, for now a directory - # where cached responses are held. - if cache and isinstance(cache, basestring): - self.cache = FileCache(cache) - else: - self.cache = cache - - # Name/password - self.credentials = Credentials() - - # Key/cert - self.certificates = KeyCerts() - - # authorization objects - self.authorizations = [] - - # If set to False then no redirects are followed, even safe ones. - self.follow_redirects = True - - self.redirect_codes = REDIRECT_CODES - - # Which HTTP methods do we apply optimistic concurrency to, i.e. - # which methods get an "if-match:" etag header added to them. 
- self.optimistic_concurrency_methods = ["PUT", "PATCH"] - - self.safe_methods = list(SAFE_METHODS) - - # If 'follow_redirects' is True, and this is set to True then - # all redirecs are followed, including unsafe ones. - self.follow_all_redirects = False - - self.ignore_etag = False - - self.force_exception_to_status_code = False - - self.timeout = timeout - - # Keep Authorization: headers on a redirect. - self.forward_authorization_headers = False - - def close(self): - """Close persistent connections, clear sensitive data. - Not thread-safe, requires external synchronization against concurrent requests. - """ - existing, self.connections = self.connections, {} - for _, c in existing.iteritems(): - c.close() - self.certificates.clear() - self.clear_credentials() - - def __getstate__(self): - state_dict = copy.copy(self.__dict__) - # In case request is augmented by some foreign object such as - # credentials which handle auth - if "request" in state_dict: - del state_dict["request"] - if "connections" in state_dict: - del state_dict["connections"] - return state_dict - - def __setstate__(self, state): - self.__dict__.update(state) - self.connections = {} - - def _auth_from_challenge(self, host, request_uri, headers, response, content): - """A generator that creates Authorization objects - that can be applied to requests. 
- """ - challenges = auth._parse_www_authenticate(response, "www-authenticate") - for cred in self.credentials.iter(host): - for scheme in AUTH_SCHEME_ORDER: - if scheme in challenges: - yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) - - def add_credentials(self, name, password, domain=""): - """Add a name and password that will be used - any time a request requires authentication.""" - self.credentials.add(name, password, domain) - - def add_certificate(self, key, cert, domain, password=None): - """Add a key and cert that will be used - any time a request requires authentication.""" - self.certificates.add(key, cert, domain, password) - - def clear_credentials(self): - """Remove all the names and passwords - that are used for authentication""" - self.credentials.clear() - self.authorizations = [] - - def _conn_request(self, conn, request_uri, method, body, headers): - i = 0 - seen_bad_status_line = False - while i < RETRIES: - i += 1 - try: - if hasattr(conn, "sock") and conn.sock is None: - conn.connect() - conn.request(method, request_uri, body, headers) - except socket.timeout: - raise - except socket.gaierror: - conn.close() - raise ServerNotFoundError("Unable to find the server at %s" % conn.host) - except ssl_SSLError: - conn.close() - raise - except socket.error as e: - err = 0 - if hasattr(e, "args"): - err = getattr(e, "args")[0] - else: - err = e.errno - if err == errno.ECONNREFUSED: # Connection refused - raise - if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: - continue # retry on potentially transient socket errors - except httplib.HTTPException: - # Just because the server closed the connection doesn't apparently mean - # that the server didn't send a response. 
- if hasattr(conn, "sock") and conn.sock is None: - if i < RETRIES - 1: - conn.close() - conn.connect() - continue - else: - conn.close() - raise - if i < RETRIES - 1: - conn.close() - conn.connect() - continue - try: - response = conn.getresponse() - except httplib.BadStatusLine: - # If we get a BadStatusLine on the first try then that means - # the connection just went stale, so retry regardless of the - # number of RETRIES set. - if not seen_bad_status_line and i == 1: - i = 0 - seen_bad_status_line = True - conn.close() - conn.connect() - continue - else: - conn.close() - raise - except (socket.error, httplib.HTTPException): - if i < RETRIES - 1: - conn.close() - conn.connect() - continue - else: - conn.close() - raise - else: - content = "" - if method == "HEAD": - conn.close() - else: - content = response.read() - response = Response(response) - if method != "HEAD": - content = _decompressContent(response, content) - break - return (response, content) - - def _request( - self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey, - ): - """Do the actual request using the connection object - and also follow one level of redirects if necessary""" - - auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] - auth = auths and sorted(auths)[0][1] or None - if auth: - auth.request(method, request_uri, headers, body) - - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - - if auth: - if auth.response(response, body): - auth.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - response._stale_digest = 1 - - if response.status == 401: - for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): - authorization.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - 
if response.status != 401: - self.authorizations.append(authorization) - authorization.response(response, body) - break - - if self.follow_all_redirects or method in self.safe_methods or response.status in (303, 308): - if self.follow_redirects and response.status in self.redirect_codes: - # Pick out the location header and basically start from the beginning - # remembering first to strip the ETag header and decrement our 'depth' - if redirections: - if "location" not in response and response.status != 300: - raise RedirectMissingLocation( - _("Redirected but the response is missing a Location: header."), response, content, - ) - # Fix-up relative redirects (which violate an RFC 2616 MUST) - if "location" in response: - location = response["location"] - (scheme, authority, path, query, fragment) = parse_uri(location) - if authority == None: - response["location"] = urlparse.urljoin(absolute_uri, location) - if response.status == 308 or (response.status == 301 and method in self.safe_methods): - response["-x-permanent-redirect-url"] = response["location"] - if "content-location" not in response: - response["content-location"] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - if "if-none-match" in headers: - del headers["if-none-match"] - if "if-modified-since" in headers: - del headers["if-modified-since"] - if "authorization" in headers and not self.forward_authorization_headers: - del headers["authorization"] - if "location" in response: - location = response["location"] - old_response = copy.deepcopy(response) - if "content-location" not in old_response: - old_response["content-location"] = absolute_uri - redirect_method = method - if response.status in [302, 303]: - redirect_method = "GET" - body = None - (response, content) = self.request( - location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1, - ) - response.previous = old_response - else: - raise RedirectLimit( - "Redirected more times 
than rediection_limit allows.", response, content, - ) - elif response.status in [200, 203] and method in self.safe_methods: - # Don't cache 206's since we aren't going to handle byte range requests - if "content-location" not in response: - response["content-location"] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - - return (response, content) - - def _normalize_headers(self, headers): - return _normalize_headers(headers) - - # Need to catch and rebrand some exceptions - # Then need to optionally turn all exceptions into status codes - # including all socket.* and httplib.* exceptions. - - def request( - self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, - ): - """ Performs a single HTTP request. - - The 'uri' is the URI of the HTTP resource and can begin with either - 'http' or 'https'. The value of 'uri' must be an absolute URI. - - The 'method' is the HTTP method to perform, such as GET, POST, DELETE, - etc. There is no restriction on the methods allowed. - - The 'body' is the entity body to be sent with the request. It is a - string object. - - Any extra headers that are to be sent with the request should be - provided in the 'headers' dictionary. - - The maximum number of redirect to follow before raising an - exception is 'redirections. The default is 5. - - The return value is a tuple of (response, content), the first - being and instance of the 'Response' class, the second being - a string that contains the response entity body. - """ - conn_key = "" - - try: - if headers is None: - headers = {} - else: - headers = self._normalize_headers(headers) - - if "user-agent" not in headers: - headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__ - - uri = iri2uri(uri) - # Prevent CWE-75 space injection to manipulate request via part of uri. - # Prevent CWE-93 CRLF injection to modify headers via part of uri. 
- uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A") - - (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) - - proxy_info = self._get_proxy_info(scheme, authority) - - conn_key = scheme + ":" + authority - conn = self.connections.get(conn_key) - if conn is None: - if not connection_type: - connection_type = SCHEME_TO_CONNECTION[scheme] - certs = list(self.certificates.iter(authority)) - if scheme == "https": - if certs: - conn = self.connections[conn_key] = connection_type( - authority, - key_file=certs[0][0], - cert_file=certs[0][1], - timeout=self.timeout, - proxy_info=proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, - ssl_version=self.ssl_version, - key_password=certs[0][2], - ) - else: - conn = self.connections[conn_key] = connection_type( - authority, - timeout=self.timeout, - proxy_info=proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, - ssl_version=self.ssl_version, - ) - else: - conn = self.connections[conn_key] = connection_type( - authority, timeout=self.timeout, proxy_info=proxy_info - ) - conn.set_debuglevel(debuglevel) - - if "range" not in headers and "accept-encoding" not in headers: - headers["accept-encoding"] = "gzip, deflate" - - info = email.Message.Message() - cachekey = None - cached_value = None - if self.cache: - cachekey = defrag_uri.encode("utf-8") - cached_value = self.cache.get(cachekey) - if cached_value: - # info = email.message_from_string(cached_value) - # - # Need to replace the line above with the kludge below - # to fix the non-existent bug not fixed in this - # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html - try: - info, content = cached_value.split("\r\n\r\n", 1) - feedparser = email.FeedParser.FeedParser() - feedparser.feed(info) - info = feedparser.close() - feedparser._parse = None - except (IndexError, ValueError): - 
self.cache.delete(cachekey) - cachekey = None - cached_value = None - - if ( - method in self.optimistic_concurrency_methods - and self.cache - and "etag" in info - and not self.ignore_etag - and "if-match" not in headers - ): - # http://www.w3.org/1999/04/Editing/ - headers["if-match"] = info["etag"] - - # https://tools.ietf.org/html/rfc7234 - # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location - # when a non-error status code is received in response to an unsafe request method. - if self.cache and cachekey and method not in self.safe_methods: - self.cache.delete(cachekey) - - # Check the vary header in the cache to see if this request - # matches what varies in the cache. - if method in self.safe_methods and "vary" in info: - vary = info["vary"] - vary_headers = vary.lower().replace(" ", "").split(",") - for header in vary_headers: - key = "-varied-%s" % header - value = info[key] - if headers.get(header, None) != value: - cached_value = None - break - - if ( - self.cache - and cached_value - and (method in self.safe_methods or info["status"] == "308") - and "range" not in headers - ): - redirect_method = method - if info["status"] not in ("307", "308"): - redirect_method = "GET" - if "-x-permanent-redirect-url" in info: - # Should cached permanent redirects be counted in our redirection count? For now, yes. - if redirections <= 0: - raise RedirectLimit( - "Redirected more times than rediection_limit allows.", {}, "", - ) - (response, new_content) = self.request( - info["-x-permanent-redirect-url"], - method=redirect_method, - headers=headers, - redirections=redirections - 1, - ) - response.previous = Response(info) - response.previous.fromcache = True - else: - # Determine our course of action: - # Is the cached entry fresh or stale? - # Has the client requested a non-cached response? - # - # There seems to be three possible answers: - # 1. [FRESH] Return the cache entry w/o doing a GET - # 2. 
[STALE] Do the GET (but add in cache validators if available) - # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request - entry_disposition = _entry_disposition(info, headers) - - if entry_disposition == "FRESH": - if not cached_value: - info["status"] = "504" - content = "" - response = Response(info) - if cached_value: - response.fromcache = True - return (response, content) - - if entry_disposition == "STALE": - if "etag" in info and not self.ignore_etag and not "if-none-match" in headers: - headers["if-none-match"] = info["etag"] - if "last-modified" in info and not "last-modified" in headers: - headers["if-modified-since"] = info["last-modified"] - elif entry_disposition == "TRANSPARENT": - pass - - (response, new_content) = self._request( - conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, - ) - - if response.status == 304 and method == "GET": - # Rewrite the cache entry with the new end-to-end headers - # Take all headers that are in response - # and overwrite their values in info. - # unless they are hop-by-hop, or are listed in the connection header. 
- - for key in _get_end2end_headers(response): - info[key] = response[key] - merged_response = Response(info) - if hasattr(response, "_stale_digest"): - merged_response._stale_digest = response._stale_digest - _updateCache(headers, merged_response, content, self.cache, cachekey) - response = merged_response - response.status = 200 - response.fromcache = True - - elif response.status == 200: - content = new_content - else: - self.cache.delete(cachekey) - content = new_content - else: - cc = _parse_cache_control(headers) - if "only-if-cached" in cc: - info["status"] = "504" - response = Response(info) - content = "" - else: - (response, content) = self._request( - conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, - ) - except Exception as e: - is_timeout = isinstance(e, socket.timeout) - if is_timeout: - conn = self.connections.pop(conn_key, None) - if conn: - conn.close() - - if self.force_exception_to_status_code: - if isinstance(e, HttpLib2ErrorWithResponse): - response = e.response - content = e.content - response.status = 500 - response.reason = str(e) - elif is_timeout: - content = "Request Timeout" - response = Response({"content-type": "text/plain", "status": "408", "content-length": len(content),}) - response.reason = "Request Timeout" - else: - content = str(e) - response = Response({"content-type": "text/plain", "status": "400", "content-length": len(content),}) - response.reason = "Bad Request" - else: - raise - - return (response, content) - - def _get_proxy_info(self, scheme, authority): - """Return a ProxyInfo instance (or None) based on the scheme - and authority. 
- """ - hostname, port = urllib.splitport(authority) - proxy_info = self.proxy_info - if callable(proxy_info): - proxy_info = proxy_info(scheme) - - if hasattr(proxy_info, "applies_to") and not proxy_info.applies_to(hostname): - proxy_info = None - return proxy_info - - -class Response(dict): - """An object more like email.Message than httplib.HTTPResponse.""" - - """Is this response from our local cache""" - fromcache = False - """HTTP protocol version used by server. - - 10 for HTTP/1.0, 11 for HTTP/1.1. - """ - version = 11 - - "Status code returned by server. " - status = 200 - """Reason phrase returned by server.""" - reason = "Ok" - - previous = None - - def __init__(self, info): - # info is either an email.Message or - # an httplib.HTTPResponse object. - if isinstance(info, httplib.HTTPResponse): - for key, value in info.getheaders(): - self[key.lower()] = value - self.status = info.status - self["status"] = str(self.status) - self.reason = info.reason - self.version = info.version - elif isinstance(info, email.Message.Message): - for key, value in info.items(): - self[key.lower()] = value - self.status = int(self["status"]) - else: - for key, value in info.iteritems(): - self[key.lower()] = value - self.status = int(self.get("status", self.status)) - self.reason = self.get("reason", self.reason) - - def __getattr__(self, name): - if name == "dict": - return self - else: - raise AttributeError(name) diff --git a/shotgun_api3/lib/httplib2/python2/auth.py b/shotgun_api3/lib/httplib2/python2/auth.py deleted file mode 100644 index 7a1c2a7e8..000000000 --- a/shotgun_api3/lib/httplib2/python2/auth.py +++ /dev/null @@ -1,63 +0,0 @@ -import base64 -import re - -from ... 
import pyparsing as pp - -from .error import * - -UNQUOTE_PAIRS = re.compile(r"\\(.)") -unquote = lambda s, l, t: UNQUOTE_PAIRS.sub(r"\1", t[0][1:-1]) - -# https://tools.ietf.org/html/rfc7235#section-1.2 -# https://tools.ietf.org/html/rfc7235#appendix-B -tchar = "!#$%&'*+-.^_`|~" + pp.nums + pp.alphas -token = pp.Word(tchar).setName("token") -token68 = pp.Combine(pp.Word("-._~+/" + pp.nums + pp.alphas) + pp.Optional(pp.Word("=").leaveWhitespace())).setName( - "token68" -) - -quoted_string = pp.dblQuotedString.copy().setName("quoted-string").setParseAction(unquote) -auth_param_name = token.copy().setName("auth-param-name").addParseAction(pp.downcaseTokens) -auth_param = auth_param_name + pp.Suppress("=") + (quoted_string | token) -params = pp.Dict(pp.delimitedList(pp.Group(auth_param))) - -scheme = token("scheme") -challenge = scheme + (params("params") | token68("token")) - -authentication_info = params.copy() -www_authenticate = pp.delimitedList(pp.Group(challenge)) - - -def _parse_authentication_info(headers, headername="authentication-info"): - """https://tools.ietf.org/html/rfc7615 - """ - header = headers.get(headername, "").strip() - if not header: - return {} - try: - parsed = authentication_info.parseString(header) - except pp.ParseException as ex: - # print(ex.explain(ex)) - raise MalformedHeader(headername) - - return parsed.asDict() - - -def _parse_www_authenticate(headers, headername="www-authenticate"): - """Returns a dictionary of dictionaries, one dict per auth_scheme.""" - header = headers.get(headername, "").strip() - if not header: - return {} - try: - parsed = www_authenticate.parseString(header) - except pp.ParseException as ex: - # print(ex.explain(ex)) - raise MalformedHeader(headername) - - retval = { - challenge["scheme"].lower(): challenge["params"].asDict() - if "params" in challenge - else {"token": challenge.get("token")} - for challenge in parsed - } - return retval diff --git a/shotgun_api3/lib/httplib2/python2/cacerts.txt 
b/shotgun_api3/lib/httplib2/python2/cacerts.txt deleted file mode 100644 index 78a444c43..000000000 --- a/shotgun_api3/lib/httplib2/python2/cacerts.txt +++ /dev/null @@ -1,2225 +0,0 @@ -# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Label: "GTE CyberTrust Global Root" -# Serial: 421 -# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db -# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 -# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD -VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv -bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv -b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV -UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU -cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds -b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH -iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS -r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 -04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r -GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 -3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P -lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- - -# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Server CA" -# Serial: 1 -# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d -# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c -# SHA256 Fingerprint: 
b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm -MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Premium Server CA" -# Serial: 1 -# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a -# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a -# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv 
-biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy -dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl -cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -# Issuer: O=Equifax OU=Equifax Secure Certificate Authority -# Subject: O=Equifax OU=Equifax Secure Certificate Authority -# Label: "Equifax Secure CA" -# Serial: 903804111 -# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 -# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a -# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC 
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Label: "Verisign Class 3 Public Primary Certification Authority - G2" -# Serial: 167285380242319648451154478808036881606 -# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 -# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f -# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i -F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE 
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd 
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Label: "ValiCert Class 1 VA" -# Serial: 1 -# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb -# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e -# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy -NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y -LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ -TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y -TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 -LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW -I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw -nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority -# Label: "ValiCert Class 2 VA" -# Serial: 1 -# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 -# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 -# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority -# Label: "RSA Root Certificate 1" -# Serial: 1 -# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 -# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb -# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy -NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD -cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs -2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY -JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE -Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ -n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A -PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - 
G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 4 Public Primary Certification Authority - G3" -# Serial: 314531972711909413743075096039378935511 -# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df -# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d -# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 -GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ -+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd -U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm -NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY -ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ -ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 -CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq -g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm 
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c -2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ -bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Secure Server CA" -# Serial: 927650371 -# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee -# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 -# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 -BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p 
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946059622 -# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc -# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe -# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f ------BEGIN CERTIFICATE----- -MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy -MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp 
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA -vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G -CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA -WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo -oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ -h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18 -f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN -B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy -vUxFnmG6v4SBkgPR0ml8xQ== ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK 
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure Global eBusiness CA" -# Serial: 1 -# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc -# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 -# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA 
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA -Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure eBusiness CA 1" -# Serial: 4 -# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d -# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 -# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 -# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 -# Label: "Equifax Secure eBusiness CA 2" -# Serial: 930140085 -# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca -# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc -# SHA256 Fingerprint: 
2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj -dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 -NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD -VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G -vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ -BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX -MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl -IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw -NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq -y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy -0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 -E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Low-Value Services Root" -# Serial: 1 -# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc -# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d -# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 ------BEGIN CERTIFICATE----- -MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw 
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD -VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul -CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n -tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl -dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch -PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC -+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O -BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl -MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk -ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB -IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X -7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz -43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY -eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl -pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA -WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Public Services Root" -# Serial: 1 -# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f -# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 -# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx -MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB -ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV -BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV -6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX -GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP -dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH -1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF -62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW -BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL -MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU -cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv -b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 -IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ -iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao -GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh -4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm -XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Qualified Certificates Root" -# Serial: 1 -# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb -# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf -# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 ------BEGIN CERTIFICATE----- -MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 -MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK -EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh -BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq -xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G -87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i -2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U -WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 -0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G -A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr -pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL -ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm -aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv -hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm -hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X -dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 -P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y -iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no -xqE= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END 
CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. -# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA 2" -# Serial: 1 -# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 -# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d -# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 ------BEGIN CERTIFICATE----- -MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs -IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg -R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A -PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 -Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL -TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL -5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 -S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe -2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE -FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap -EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td -EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv -/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN -A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 -abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF -I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz -4iIprn2DQKi6bA== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
-# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN -Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW 
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z 
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Label: "America Online Root Certification Authority 1" -# Serial: 1 -# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e -# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a -# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 ------BEGIN CERTIFICATE----- -MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk -hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym -1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW -OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb -2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko 
-O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU -AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF -Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb -LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir -oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C -MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds -sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Label: "America Online Root Certification Authority 2" -# Serial: 1 -# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf -# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 -# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd ------BEGIN CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC -206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci -KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 -JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 -BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e -Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B -PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 
-Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq -Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ -o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 -+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj -FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn -xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 -LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc -obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 -CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe -IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA -DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F -AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX -Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb -AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl -Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw -RY8mkaKO/qk= ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE 
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=Secure Certificate Services O=Comodo CA Limited -# Subject: CN=Secure Certificate Services O=Comodo CA Limited -# Label: "Comodo Secure Services root" -# Serial: 1 -# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd -# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 -# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 ------BEGIN CERTIFICATE----- -MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp -ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow -fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV -BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM -cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S -HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 -CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk -3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz -6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV -HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud -EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv -Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw -Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww -DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 -5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj -Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI -gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ -aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl -izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= ------END CERTIFICATE----- - -# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited -# Subject: CN=Trusted Certificate Services O=Comodo CA Limited -# Label: "Comodo Trusted Services root" -# Serial: 1 -# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 -# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd -# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 -aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla 
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO -BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD -VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW -fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt -TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL -fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW -1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 -kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G -A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v -ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo -dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu -Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ -HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 -pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS -jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ -xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn -dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi ------END CERTIFICATE----- - -# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN DATACorp SGC Root CA" -# Serial: 91374294542884689855167577680241077609 -# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 -# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 -# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug 
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG -EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD -VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu -dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 -E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ -D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK -4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq -lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW -bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB -o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT -MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js -LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr -BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB -AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj -j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH -KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv -2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 -mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- - -# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN USERFirst Hardware Root CA" -# Serial: 91374294542884704022267039221184531197 -# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 -# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 -# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 ------BEGIN 
CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB -lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt -SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG -A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe -MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v -d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh -cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn -0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ -M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a -MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd -oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI -DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy -oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 -dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy -bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF -BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli -CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE -CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t -3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS -KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 
Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 1 -# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 -# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f -# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea ------BEGIN CERTIFICATE----- -MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE -FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j -ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js -LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM 
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 -Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy -dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh -cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh -YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg -dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp -bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ -YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT -TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ -9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 -jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW -FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz -ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 -ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L -EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu -L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq -yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC -O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V -um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh -NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD 
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL 
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv 
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u 
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp 
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Subject: CN=TC 
TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Label: "TC TrustCenter Class 2 CA II" -# Serial: 941389028203453866782103406992443 -# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 -# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e -# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf -tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg -uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J -XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK -8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 -5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 -kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS -GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt -ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 -au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV 
-hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI -dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Label: "TC TrustCenter Class 3 CA II" -# Serial: 1506523511417715638772220530020799 -# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e -# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 -# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW -Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q -Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 -1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq -ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 -Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX -XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u 
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN -irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 -TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 -g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB -95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj -S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA I" -# Serial: 601024842042189035295619584734726 -# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c -# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 -# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx -MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg -R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD -VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR -JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T -fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu -jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z -wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ -fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD -VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G 
-CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 -7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn -8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs -ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ -2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx 
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB 
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. 
OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM 
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ -seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz 
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj 
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END 
CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA III" -# Serial: 2010889993983507346460533407902964 -# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b -# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87 -# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d ------BEGIN CERTIFICATE----- -MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy -MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl -ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm -BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF -5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv -DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v -zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT -yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj -dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh -MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI -4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz -dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY -aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G -DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV -CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH -LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== ------END 
CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# 
Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b 
------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U 
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ -u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: 
bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 45 -# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 -# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 -# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 ------BEGIN CERTIFICATE----- -MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul -F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC -ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w -ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk -aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg -c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 -d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG -CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF -wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS -Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst -0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc -pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl -CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF -P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK -1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm -KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE -JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ -8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm -fyWl8kgAwKQB2j8= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. -# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
-# Label: "StartCom Certification Authority G2" -# Serial: 59 -# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 -# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 -# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 ------BEGIN CERTIFICATE----- -MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 -OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG -A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ -JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD -vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo -D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ -Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW -RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK -HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN -nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM -0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i -UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 -Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg -TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL -BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K -2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX -UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl -6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK -9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ -HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI 
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY -XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l -IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo -hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr -so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US -# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US -# Serial: 33af1e6a711a9a0bb2864b11d09fae5 -# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44 -# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4 -# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 
-8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X1 -# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X1 -# Serial: 8210CFB0D240E3594463E0BB63828B00 -# SHA1 Fingerprint: CA:BD:2A:79:A1:07:6A:31:F2:1D:25:36:35:CB:03:9D:43:29:A5:E8 -# SHA256 Fingerprint: 96:BC:EC:06:26:49:76:F3:74:60:77:9A:CF:28:C5:A7:CF:E8:A3:C0:AA:E1:1A:8F:FC:EE:05:C0:BD:DF:08:C6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK 
-NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc -oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X2 -# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X2 -# Serial: 41D29DD172EAEEA780C12C6CE92F8752 -# SHA1 Fingerprint: BD:B1:B9:3C:D5:97:8D:45:C6:26:14:55:F8:DB:95:C7:5A:D1:53:AF -# SHA256 Fingerprint: 69:72:9B:8E:15:A8:6E:FC:17:7A:57:AF:B7:17:1D:FC:64:AD:D2:8C:2F:CA:8C:F1:50:7E:34:45:3C:CB:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- diff --git a/shotgun_api3/lib/httplib2/python2/certs.py b/shotgun_api3/lib/httplib2/python2/certs.py deleted file mode 100644 index 59d1ffc70..000000000 --- a/shotgun_api3/lib/httplib2/python2/certs.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Utilities for 
certificate management.""" - -import os - -certifi_available = False -certifi_where = None -try: - from certifi import where as certifi_where - certifi_available = True -except ImportError: - pass - -custom_ca_locater_available = False -custom_ca_locater_where = None -try: - from ca_certs_locater import get as custom_ca_locater_where - custom_ca_locater_available = True -except ImportError: - pass - - -BUILTIN_CA_CERTS = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "cacerts.txt" -) - - -def where(): - env = os.environ.get("HTTPLIB2_CA_CERTS") - if env is not None: - if os.path.isfile(env): - return env - else: - raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file") - if custom_ca_locater_available: - return custom_ca_locater_where() - if certifi_available: - return certifi_where() - return BUILTIN_CA_CERTS - - -if __name__ == "__main__": - print(where()) diff --git a/shotgun_api3/lib/httplib2/python2/error.py b/shotgun_api3/lib/httplib2/python2/error.py deleted file mode 100644 index 0e68c12a8..000000000 --- a/shotgun_api3/lib/httplib2/python2/error.py +++ /dev/null @@ -1,48 +0,0 @@ -# All exceptions raised here derive from HttpLib2Error -class HttpLib2Error(Exception): - pass - - -# Some exceptions can be caught and optionally -# be turned back into responses. 
-class HttpLib2ErrorWithResponse(HttpLib2Error): - def __init__(self, desc, response, content): - self.response = response - self.content = content - HttpLib2Error.__init__(self, desc) - - -class RedirectMissingLocation(HttpLib2ErrorWithResponse): - pass - - -class RedirectLimit(HttpLib2ErrorWithResponse): - pass - - -class FailedToDecompressContent(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class MalformedHeader(HttpLib2Error): - pass - - -class RelativeURIError(HttpLib2Error): - pass - - -class ServerNotFoundError(HttpLib2Error): - pass - - -class ProxiesUnavailableError(HttpLib2Error): - pass diff --git a/shotgun_api3/lib/httplib2/python2/iri2uri.py b/shotgun_api3/lib/httplib2/python2/iri2uri.py deleted file mode 100644 index 0a978a784..000000000 --- a/shotgun_api3/lib/httplib2/python2/iri2uri.py +++ /dev/null @@ -1,123 +0,0 @@ -"""Converts an IRI to a URI.""" - -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = [] -__version__ = "1.0.0" -__license__ = "MIT" - -import urlparse - -# Convert an IRI to a URI following the rules in RFC 3987 -# -# The characters we need to enocde and escape are defined in the spec: -# -# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD -# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF -# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD -# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD -# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD -# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD -# / %xD0000-DFFFD / %xE1000-EFFFD - -escape_range = [ - (0xA0, 0xD7FF), - (0xE000, 0xF8FF), - (0xF900, 0xFDCF), - (0xFDF0, 0xFFEF), - (0x10000, 0x1FFFD), - (0x20000, 0x2FFFD), - (0x30000, 0x3FFFD), - (0x40000, 0x4FFFD), - (0x50000, 0x5FFFD), - (0x60000, 0x6FFFD), - (0x70000, 0x7FFFD), - (0x80000, 0x8FFFD), - (0x90000, 
0x9FFFD), - (0xA0000, 0xAFFFD), - (0xB0000, 0xBFFFD), - (0xC0000, 0xCFFFD), - (0xD0000, 0xDFFFD), - (0xE1000, 0xEFFFD), - (0xF0000, 0xFFFFD), - (0x100000, 0x10FFFD), -] - - -def encode(c): - retval = c - i = ord(c) - for low, high in escape_range: - if i < low: - break - if i >= low and i <= high: - retval = "".join(["%%%2X" % ord(o) for o in c.encode("utf-8")]) - break - return retval - - -def iri2uri(uri): - """Convert an IRI to a URI. Note that IRIs must be - passed in a unicode strings. That is, do not utf-8 encode - the IRI before passing it into the function.""" - if isinstance(uri, unicode): - (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) - authority = authority.encode("idna") - # For each character in 'ucschar' or 'iprivate' - # 1. encode as utf-8 - # 2. then %-encode each octet of that utf-8 - uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) - uri = "".join([encode(c) for c in uri]) - return uri - - -if __name__ == "__main__": - import unittest - - class Test(unittest.TestCase): - def test_uris(self): - """Test that URIs are invariant under the transformation.""" - invariant = [ - u"ftp://ftp.is.co.za/rfc/rfc1808.txt", - u"http://www.ietf.org/rfc/rfc2396.txt", - u"ldap://[2001:db8::7]/c=GB?objectClass?one", - u"mailto:John.Doe@example.com", - u"news:comp.infosystems.www.servers.unix", - u"tel:+1-816-555-1212", - u"telnet://192.0.2.16:80/", - u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2", - ] - for uri in invariant: - self.assertEqual(uri, iri2uri(uri)) - - def test_iri(self): - """Test that the right type of escaping is done for each part of the URI.""" - self.assertEqual( - "http://xn--o3h.com/%E2%98%84", - iri2uri(u"http://\N{COMET}.com/\N{COMET}"), - ) - self.assertEqual( - "http://bitworking.org/?fred=%E2%98%84", - iri2uri(u"http://bitworking.org/?fred=\N{COMET}"), - ) - self.assertEqual( - "http://bitworking.org/#%E2%98%84", - iri2uri(u"http://bitworking.org/#\N{COMET}"), - ) - 
self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) - self.assertEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"), - ) - self.assertEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")), - ) - self.assertNotEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri( - u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8") - ), - ) - - unittest.main() diff --git a/shotgun_api3/lib/httplib2/python2/socks.py b/shotgun_api3/lib/httplib2/python2/socks.py deleted file mode 100644 index 71eb4ebf9..000000000 --- a/shotgun_api3/lib/httplib2/python2/socks.py +++ /dev/null @@ -1,518 +0,0 @@ -"""SocksiPy - Python SOCKS module. - -Version 1.00 - -Copyright 2006 Dan-Haim. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -3. Neither the name of Dan Haim nor the names of his contributors may be used - to endorse or promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA -OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. - -This module provides a standard socket-like interface for Python -for tunneling connections through SOCKS proxies. - -Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for -use in PyLoris (http://pyloris.sourceforge.net/). - -Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) -mainly to merge bug fixes found in Sourceforge. -""" - -import base64 -import socket -import struct -import sys - -if getattr(socket, "socket", None) is None: - raise ImportError("socket.socket missing, proxy support unusable") - -PROXY_TYPE_SOCKS4 = 1 -PROXY_TYPE_SOCKS5 = 2 -PROXY_TYPE_HTTP = 3 -PROXY_TYPE_HTTP_NO_TUNNEL = 4 - -_defaultproxy = None -_orgsocket = socket.socket - - -class ProxyError(Exception): - pass - - -class GeneralProxyError(ProxyError): - pass - - -class Socks5AuthError(ProxyError): - pass - - -class Socks5Error(ProxyError): - pass - - -class Socks4Error(ProxyError): - pass - - -class HTTPError(ProxyError): - pass - - -_generalerrors = ( - "success", - "invalid data", - "not connected", - "not available", - "bad proxy type", - "bad input", -) - -_socks5errors = ( - "succeeded", - "general SOCKS server failure", - "connection not allowed by ruleset", - "Network unreachable", - "Host unreachable", - "Connection refused", - "TTL expired", - "Command not supported", - "Address type not supported", - "Unknown error", -) - -_socks5autherrors = ( - "succeeded", - "authentication is required", - "all offered authentication methods were rejected", - "unknown 
username or invalid password", - "unknown error", -) - -_socks4errors = ( - "request granted", - "request rejected or failed", - "request rejected because SOCKS server cannot connect to identd on the client", - "request rejected because the client program and identd report different " - "user-ids", - "unknown error", -) - - -def setdefaultproxy( - proxytype=None, addr=None, port=None, rdns=True, username=None, password=None -): - """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - Sets a default proxy which all further socksocket objects will use, - unless explicitly changed. - """ - global _defaultproxy - _defaultproxy = (proxytype, addr, port, rdns, username, password) - - -def wrapmodule(module): - """wrapmodule(module) - - Attempts to replace a module's socket library with a SOCKS socket. Must set - a default proxy using setdefaultproxy(...) first. - This will only work on modules that import socket directly into the - namespace; - most of the Python Standard Library falls into this category. - """ - if _defaultproxy != None: - module.socket.socket = socksocket - else: - raise GeneralProxyError((4, "no proxy specified")) - - -class socksocket(socket.socket): - """socksocket([family[, type[, proto]]]) -> socket object - Open a SOCKS enabled socket. The parameters are the same as - those of the standard socket init. In order for SOCKS to work, - you must specify family=AF_INET, type=SOCK_STREAM and proto=0. - """ - - def __init__( - self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None - ): - _orgsocket.__init__(self, family, type, proto, _sock) - if _defaultproxy != None: - self.__proxy = _defaultproxy - else: - self.__proxy = (None, None, None, None, None, None) - self.__proxysockname = None - self.__proxypeername = None - self.__httptunnel = True - - def __recvall(self, count): - """__recvall(count) -> data - Receive EXACTLY the number of bytes requested from the socket. 
- Blocks until the required number of bytes have been received. - """ - data = self.recv(count) - while len(data) < count: - d = self.recv(count - len(data)) - if not d: - raise GeneralProxyError((0, "connection closed unexpectedly")) - data = data + d - return data - - def sendall(self, content, *args): - """ override socket.socket.sendall method to rewrite the header - for non-tunneling proxies if needed - """ - if not self.__httptunnel: - content = self.__rewriteproxy(content) - return super(socksocket, self).sendall(content, *args) - - def __rewriteproxy(self, header): - """ rewrite HTTP request headers to support non-tunneling proxies - (i.e. those which do not support the CONNECT method). - This only works for HTTP (not HTTPS) since HTTPS requires tunneling. - """ - host, endpt = None, None - hdrs = header.split("\r\n") - for hdr in hdrs: - if hdr.lower().startswith("host:"): - host = hdr - elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): - endpt = hdr - if host and endpt: - hdrs.remove(host) - hdrs.remove(endpt) - host = host.split(" ")[1] - endpt = endpt.split(" ") - if self.__proxy[4] != None and self.__proxy[5] != None: - hdrs.insert(0, self.__getauthheader()) - hdrs.insert(0, "Host: %s" % host) - hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) - return "\r\n".join(hdrs) - - def __getauthheader(self): - auth = self.__proxy[4] + ":" + self.__proxy[5] - return "Proxy-Authorization: Basic " + base64.b64encode(auth) - - def setproxy( - self, - proxytype=None, - addr=None, - port=None, - rdns=True, - username=None, - password=None, - headers=None, - ): - """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - - Sets the proxy to be used. - proxytype - The type of the proxy to be used. Three types - are supported: PROXY_TYPE_SOCKS4 (including socks4a), - PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP - addr - The address of the server (IP or DNS). - port - The port of the server. 
Defaults to 1080 for SOCKS - servers and 8080 for HTTP proxy servers. - rdns - Should DNS queries be preformed on the remote side - (rather than the local side). The default is True. - Note: This has no effect with SOCKS4 servers. - username - Username to authenticate with to the server. - The default is no authentication. - password - Password to authenticate with to the server. - Only relevant when username is also provided. - headers - Additional or modified headers for the proxy connect - request. - """ - self.__proxy = ( - proxytype, - addr, - port, - rdns, - username.encode() if username else None, - password.encode() if password else None, - headers, - ) - - def __negotiatesocks5(self, destaddr, destport): - """__negotiatesocks5(self,destaddr,destport) - Negotiates a connection through a SOCKS5 server. - """ - # First we'll send the authentication packages we support. - if (self.__proxy[4] != None) and (self.__proxy[5] != None): - # The username/password details were supplied to the - # setproxy method so we support the USERNAME/PASSWORD - # authentication (in addition to the standard none). - self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02)) - else: - # No username/password were entered, therefore we - # only support connections with no authentication. - self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00)) - # We'll receive the server's response to determine which - # method was selected - chosenauth = self.__recvall(2) - if chosenauth[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - # Check the chosen authentication method - if chosenauth[1:2] == chr(0x00).encode(): - # No authentication is required - pass - elif chosenauth[1:2] == chr(0x02).encode(): - # Okay, we need to perform a basic username/password - # authentication. 
- self.sendall( - chr(0x01).encode() - + chr(len(self.__proxy[4])) - + self.__proxy[4] - + chr(len(self.__proxy[5])) - + self.__proxy[5] - ) - authstat = self.__recvall(2) - if authstat[0:1] != chr(0x01).encode(): - # Bad response - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if authstat[1:2] != chr(0x00).encode(): - # Authentication failed - self.close() - raise Socks5AuthError((3, _socks5autherrors[3])) - # Authentication succeeded - else: - # Reaching here is always bad - self.close() - if chosenauth[1] == chr(0xFF).encode(): - raise Socks5AuthError((2, _socks5autherrors[2])) - else: - raise GeneralProxyError((1, _generalerrors[1])) - # Now we can request the actual connection - req = struct.pack("BBB", 0x05, 0x01, 0x00) - # If the given destination address is an IP address, we'll - # use the IPv4 address request even if remote resolving was specified. - try: - ipaddr = socket.inet_aton(destaddr) - req = req + chr(0x01).encode() + ipaddr - except socket.error: - # Well it's not an IP number, so it's probably a DNS name. 
- if self.__proxy[3]: - # Resolve remotely - ipaddr = None - req = ( - req - + chr(0x03).encode() - + chr(len(destaddr)).encode() - + destaddr.encode() - ) - else: - # Resolve locally - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - req = req + chr(0x01).encode() + ipaddr - req = req + struct.pack(">H", destport) - self.sendall(req) - # Get the response - resp = self.__recvall(4) - if resp[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - elif resp[1:2] != chr(0x00).encode(): - # Connection failed - self.close() - if ord(resp[1:2]) <= 8: - raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) - else: - raise Socks5Error((9, _socks5errors[9])) - # Get the bound address/port - elif resp[3:4] == chr(0x01).encode(): - boundaddr = self.__recvall(4) - elif resp[3:4] == chr(0x03).encode(): - resp = resp + self.recv(1) - boundaddr = self.__recvall(ord(resp[4:5])) - else: - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - boundport = struct.unpack(">H", self.__recvall(2))[0] - self.__proxysockname = (boundaddr, boundport) - if ipaddr != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def getproxysockname(self): - """getsockname() -> address info - Returns the bound IP address and port number at the proxy. - """ - return self.__proxysockname - - def getproxypeername(self): - """getproxypeername() -> address info - Returns the IP and port number of the proxy. - """ - return _orgsocket.getpeername(self) - - def getpeername(self): - """getpeername() -> address info - Returns the IP address and port number of the destination - machine (note: getproxypeername returns the proxy) - """ - return self.__proxypeername - - def __negotiatesocks4(self, destaddr, destport): - """__negotiatesocks4(self,destaddr,destport) - Negotiates a connection through a SOCKS4 server. 
- """ - # Check if the destination address provided is an IP address - rmtrslv = False - try: - ipaddr = socket.inet_aton(destaddr) - except socket.error: - # It's a DNS name. Check where it should be resolved. - if self.__proxy[3]: - ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) - rmtrslv = True - else: - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - # Construct the request packet - req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr - # The username parameter is considered userid for SOCKS4 - if self.__proxy[4] != None: - req = req + self.__proxy[4] - req = req + chr(0x00).encode() - # DNS name if remote resolving is required - # NOTE: This is actually an extension to the SOCKS4 protocol - # called SOCKS4A and may not be supported in all cases. - if rmtrslv: - req = req + destaddr + chr(0x00).encode() - self.sendall(req) - # Get the response from the server - resp = self.__recvall(8) - if resp[0:1] != chr(0x00).encode(): - # Bad data - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if resp[1:2] != chr(0x5A).encode(): - # Server returned an error - self.close() - if ord(resp[1:2]) in (91, 92, 93): - self.close() - raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) - else: - raise Socks4Error((94, _socks4errors[4])) - # Get the bound address/port - self.__proxysockname = ( - socket.inet_ntoa(resp[4:]), - struct.unpack(">H", resp[2:4])[0], - ) - if rmtrslv != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def __negotiatehttp(self, destaddr, destport): - """__negotiatehttp(self,destaddr,destport) - Negotiates a connection through an HTTP server. 
- """ - # If we need to resolve locally, we do this now - if not self.__proxy[3]: - addr = socket.gethostbyname(destaddr) - else: - addr = destaddr - headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] - wrote_host_header = False - wrote_auth_header = False - if self.__proxy[6] != None: - for key, val in self.__proxy[6].iteritems(): - headers += [key, ": ", val, "\r\n"] - wrote_host_header = key.lower() == "host" - wrote_auth_header = key.lower() == "proxy-authorization" - if not wrote_host_header: - headers += ["Host: ", destaddr, "\r\n"] - if not wrote_auth_header: - if self.__proxy[4] != None and self.__proxy[5] != None: - headers += [self.__getauthheader(), "\r\n"] - headers.append("\r\n") - self.sendall("".join(headers).encode()) - # We read the response until we get the string "\r\n\r\n" - resp = self.recv(1) - while resp.find("\r\n\r\n".encode()) == -1: - resp = resp + self.recv(1) - # We just need the first line to check if the connection - # was successful - statusline = resp.splitlines()[0].split(" ".encode(), 2) - if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - try: - statuscode = int(statusline[1]) - except ValueError: - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if statuscode != 200: - self.close() - raise HTTPError((statuscode, statusline[2])) - self.__proxysockname = ("0.0.0.0", 0) - self.__proxypeername = (addr, destport) - - def connect(self, destpair): - """connect(self, despair) - Connects to the specified destination through a proxy. - destpar - A tuple of the IP/DNS address and the port number. - (identical to socket's connect). - To select the proxy server use setproxy(). 
- """ - # Do a minimal input check first - if ( - (not type(destpair) in (list, tuple)) - or (len(destpair) < 2) - or (not isinstance(destpair[0], basestring)) - or (type(destpair[1]) != int) - ): - raise GeneralProxyError((5, _generalerrors[5])) - if self.__proxy[0] == PROXY_TYPE_SOCKS5: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatesocks5(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_SOCKS4: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatesocks4(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatehttp(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - if destpair[1] == 443: - self.__negotiatehttp(destpair[0], destpair[1]) - else: - self.__httptunnel = False - elif self.__proxy[0] == None: - _orgsocket.connect(self, (destpair[0], destpair[1])) - else: - raise GeneralProxyError((4, _generalerrors[4])) diff --git a/shotgun_api3/lib/httplib2/python3/__init__.py b/shotgun_api3/lib/httplib2/python3/__init__.py deleted file mode 100644 index ba5fa2f23..000000000 --- a/shotgun_api3/lib/httplib2/python3/__init__.py +++ /dev/null @@ -1,1799 +0,0 @@ -# -*- coding: utf-8 -*- -"""Small, fast HTTP client library for Python.""" - -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = [ - "Thomas Broyer (t.broyer@ltgt.net)", - "James Antill", - "Xavier Verges Farrero", - "Jonathan Feinberg", - "Blair Zajac", - "Sam Ruby", - "Louis Nyffenegger", - "Mark 
Pilgrim", - "Alex Yu", - "Lai Han", -] -__license__ = "MIT" -__version__ = "0.22.0" - -import base64 -import calendar -import copy -import email -import email.feedparser -from email import header -import email.message -import email.utils -import errno -from gettext import gettext as _ -import gzip -from hashlib import md5 as _md5 -from hashlib import sha1 as _sha -import hmac -import http.client -import io -import os -import random -import re -import socket -import ssl -import sys -import time -import urllib.parse -import zlib - -try: - import socks -except ImportError: - # TODO: remove this fallback and copypasted socksipy module upon py2/3 merge, - # idea is to have soft-dependency on any compatible module called socks - from . import socks -from . import auth -from .error import * -from .iri2uri import iri2uri - - -def has_timeout(timeout): - if hasattr(socket, "_GLOBAL_DEFAULT_TIMEOUT"): - return timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT - return timeout is not None - - -__all__ = [ - "debuglevel", - "FailedToDecompressContent", - "Http", - "HttpLib2Error", - "ProxyInfo", - "RedirectLimit", - "RedirectMissingLocation", - "Response", - "RETRIES", - "UnimplementedDigestAuthOptionError", - "UnimplementedHmacDigestAuthOptionError", -] - -# The httplib debug level, set to a non-zero value to get debug output -debuglevel = 0 - -# A request will be tried 'RETRIES' times if it fails at the socket/connection level. -RETRIES = 2 - - -# Open Items: -# ----------- - -# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) - -# Pluggable cache storage (supports storing the cache in -# flat files by default. We need a plug-in architecture -# that can support Berkeley DB and Squid) - -# == Known Issues == -# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. -# Does not handle Cache-Control: max-stale -# Does not use Age: headers when calculating cache freshness. 
- -# The number of redirections to follow before giving up. -# Note that only GET redirects are automatically followed. -# Will also honor 301 requests by saving that info and never -# requesting that URI again. -DEFAULT_MAX_REDIRECTS = 5 - -# Which headers are hop-by-hop headers by default -HOP_BY_HOP = [ - "connection", - "keep-alive", - "proxy-authenticate", - "proxy-authorization", - "te", - "trailers", - "transfer-encoding", - "upgrade", -] - -# https://tools.ietf.org/html/rfc7231#section-8.1.3 -SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE") - -# To change, assign to `Http().redirect_codes` -REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308)) - - -from . import certs - -CA_CERTS = certs.where() - -# PROTOCOL_TLS is python 3.5.3+. PROTOCOL_SSLv23 is deprecated. -# Both PROTOCOL_TLS and PROTOCOL_SSLv23 are equivalent and means: -# > Selects the highest protocol version that both the client and server support. -# > Despite the name, this option can select “TLS” protocols as well as “SSL”. -# source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_SSLv23 - -# PROTOCOL_TLS_CLIENT is python 3.10.0+. PROTOCOL_TLS is deprecated. -# > Auto-negotiate the highest protocol version that both the client and server support, and configure the context client-side connections. -# > The protocol enables CERT_REQUIRED and check_hostname by default. 
-# source: https://docs.python.org/3.10/library/ssl.html#ssl.PROTOCOL_TLS - -DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS_CLIENT", None) or getattr(ssl, "PROTOCOL_TLS", None) or getattr(ssl, "PROTOCOL_SSLv23") - - -def _build_ssl_context( - disable_ssl_certificate_validation, - ca_certs, - cert_file=None, - key_file=None, - maximum_version=None, - minimum_version=None, - key_password=None, -): - if not hasattr(ssl, "SSLContext"): - raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext") - - context = ssl.SSLContext(DEFAULT_TLS_VERSION) - # check_hostname and verify_mode should be set in opposite order during disable - # https://bugs.python.org/issue31431 - if disable_ssl_certificate_validation and hasattr(context, "check_hostname"): - context.check_hostname = not disable_ssl_certificate_validation - context.verify_mode = ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED - - # SSLContext.maximum_version and SSLContext.minimum_version are python 3.7+. - # source: https://docs.python.org/3/library/ssl.html#ssl.SSLContext.maximum_version - if maximum_version is not None: - if hasattr(context, "maximum_version"): - if isinstance(maximum_version, str): - maximum_version = getattr(ssl.TLSVersion, maximum_version) - context.maximum_version = maximum_version - else: - raise RuntimeError("setting tls_maximum_version requires Python 3.7 and OpenSSL 1.1 or newer") - if minimum_version is not None: - if hasattr(context, "minimum_version"): - if isinstance(minimum_version, str): - minimum_version = getattr(ssl.TLSVersion, minimum_version) - context.minimum_version = minimum_version - else: - raise RuntimeError("setting tls_minimum_version requires Python 3.7 and OpenSSL 1.1 or newer") - # check_hostname requires python 3.4+ - # we will perform the equivalent in HTTPSConnectionWithTimeout.connect() by calling ssl.match_hostname - # if check_hostname is not supported. 
- if hasattr(context, "check_hostname"): - context.check_hostname = not disable_ssl_certificate_validation - - context.load_verify_locations(ca_certs) - - if cert_file: - context.load_cert_chain(cert_file, key_file, key_password) - - return context - - -def _get_end2end_headers(response): - hopbyhop = list(HOP_BY_HOP) - hopbyhop.extend([x.strip() for x in response.get("connection", "").split(",")]) - return [header for header in list(response.keys()) if header not in hopbyhop] - - -_missing = object() - - -def _errno_from_exception(e): - # TODO python 3.11+ cheap try: return e.errno except AttributeError: pass - errno = getattr(e, "errno", _missing) - if errno is not _missing: - return errno - - # socket.error and common wrap in .args - args = getattr(e, "args", None) - if args: - return _errno_from_exception(args[0]) - - # pysocks.ProxyError wraps in .socket_err - # https://github.com/httplib2/httplib2/pull/202 - socket_err = getattr(e, "socket_err", None) - if socket_err: - return _errno_from_exception(socket_err) - - return None - - -URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") - - -def parse_uri(uri): - """Parses a URI using the regex given in Appendix B of RFC 3986. - - (scheme, authority, path, query, fragment) = parse_uri(uri) - """ - groups = URI.match(uri).groups() - return (groups[1], groups[3], groups[4], groups[6], groups[8]) - - -def urlnorm(uri): - (scheme, authority, path, query, fragment) = parse_uri(uri) - if not scheme or not authority: - raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) - authority = authority.lower() - scheme = scheme.lower() - if not path: - path = "/" - # Could do syntax based normalization of the URI before - # computing the digest. See Section 6.2.2 of Std 66. 
- request_uri = query and "?".join([path, query]) or path - scheme = scheme.lower() - defrag_uri = scheme + "://" + authority + request_uri - return scheme, authority, request_uri, defrag_uri - - -# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) -re_url_scheme = re.compile(r"^\w+://") -re_unsafe = re.compile(r"[^\w\-_.()=!]+", re.ASCII) - - -def safename(filename): - """Return a filename suitable for the cache. - Strips dangerous and common characters to create a filename we - can use to store the cache in. - """ - if isinstance(filename, bytes): - filename_bytes = filename - filename = filename.decode("utf-8") - else: - filename_bytes = filename.encode("utf-8") - filemd5 = _md5(filename_bytes).hexdigest() - filename = re_url_scheme.sub("", filename) - filename = re_unsafe.sub("", filename) - - # limit length of filename (vital for Windows) - # https://github.com/httplib2/httplib2/pull/74 - # C:\Users\ \AppData\Local\Temp\ , - # 9 chars + max 104 chars + 20 chars + x + 1 + 32 = max 259 chars - # Thus max safe filename x = 93 chars. 
Let it be 90 to make a round sum: - filename = filename[:90] - - return ",".join((filename, filemd5)) - - -NORMALIZE_SPACE = re.compile(r"(?:\r\n)?[ \t]+") - - -def _normalize_headers(headers): - return dict( - [ - (_convert_byte_str(key).lower(), NORMALIZE_SPACE.sub(_convert_byte_str(value), " ").strip(),) - for (key, value) in headers.items() - ] - ) - - -def _convert_byte_str(s): - if not isinstance(s, str): - return str(s, "utf-8") - return s - - -def _parse_cache_control(headers): - retval = {} - if "cache-control" in headers: - parts = headers["cache-control"].split(",") - parts_with_args = [ - tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") - ] - parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] - retval = dict(parts_with_args + parts_wo_args) - return retval - - -# Whether to use a strict mode to parse WWW-Authenticate headers -# Might lead to bad results in case of ill-formed header value, -# so disabled by default, falling back to relaxed parsing. -# Set to true to turn on, useful for testing servers. -USE_WWW_AUTH_STRICT_PARSING = 0 - - -def _entry_disposition(response_headers, request_headers): - """Determine freshness from the Date, Expires and Cache-Control headers. - - We don't handle the following: - - 1. Cache-Control: max-stale - 2. Age: headers are not used in the calculations. - - Not that this algorithm is simpler than you might think - because we are operating as a private (non-shared) cache. - This lets us ignore 's-maxage'. We can also ignore - 'proxy-invalidate' since we aren't a proxy. - We will never return a stale document as - fresh as a design decision, and thus the non-implementation - of 'max-stale'. This also lets us safely ignore 'must-revalidate' - since we operate as if every server has sent 'must-revalidate'. - Since we are private we get to ignore both 'public' and - 'private' parameters. 
We also ignore 'no-transform' since - we don't do any transformations. - The 'no-store' parameter is handled at a higher level. - So the only Cache-Control parameters we look at are: - - no-cache - only-if-cached - max-age - min-fresh - """ - - retval = "STALE" - cc = _parse_cache_control(request_headers) - cc_response = _parse_cache_control(response_headers) - - if "pragma" in request_headers and request_headers["pragma"].lower().find("no-cache") != -1: - retval = "TRANSPARENT" - if "cache-control" not in request_headers: - request_headers["cache-control"] = "no-cache" - elif "no-cache" in cc: - retval = "TRANSPARENT" - elif "no-cache" in cc_response: - retval = "STALE" - elif "only-if-cached" in cc: - retval = "FRESH" - elif "date" in response_headers: - date = calendar.timegm(email.utils.parsedate_tz(response_headers["date"])) - now = time.time() - current_age = max(0, now - date) - if "max-age" in cc_response: - try: - freshness_lifetime = int(cc_response["max-age"]) - except ValueError: - freshness_lifetime = 0 - elif "expires" in response_headers: - expires = email.utils.parsedate_tz(response_headers["expires"]) - if None == expires: - freshness_lifetime = 0 - else: - freshness_lifetime = max(0, calendar.timegm(expires) - date) - else: - freshness_lifetime = 0 - if "max-age" in cc: - try: - freshness_lifetime = int(cc["max-age"]) - except ValueError: - freshness_lifetime = 0 - if "min-fresh" in cc: - try: - min_fresh = int(cc["min-fresh"]) - except ValueError: - min_fresh = 0 - current_age += min_fresh - if freshness_lifetime > current_age: - retval = "FRESH" - return retval - - -def _decompressContent(response, new_content): - content = new_content - try: - encoding = response.get("content-encoding", None) - if encoding in ["gzip", "deflate"]: - if encoding == "gzip": - content = gzip.GzipFile(fileobj=io.BytesIO(new_content)).read() - if encoding == "deflate": - try: - content = zlib.decompress(content, zlib.MAX_WBITS) - except (IOError, zlib.error): - 
content = zlib.decompress(content, -zlib.MAX_WBITS) - response["content-length"] = str(len(content)) - # Record the historical presence of the encoding in a way the won't interfere. - response["-content-encoding"] = response["content-encoding"] - del response["content-encoding"] - except (IOError, zlib.error): - content = "" - raise FailedToDecompressContent( - _("Content purported to be compressed with %s but failed to decompress.") % response.get("content-encoding"), - response, - content, - ) - return content - - -def _bind_write_headers(msg): - def _write_headers(self): - # Self refers to the Generator object. - for h, v in msg.items(): - print("%s:" % h, end=" ", file=self._fp) - if isinstance(v, header.Header): - print(v.encode(maxlinelen=self._maxheaderlen), file=self._fp) - else: - # email.Header got lots of smarts, so use it. - headers = header.Header(v, maxlinelen=self._maxheaderlen, charset="utf-8", header_name=h) - print(headers.encode(), file=self._fp) - # A blank line always separates headers from body. - print(file=self._fp) - - return _write_headers - - -def _updateCache(request_headers, response_headers, content, cache, cachekey): - if cachekey: - cc = _parse_cache_control(request_headers) - cc_response = _parse_cache_control(response_headers) - if "no-store" in cc or "no-store" in cc_response: - cache.delete(cachekey) - else: - info = email.message.Message() - for key, value in response_headers.items(): - if key not in ["status", "content-encoding", "transfer-encoding"]: - info[key] = value - - # Add annotations to the cache to indicate what headers - # are variant for this request. 
- vary = response_headers.get("vary", None) - if vary: - vary_headers = vary.lower().replace(" ", "").split(",") - for header in vary_headers: - key = "-varied-%s" % header - try: - info[key] = request_headers[header] - except KeyError: - pass - - status = response_headers.status - if status == 304: - status = 200 - - status_header = "status: %d\r\n" % status - - try: - header_str = info.as_string() - except UnicodeEncodeError: - setattr(info, "_write_headers", _bind_write_headers(info)) - header_str = info.as_string() - - header_str = re.sub("\r(?!\n)|(? 0: - service = "cl" - # No point in guessing Base or Spreadsheet - # elif request_uri.find("spreadsheets") > 0: - # service = "wise" - - auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers["user-agent"],) - resp, content = self.http.request( - "https://www.google.com/accounts/ClientLogin", - method="POST", - body=urlencode(auth), - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) - lines = content.split("\n") - d = dict([tuple(line.split("=", 1)) for line in lines if line]) - if resp.status == 403: - self.Auth = "" - else: - self.Auth = d["Auth"] - - def request(self, method, request_uri, headers, content): - """Modify the request headers to add the appropriate - Authorization header.""" - headers["authorization"] = "GoogleLogin Auth=" + self.Auth - - -AUTH_SCHEME_CLASSES = { - "basic": BasicAuthentication, - "wsse": WsseAuthentication, - "digest": DigestAuthentication, - "hmacdigest": HmacDigestAuthentication, - "googlelogin": GoogleLoginAuthentication, -} - -AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] - - -class FileCache(object): - """Uses a local directory as a store for cached files. - Not really safe to use if multiple threads or processes are going to - be running on the same cache. 
- """ - - def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior - self.cache = cache - self.safe = safe - if not os.path.exists(cache): - os.makedirs(self.cache) - - def get(self, key): - retval = None - cacheFullPath = os.path.join(self.cache, self.safe(key)) - try: - f = open(cacheFullPath, "rb") - retval = f.read() - f.close() - except IOError: - pass - return retval - - def set(self, key, value): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - f = open(cacheFullPath, "wb") - f.write(value) - f.close() - - def delete(self, key): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - if os.path.exists(cacheFullPath): - os.remove(cacheFullPath) - - -class Credentials(object): - def __init__(self): - self.credentials = [] - - def add(self, name, password, domain=""): - self.credentials.append((domain.lower(), name, password)) - - def clear(self): - self.credentials = [] - - def iter(self, domain): - for (cdomain, name, password) in self.credentials: - if cdomain == "" or domain == cdomain: - yield (name, password) - - -class KeyCerts(Credentials): - """Identical to Credentials except that - name/password are mapped to key/cert.""" - - def add(self, key, cert, domain, password): - self.credentials.append((domain.lower(), key, cert, password)) - - def iter(self, domain): - for (cdomain, key, cert, password) in self.credentials: - if cdomain == "" or domain == cdomain: - yield (key, cert, password) - - -class AllHosts(object): - pass - - -class ProxyInfo(object): - """Collect information required to use a proxy.""" - - bypass_hosts = () - - def __init__( - self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None, - ): - """Args: - - proxy_type: The type of proxy server. This must be set to one of - socks.PROXY_TYPE_XXX constants. 
For example: p = - ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', - proxy_port=8000) - proxy_host: The hostname or IP address of the proxy server. - proxy_port: The port that the proxy server is running on. - proxy_rdns: If True (default), DNS queries will not be performed - locally, and instead, handed to the proxy to resolve. This is useful - if the network does not allow resolution of non-local names. In - httplib2 0.9 and earlier, this defaulted to False. - proxy_user: The username used to authenticate with the proxy server. - proxy_pass: The password used to authenticate with the proxy server. - proxy_headers: Additional or modified headers for the proxy connect - request. - """ - if isinstance(proxy_user, bytes): - proxy_user = proxy_user.decode() - if isinstance(proxy_pass, bytes): - proxy_pass = proxy_pass.decode() - ( - self.proxy_type, - self.proxy_host, - self.proxy_port, - self.proxy_rdns, - self.proxy_user, - self.proxy_pass, - self.proxy_headers, - ) = ( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) - - def astuple(self): - return ( - self.proxy_type, - self.proxy_host, - self.proxy_port, - self.proxy_rdns, - self.proxy_user, - self.proxy_pass, - self.proxy_headers, - ) - - def isgood(self): - return socks and (self.proxy_host != None) and (self.proxy_port != None) - - def applies_to(self, hostname): - return not self.bypass_host(hostname) - - def bypass_host(self, hostname): - """Has this host been excluded from the proxy config""" - if self.bypass_hosts is AllHosts: - return True - - hostname = "." + hostname.lstrip(".") - for skip_name in self.bypass_hosts: - # *.suffix - if skip_name.startswith(".") and hostname.endswith(skip_name): - return True - # exact match - if hostname == "." 
+ skip_name: - return True - return False - - def __repr__(self): - return ( - "" - ).format(p=self) - - -def proxy_info_from_environment(method="http"): - """Read proxy info from the environment variables. - """ - if method not in ("http", "https"): - return - - env_var = method + "_proxy" - url = os.environ.get(env_var, os.environ.get(env_var.upper())) - if not url: - return - return proxy_info_from_url(url, method, noproxy=None) - - -def proxy_info_from_url(url, method="http", noproxy=None): - """Construct a ProxyInfo from a URL (such as http_proxy env var) - """ - url = urllib.parse.urlparse(url) - - proxy_type = 3 # socks.PROXY_TYPE_HTTP - pi = ProxyInfo( - proxy_type=proxy_type, - proxy_host=url.hostname, - proxy_port=url.port or dict(https=443, http=80)[method], - proxy_user=url.username or None, - proxy_pass=url.password or None, - proxy_headers=None, - ) - - bypass_hosts = [] - # If not given an explicit noproxy value, respect values in env vars. - if noproxy is None: - noproxy = os.environ.get("no_proxy", os.environ.get("NO_PROXY", "")) - # Special case: A single '*' character means all hosts should be bypassed. - if noproxy == "*": - bypass_hosts = AllHosts - elif noproxy.strip(): - bypass_hosts = noproxy.split(",") - bypass_hosts = tuple(filter(bool, bypass_hosts)) # To exclude empty string. - - pi.bypass_hosts = bypass_hosts - return pi - - -class HTTPConnectionWithTimeout(http.client.HTTPConnection): - """HTTPConnection subclass that supports timeouts - - HTTPConnection subclass that supports timeouts - - All timeouts are in seconds. If None is passed for timeout then - Python's default timeout for sockets will be used. 
See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - - def __init__(self, host, port=None, timeout=None, proxy_info=None): - http.client.HTTPConnection.__init__(self, host, port=port, timeout=timeout) - - self.proxy_info = proxy_info - if proxy_info and not isinstance(proxy_info, ProxyInfo): - self.proxy_info = proxy_info("http") - - def connect(self): - """Connect to the host and port specified in __init__.""" - if self.proxy_info and socks is None: - raise ProxiesUnavailableError("Proxy support missing but proxy use was requested!") - if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): - use_proxy = True - ( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) = self.proxy_info.astuple() - - host = proxy_host - port = proxy_port - else: - use_proxy = False - - host = self.host - port = self.port - proxy_type = None - - socket_err = None - - for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - try: - if use_proxy: - self.sock = socks.socksocket(af, socktype, proto) - self.sock.setproxy( - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, - ) - else: - self.sock = socket.socket(af, socktype, proto) - self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if has_timeout(self.timeout): - self.sock.settimeout(self.timeout) - if self.debuglevel > 0: - print("connect: ({0}, {1}) ************".format(self.host, self.port)) - if use_proxy: - print( - "proxy: {0} ************".format( - str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - ) - - self.sock.connect((self.host, self.port) + sa[2:]) - except socket.error as e: - socket_err = e - if self.debuglevel > 0: - print("connect fail: ({0}, {1})".format(self.host, self.port)) - if use_proxy: - print( - "proxy: {0}".format( - 
str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - ) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket_err - - -class HTTPSConnectionWithTimeout(http.client.HTTPSConnection): - """This class allows communication via SSL. - - All timeouts are in seconds. If None is passed for timeout then - Python's default timeout for sockets will be used. See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - - def __init__( - self, - host, - port=None, - key_file=None, - cert_file=None, - timeout=None, - proxy_info=None, - ca_certs=None, - disable_ssl_certificate_validation=False, - tls_maximum_version=None, - tls_minimum_version=None, - key_password=None, - ): - - self.disable_ssl_certificate_validation = disable_ssl_certificate_validation - self.ca_certs = ca_certs if ca_certs else CA_CERTS - - self.proxy_info = proxy_info - if proxy_info and not isinstance(proxy_info, ProxyInfo): - self.proxy_info = proxy_info("https") - - context = _build_ssl_context( - self.disable_ssl_certificate_validation, - self.ca_certs, - cert_file, - key_file, - maximum_version=tls_maximum_version, - minimum_version=tls_minimum_version, - key_password=key_password, - ) - super(HTTPSConnectionWithTimeout, self).__init__( - host, port=port, timeout=timeout, context=context, - ) - self.key_file = key_file - self.cert_file = cert_file - self.key_password = key_password - - def connect(self): - """Connect to a host on a given (SSL) port.""" - if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): - use_proxy = True - ( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) = self.proxy_info.astuple() - - host = proxy_host - port = proxy_port - else: - use_proxy = False - - host = self.host - port = self.port - proxy_type = None - proxy_headers = 
None - - socket_err = None - - address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) - for family, socktype, proto, canonname, sockaddr in address_info: - try: - if use_proxy: - sock = socks.socksocket(family, socktype, proto) - - sock.setproxy( - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, - ) - else: - sock = socket.socket(family, socktype, proto) - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if has_timeout(self.timeout): - sock.settimeout(self.timeout) - sock.connect((self.host, self.port)) - - self.sock = self._context.wrap_socket(sock, server_hostname=self.host) - - # Python 3.3 compatibility: emulate the check_hostname behavior - if not hasattr(self._context, "check_hostname") and not self.disable_ssl_certificate_validation: - try: - ssl.match_hostname(self.sock.getpeercert(), self.host) - except Exception: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - - if self.debuglevel > 0: - print("connect: ({0}, {1})".format(self.host, self.port)) - if use_proxy: - print( - "proxy: {0}".format( - str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - ) - except (ssl.SSLError, ssl.CertificateError) as e: - if sock: - sock.close() - if self.sock: - self.sock.close() - self.sock = None - raise - except (socket.timeout, socket.gaierror): - raise - except socket.error as e: - socket_err = e - if self.debuglevel > 0: - print("connect fail: ({0}, {1})".format(self.host, self.port)) - if use_proxy: - print( - "proxy: {0}".format( - str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) - ) - ) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket_err - - -SCHEME_TO_CONNECTION = { - "http": HTTPConnectionWithTimeout, - "https": HTTPSConnectionWithTimeout, -} - - -class Http(object): - """An HTTP client that handles: - - - all methods - - caching - - ETags - - compression, - - HTTPS - 
- Basic - - Digest - - WSSE - - and more. - """ - - def __init__( - self, - cache=None, - timeout=None, - proxy_info=proxy_info_from_environment, - ca_certs=None, - disable_ssl_certificate_validation=False, - tls_maximum_version=None, - tls_minimum_version=None, - ): - """If 'cache' is a string then it is used as a directory name for - a disk cache. Otherwise it must be an object that supports the - same interface as FileCache. - - All timeouts are in seconds. If None is passed for timeout - then Python's default timeout for sockets will be used. See - for example the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - - `proxy_info` may be: - - a callable that takes the http scheme ('http' or 'https') and - returns a ProxyInfo instance per request. By default, uses - proxy_info_from_environment. - - a ProxyInfo instance (static proxy config). - - None (proxy disabled). - - ca_certs is the path of a file containing root CA certificates for SSL - server certificate validation. By default, a CA cert file bundled with - httplib2 is used. - - If disable_ssl_certificate_validation is true, SSL cert validation will - not be performed. - - tls_maximum_version / tls_minimum_version require Python 3.7+ / - OpenSSL 1.1.0g+. A value of "TLSv1_3" requires OpenSSL 1.1.1+. - """ - self.proxy_info = proxy_info - self.ca_certs = ca_certs - self.disable_ssl_certificate_validation = disable_ssl_certificate_validation - self.tls_maximum_version = tls_maximum_version - self.tls_minimum_version = tls_minimum_version - # Map domain name to an httplib connection - self.connections = {} - # The location of the cache, for now a directory - # where cached responses are held. 
- if cache and isinstance(cache, str): - self.cache = FileCache(cache) - else: - self.cache = cache - - # Name/password - self.credentials = Credentials() - - # Key/cert - self.certificates = KeyCerts() - - # authorization objects - self.authorizations = [] - - # If set to False then no redirects are followed, even safe ones. - self.follow_redirects = True - - self.redirect_codes = REDIRECT_CODES - - # Which HTTP methods do we apply optimistic concurrency to, i.e. - # which methods get an "if-match:" etag header added to them. - self.optimistic_concurrency_methods = ["PUT", "PATCH"] - - self.safe_methods = list(SAFE_METHODS) - - # If 'follow_redirects' is True, and this is set to True then - # all redirecs are followed, including unsafe ones. - self.follow_all_redirects = False - - self.ignore_etag = False - - self.force_exception_to_status_code = False - - self.timeout = timeout - - # Keep Authorization: headers on a redirect. - self.forward_authorization_headers = False - - def close(self): - """Close persistent connections, clear sensitive data. - Not thread-safe, requires external synchronization against concurrent requests. - """ - existing, self.connections = self.connections, {} - for _, c in existing.items(): - c.close() - self.certificates.clear() - self.clear_credentials() - - def __getstate__(self): - state_dict = copy.copy(self.__dict__) - # In case request is augmented by some foreign object such as - # credentials which handle auth - if "request" in state_dict: - del state_dict["request"] - if "connections" in state_dict: - del state_dict["connections"] - return state_dict - - def __setstate__(self, state): - self.__dict__.update(state) - self.connections = {} - - def _auth_from_challenge(self, host, request_uri, headers, response, content): - """A generator that creates Authorization objects - that can be applied to requests. 
- """ - challenges = auth._parse_www_authenticate(response, "www-authenticate") - for cred in self.credentials.iter(host): - for scheme in AUTH_SCHEME_ORDER: - if scheme in challenges: - yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) - - def add_credentials(self, name, password, domain=""): - """Add a name and password that will be used - any time a request requires authentication.""" - self.credentials.add(name, password, domain) - - def add_certificate(self, key, cert, domain, password=None): - """Add a key and cert that will be used - any time a request requires authentication.""" - self.certificates.add(key, cert, domain, password) - - def clear_credentials(self): - """Remove all the names and passwords - that are used for authentication""" - self.credentials.clear() - self.authorizations = [] - - def _conn_request(self, conn, request_uri, method, body, headers): - i = 0 - seen_bad_status_line = False - while i < RETRIES: - i += 1 - try: - if conn.sock is None: - conn.connect() - conn.request(method, request_uri, body, headers) - except socket.timeout: - conn.close() - raise - except socket.gaierror: - conn.close() - raise ServerNotFoundError("Unable to find the server at %s" % conn.host) - except socket.error as e: - errno_ = _errno_from_exception(e) - if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: - continue # retry on potentially transient errors - raise - except http.client.HTTPException: - if conn.sock is None: - if i < RETRIES - 1: - conn.close() - conn.connect() - continue - else: - conn.close() - raise - if i < RETRIES - 1: - conn.close() - conn.connect() - continue - # Just because the server closed the connection doesn't apparently mean - # that the server didn't send a response. 
- pass - try: - response = conn.getresponse() - except (http.client.BadStatusLine, http.client.ResponseNotReady): - # If we get a BadStatusLine on the first try then that means - # the connection just went stale, so retry regardless of the - # number of RETRIES set. - if not seen_bad_status_line and i == 1: - i = 0 - seen_bad_status_line = True - conn.close() - conn.connect() - continue - else: - conn.close() - raise - except socket.timeout: - raise - except (socket.error, http.client.HTTPException): - conn.close() - if i == 0: - conn.close() - conn.connect() - continue - else: - raise - else: - content = b"" - if method == "HEAD": - conn.close() - else: - content = response.read() - response = Response(response) - if method != "HEAD": - content = _decompressContent(response, content) - - break - return (response, content) - - def _request( - self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey, - ): - """Do the actual request using the connection object - and also follow one level of redirects if necessary""" - - auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] - auth = auths and sorted(auths)[0][1] or None - if auth: - auth.request(method, request_uri, headers, body) - - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - - if auth: - if auth.response(response, body): - auth.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - response._stale_digest = 1 - - if response.status == 401: - for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): - authorization.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - if response.status != 401: - self.authorizations.append(authorization) - authorization.response(response, body) - break - - if 
self.follow_all_redirects or method in self.safe_methods or response.status in (303, 308): - if self.follow_redirects and response.status in self.redirect_codes: - # Pick out the location header and basically start from the beginning - # remembering first to strip the ETag header and decrement our 'depth' - if redirections: - if "location" not in response and response.status != 300: - raise RedirectMissingLocation( - _("Redirected but the response is missing a Location: header."), response, content, - ) - # Fix-up relative redirects (which violate an RFC 2616 MUST) - if "location" in response: - location = response["location"] - (scheme, authority, path, query, fragment) = parse_uri(location) - if authority == None: - response["location"] = urllib.parse.urljoin(absolute_uri, location) - if response.status == 308 or (response.status == 301 and (method in self.safe_methods)): - response["-x-permanent-redirect-url"] = response["location"] - if "content-location" not in response: - response["content-location"] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - if "if-none-match" in headers: - del headers["if-none-match"] - if "if-modified-since" in headers: - del headers["if-modified-since"] - if "authorization" in headers and not self.forward_authorization_headers: - del headers["authorization"] - if "location" in response: - location = response["location"] - old_response = copy.deepcopy(response) - if "content-location" not in old_response: - old_response["content-location"] = absolute_uri - redirect_method = method - if response.status in [302, 303]: - redirect_method = "GET" - body = None - (response, content) = self.request( - location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1, - ) - response.previous = old_response - else: - raise RedirectLimit( - "Redirected more times than redirection_limit allows.", response, content, - ) - elif response.status in [200, 203] and method in 
self.safe_methods: - # Don't cache 206's since we aren't going to handle byte range requests - if "content-location" not in response: - response["content-location"] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - - return (response, content) - - def _normalize_headers(self, headers): - return _normalize_headers(headers) - - # Need to catch and rebrand some exceptions - # Then need to optionally turn all exceptions into status codes - # including all socket.* and httplib.* exceptions. - - def request( - self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, - ): - """ Performs a single HTTP request. -The 'uri' is the URI of the HTTP resource and can begin -with either 'http' or 'https'. The value of 'uri' must be an absolute URI. - -The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. -There is no restriction on the methods allowed. - -The 'body' is the entity body to be sent with the request. It is a string -object. - -Any extra headers that are to be sent with the request should be provided in the -'headers' dictionary. - -The maximum number of redirect to follow before raising an -exception is 'redirections. The default is 5. - -The return value is a tuple of (response, content), the first -being and instance of the 'Response' class, the second being -a string that contains the response entity body. - """ - conn_key = "" - - try: - if headers is None: - headers = {} - else: - headers = self._normalize_headers(headers) - - if "user-agent" not in headers: - headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__ - - uri = iri2uri(uri) - # Prevent CWE-75 space injection to manipulate request via part of uri. - # Prevent CWE-93 CRLF injection to modify headers via part of uri. 
- uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A") - - (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) - - conn_key = scheme + ":" + authority - conn = self.connections.get(conn_key) - if conn is None: - if not connection_type: - connection_type = SCHEME_TO_CONNECTION[scheme] - certs = list(self.certificates.iter(authority)) - if issubclass(connection_type, HTTPSConnectionWithTimeout): - if certs: - conn = self.connections[conn_key] = connection_type( - authority, - key_file=certs[0][0], - cert_file=certs[0][1], - timeout=self.timeout, - proxy_info=self.proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, - tls_maximum_version=self.tls_maximum_version, - tls_minimum_version=self.tls_minimum_version, - key_password=certs[0][2], - ) - else: - conn = self.connections[conn_key] = connection_type( - authority, - timeout=self.timeout, - proxy_info=self.proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, - tls_maximum_version=self.tls_maximum_version, - tls_minimum_version=self.tls_minimum_version, - ) - else: - conn = self.connections[conn_key] = connection_type( - authority, timeout=self.timeout, proxy_info=self.proxy_info - ) - conn.set_debuglevel(debuglevel) - - if "range" not in headers and "accept-encoding" not in headers: - headers["accept-encoding"] = "gzip, deflate" - - info = email.message.Message() - cachekey = None - cached_value = None - if self.cache: - cachekey = defrag_uri - cached_value = self.cache.get(cachekey) - if cached_value: - try: - info, content = cached_value.split(b"\r\n\r\n", 1) - info = email.message_from_bytes(info) - for k, v in info.items(): - if v.startswith("=?") and v.endswith("?="): - info.replace_header(k, str(*email.header.decode_header(v)[0])) - except (IndexError, ValueError): - self.cache.delete(cachekey) - cachekey = None - cached_value = None - - if ( - method in 
self.optimistic_concurrency_methods - and self.cache - and "etag" in info - and not self.ignore_etag - and "if-match" not in headers - ): - # http://www.w3.org/1999/04/Editing/ - headers["if-match"] = info["etag"] - - # https://tools.ietf.org/html/rfc7234 - # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location - # when a non-error status code is received in response to an unsafe request method. - if self.cache and cachekey and method not in self.safe_methods: - self.cache.delete(cachekey) - - # Check the vary header in the cache to see if this request - # matches what varies in the cache. - if method in self.safe_methods and "vary" in info: - vary = info["vary"] - vary_headers = vary.lower().replace(" ", "").split(",") - for header in vary_headers: - key = "-varied-%s" % header - value = info[key] - if headers.get(header, None) != value: - cached_value = None - break - - if ( - self.cache - and cached_value - and (method in self.safe_methods or info["status"] == "308") - and "range" not in headers - ): - redirect_method = method - if info["status"] not in ("307", "308"): - redirect_method = "GET" - if "-x-permanent-redirect-url" in info: - # Should cached permanent redirects be counted in our redirection count? For now, yes. - if redirections <= 0: - raise RedirectLimit( - "Redirected more times than redirection_limit allows.", {}, "", - ) - (response, new_content) = self.request( - info["-x-permanent-redirect-url"], - method=redirect_method, - headers=headers, - redirections=redirections - 1, - ) - response.previous = Response(info) - response.previous.fromcache = True - else: - # Determine our course of action: - # Is the cached entry fresh or stale? - # Has the client requested a non-cached response? - # - # There seems to be three possible answers: - # 1. [FRESH] Return the cache entry w/o doing a GET - # 2. [STALE] Do the GET (but add in cache validators if available) - # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request - entry_disposition = _entry_disposition(info, headers) - - if entry_disposition == "FRESH": - response = Response(info) - response.fromcache = True - return (response, content) - - if entry_disposition == "STALE": - if "etag" in info and not self.ignore_etag and not "if-none-match" in headers: - headers["if-none-match"] = info["etag"] - if "last-modified" in info and not "last-modified" in headers: - headers["if-modified-since"] = info["last-modified"] - elif entry_disposition == "TRANSPARENT": - pass - - (response, new_content) = self._request( - conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, - ) - - if response.status == 304 and method == "GET": - # Rewrite the cache entry with the new end-to-end headers - # Take all headers that are in response - # and overwrite their values in info. - # unless they are hop-by-hop, or are listed in the connection header. - - for key in _get_end2end_headers(response): - info[key] = response[key] - merged_response = Response(info) - if hasattr(response, "_stale_digest"): - merged_response._stale_digest = response._stale_digest - _updateCache(headers, merged_response, content, self.cache, cachekey) - response = merged_response - response.status = 200 - response.fromcache = True - - elif response.status == 200: - content = new_content - else: - self.cache.delete(cachekey) - content = new_content - else: - cc = _parse_cache_control(headers) - if "only-if-cached" in cc: - info["status"] = "504" - response = Response(info) - content = b"" - else: - (response, content) = self._request( - conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, - ) - except Exception as e: - is_timeout = isinstance(e, socket.timeout) - if is_timeout: - conn = self.connections.pop(conn_key, None) - if conn: - conn.close() - - if self.force_exception_to_status_code: - if isinstance(e, 
HttpLib2ErrorWithResponse): - response = e.response - content = e.content - response.status = 500 - response.reason = str(e) - elif isinstance(e, socket.timeout): - content = b"Request Timeout" - response = Response({"content-type": "text/plain", "status": "408", "content-length": len(content),}) - response.reason = "Request Timeout" - else: - content = str(e).encode("utf-8") - response = Response({"content-type": "text/plain", "status": "400", "content-length": len(content),}) - response.reason = "Bad Request" - else: - raise - - return (response, content) - - -class Response(dict): - """An object more like email.message than httplib.HTTPResponse.""" - - """Is this response from our local cache""" - fromcache = False - """HTTP protocol version used by server. - - 10 for HTTP/1.0, 11 for HTTP/1.1. - """ - version = 11 - - "Status code returned by server. " - status = 200 - """Reason phrase returned by server.""" - reason = "Ok" - - previous = None - - def __init__(self, info): - # info is either an email.message or - # an httplib.HTTPResponse object. 
- if isinstance(info, http.client.HTTPResponse): - for key, value in info.getheaders(): - key = key.lower() - prev = self.get(key) - if prev is not None: - value = ", ".join((prev, value)) - self[key] = value - self.status = info.status - self["status"] = str(self.status) - self.reason = info.reason - self.version = info.version - elif isinstance(info, email.message.Message): - for key, value in list(info.items()): - self[key.lower()] = value - self.status = int(self["status"]) - else: - for key, value in info.items(): - self[key.lower()] = value - self.status = int(self.get("status", self.status)) - - def __getattr__(self, name): - if name == "dict": - return self - else: - raise AttributeError(name) diff --git a/shotgun_api3/lib/httplib2/python3/auth.py b/shotgun_api3/lib/httplib2/python3/auth.py deleted file mode 100644 index 53f427be1..000000000 --- a/shotgun_api3/lib/httplib2/python3/auth.py +++ /dev/null @@ -1,69 +0,0 @@ -import base64 -import re - -from ... import pyparsing as pp - -from .error import * - - -try: # pyparsing>=3.0.0 - downcaseTokens = pp.common.downcaseTokens -except AttributeError: - downcaseTokens = pp.downcaseTokens - -UNQUOTE_PAIRS = re.compile(r"\\(.)") -unquote = lambda s, l, t: UNQUOTE_PAIRS.sub(r"\1", t[0][1:-1]) - -# https://tools.ietf.org/html/rfc7235#section-1.2 -# https://tools.ietf.org/html/rfc7235#appendix-B -tchar = "!#$%&'*+-.^_`|~" + pp.nums + pp.alphas -token = pp.Word(tchar).setName("token") -token68 = pp.Combine(pp.Word("-._~+/" + pp.nums + pp.alphas) + pp.Optional(pp.Word("=").leaveWhitespace())).setName( - "token68" -) - -quoted_string = pp.dblQuotedString.copy().setName("quoted-string").setParseAction(unquote) -auth_param_name = token.copy().setName("auth-param-name").addParseAction(downcaseTokens) -auth_param = auth_param_name + pp.Suppress("=") + (quoted_string | token) -params = pp.Dict(pp.delimitedList(pp.Group(auth_param))) - -scheme = token("scheme") -challenge = scheme + (params("params") | token68("token")) - 
-authentication_info = params.copy() -www_authenticate = pp.delimitedList(pp.Group(challenge)) - - -def _parse_authentication_info(headers, headername="authentication-info"): - """https://tools.ietf.org/html/rfc7615 - """ - header = headers.get(headername, "").strip() - if not header: - return {} - try: - parsed = authentication_info.parseString(header) - except pp.ParseException as ex: - # print(ex.explain(ex)) - raise MalformedHeader(headername) - - return parsed.asDict() - - -def _parse_www_authenticate(headers, headername="www-authenticate"): - """Returns a dictionary of dictionaries, one dict per auth_scheme.""" - header = headers.get(headername, "").strip() - if not header: - return {} - try: - parsed = www_authenticate.parseString(header) - except pp.ParseException as ex: - # print(ex.explain(ex)) - raise MalformedHeader(headername) - - retval = { - challenge["scheme"].lower(): challenge["params"].asDict() - if "params" in challenge - else {"token": challenge.get("token")} - for challenge in parsed - } - return retval diff --git a/shotgun_api3/lib/httplib2/python3/cacerts.txt b/shotgun_api3/lib/httplib2/python3/cacerts.txt deleted file mode 100644 index 78a444c43..000000000 --- a/shotgun_api3/lib/httplib2/python3/cacerts.txt +++ /dev/null @@ -1,2225 +0,0 @@ -# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. -# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. 
-# Label: "GTE CyberTrust Global Root" -# Serial: 421 -# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db -# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 -# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 ------BEGIN CERTIFICATE----- -MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD -VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv -bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv -b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV -UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU -cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds -b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH -iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS -r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 -04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r -GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 -3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P -lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ ------END CERTIFICATE----- - -# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Server CA" -# Serial: 1 -# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d -# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c -# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm 
-MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division -# Label: "Thawte Premium Server CA" -# Serial: 1 -# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a -# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a -# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy -dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl 
-cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -# Issuer: O=Equifax OU=Equifax Secure Certificate Authority -# Subject: O=Equifax OU=Equifax Secure Certificate Authority -# Label: "Equifax Secure CA" -# Serial: 903804111 -# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 -# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a -# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA 
-A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- - -# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network -# Label: "Verisign Class 3 Public Primary Certification Authority - G2" -# Serial: 167285380242319648451154478808036881606 -# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 -# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f -# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i 
-F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: 
"GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 1 Policy Validation Authority -# Label: "ValiCert Class 1 VA" -# Serial: 1 -# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb -# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e -# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy -NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y -LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ -TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y -TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 -LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW -I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw -nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority -# Label: "ValiCert Class 2 VA" -# Serial: 1 -# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 -# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 -# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - -# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority -# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority -# Label: "RSA Root Certificate 1" -# Serial: 1 -# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 -# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb -# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy -NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD -cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs -2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY -JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE -Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ -n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A -PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - 
G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 4 Public Primary Certification Authority - G3" -# Serial: 314531972711909413743075096039378935511 -# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df -# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d -# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 -GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ -+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd -U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm -NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY -ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ -ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 -CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq -g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm 
-fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c -2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ -bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Secure Server CA" -# Serial: 927650371 -# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee -# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 -# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 ------BEGIN CERTIFICATE----- -MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 -BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p 
-dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946059622 -# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc -# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe -# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f ------BEGIN CERTIFICATE----- -MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy -MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp 
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA -vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G -CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA -WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo -oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ -h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18 -f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN -B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy -vUxFnmG6v4SBkgPR0ml8xQ== ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK 
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure Global eBusiness CA" -# Serial: 1 -# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc -# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 -# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA 
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA -Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. -# Label: "Equifax Secure eBusiness CA 1" -# Serial: 4 -# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d -# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 -# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 -# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 -# Label: "Equifax Secure eBusiness CA 2" -# Serial: 930140085 -# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca -# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc -# SHA256 Fingerprint: 
2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20 ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj -dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 -NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD -VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G -vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ -BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX -MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl -IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw -NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq -y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy -0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 -E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Low-Value Services Root" -# Serial: 1 -# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc -# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d -# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 ------BEGIN CERTIFICATE----- -MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 -b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw 
-MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML -QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD -VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul -CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n -tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl -dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch -PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC -+Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O -BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl -MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk -ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB -IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X -7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz -43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY -eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl -pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA -WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Public Services Root" -# Serial: 1 -# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f -# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 -# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
-b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx -MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB -ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV -BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV -6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX -GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP -dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH -1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF -62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW -BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL -MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU -cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv -b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 -IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ -iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao -GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh -4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm -XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= ------END CERTIFICATE----- - -# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network -# Label: "AddTrust Qualified Certificates Root" -# Serial: 1 -# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb -# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf -# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 ------BEGIN CERTIFICATE----- -MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
-b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 -MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK -EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh -BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq -xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G -87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i -2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U -WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 -0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G -A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr -pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL -ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm -aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv -hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm -hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X -dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 -P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y -iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no -xqE= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END 
CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. -# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA 2" -# Serial: 1 -# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 -# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d -# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 ------BEGIN CERTIFICATE----- -MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs -IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg -R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A -PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 -Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL -TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL -5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 -S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe -2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE -FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap -EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td -EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv -/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN -A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 -abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF -I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz -4iIprn2DQKi6bA== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
-# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN -Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW 
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z 
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. -# Label: "America Online Root Certification Authority 1" -# Serial: 1 -# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e -# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a -# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 ------BEGIN CERTIFICATE----- -MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk -hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym -1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW -OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb -2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko 
-O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU -AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF -Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb -LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir -oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C -MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds -sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 ------END CERTIFICATE----- - -# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. -# Label: "America Online Root Certification Authority 2" -# Serial: 1 -# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf -# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 -# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd ------BEGIN CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP -bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 -MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft -ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg -Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC -206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci -KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 -JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 -BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e -Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B -PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 
-Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq -Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ -o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 -+L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj -FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn -xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 -LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc -obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 -CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe -IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA -DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F -AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX -Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb -AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl -Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw -RY8mkaKO/qk= ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE 
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=Secure Certificate Services O=Comodo CA Limited -# Subject: CN=Secure Certificate Services O=Comodo CA Limited -# Label: "Comodo Secure Services root" -# Serial: 1 -# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd -# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 -# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 ------BEGIN CERTIFICATE----- -MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp -ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow -fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV -BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM -cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S -HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 -CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk -3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz -6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV -HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud -EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv -Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw -Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww -DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 -5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj -Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI -gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ -aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl -izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= ------END CERTIFICATE----- - -# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited -# Subject: CN=Trusted Certificate Services O=Comodo CA Limited -# Label: "Comodo Trusted Services root" -# Serial: 1 -# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 -# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd -# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 -aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla 
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO -BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD -VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW -fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt -TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL -fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW -1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 -kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G -A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v -ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo -dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu -Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ -HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 -pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS -jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ -xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn -dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi ------END CERTIFICATE----- - -# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN DATACorp SGC Root CA" -# Serial: 91374294542884689855167577680241077609 -# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 -# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 -# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 ------BEGIN CERTIFICATE----- -MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB -kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug 
-Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw -IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG -EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD -VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu -dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 -E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ -D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK -4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq -lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW -bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB -o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT -MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js -LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr -BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB -AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft -Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj -j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH -KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv -2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 -mfnGV/TJVTl4uix5yaaIK/QI ------END CERTIFICATE----- - -# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com -# Label: "UTN USERFirst Hardware Root CA" -# Serial: 91374294542884704022267039221184531197 -# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 -# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 -# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 ------BEGIN 
CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB -lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt -SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG -A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe -MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v -d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh -cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn -0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ -M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a -MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd -oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI -DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy -oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 -dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy -bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF -BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli -CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE -CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t -3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS -KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 
Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 1 -# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 -# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f -# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea ------BEGIN CERTIFICATE----- -MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE -FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j -ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js -LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM 
-BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 -Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy -dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh -cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh -YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg -dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp -bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ -YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT -TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ -9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 -jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW -FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz -ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 -ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L -EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu -L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq -yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC -O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V -um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh -NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD 
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL 
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv 
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u 
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp 
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Subject: CN=TC 
TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA -# Label: "TC TrustCenter Class 2 CA II" -# Serial: 941389028203453866782103406992443 -# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 -# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e -# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf -tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg -uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J -XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK -8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 -5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 -kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u -TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS -GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt -ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 -au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV 
-hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI -dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA -# Label: "TC TrustCenter Class 3 CA II" -# Serial: 1506523511417715638772220530020799 -# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e -# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 -# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e ------BEGIN CERTIFICATE----- -MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV -BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 -Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 -OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i -SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc -VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW -Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q -Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 -1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq -ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 -Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX -XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy -dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 -Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz -JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 -Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u 
-TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN -irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 -TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 -g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB -95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj -S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== ------END CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA I" -# Serial: 601024842042189035295619584734726 -# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c -# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 -# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx -MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg -R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD -VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR -JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T -fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu -jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z -wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ -fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD -VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G 
-CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 -7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn -8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs -ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT -ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ -2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx 
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB 
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. 
OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM 
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ -seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz 
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj 
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END 
CERTIFICATE----- - -# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA -# Label: "TC TrustCenter Universal CA III" -# Serial: 2010889993983507346460533407902964 -# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b -# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87 -# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d ------BEGIN CERTIFICATE----- -MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL -MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV -BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1 -c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy -MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl -ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm -BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF -5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv -DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v -zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT -yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj -dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh -MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI -4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz -dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY -aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G -DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV -CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH -LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== ------END 
CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# 
Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b 
------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U 
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ -u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: 
bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing -# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing -# Label: "StartCom Certification Authority" -# Serial: 45 -# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 -# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 -# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 ------BEGIN CERTIFICATE----- -MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg -Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 -MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi -U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh -cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk -pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf -OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C -Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT -Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi -HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM -Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w -+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ -Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 -Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B -26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID -AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul -F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC -ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w -ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk -aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 
-YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg -c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 -d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG -CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 -dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF -wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS -Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst -0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc -pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl -CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF -P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK -1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm -KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE -JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ -8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm -fyWl8kgAwKQB2j8= ------END CERTIFICATE----- - -# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. -# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
-# Label: "StartCom Certification Authority G2" -# Serial: 59 -# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 -# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 -# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 ------BEGIN CERTIFICATE----- -MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW -MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 -OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG -A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ -JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD -vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo -D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ -Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW -RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK -HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN -nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM -0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i -UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 -Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg -TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL -BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K -2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX -UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl -6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK -9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ -HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI 
-wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY -XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l -IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo -hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr -so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US -# Subject: CN=DigiCert Global Root G2, OU=www.digicert.com, O=DigiCert Inc, C=US -# Serial: 33af1e6a711a9a0bb2864b11d09fae5 -# MD5 Fingerprint: E4:A6:8A:C8:54:AC:52:42:46:0A:FD:72:48:1B:2A:44 -# SHA1 Fingerprint: DF:3C:24:F9:BF:D6:66:76:1B:26:80:73:FE:06:D1:CC:8D:4F:82:A4 -# SHA256 Fingerprint: CB:3C:CB:B7:60:31:E5:E0:13:8F:8D:D3:9A:23:F9:DE:47:FF:C3:5E:43:C1:14:4C:EA:27:D4:6A:5A:B1:CB:5F ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 
-8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X1 -# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X1 -# Serial: 8210CFB0D240E3594463E0BB63828B00 -# SHA1 Fingerprint: CA:BD:2A:79:A1:07:6A:31:F2:1D:25:36:35:CB:03:9D:43:29:A5:E8 -# SHA256 Fingerprint: 96:BC:EC:06:26:49:76:F3:74:60:77:9A:CF:28:C5:A7:CF:E8:A3:C0:AA:E1:1A:8F:FC:EE:05:C0:BD:DF:08:C6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK 
-NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc -oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: /C=US/O=Internet Security Research Group/CN=ISRG Root X2 -# Subject: /C=US/O=Internet Security Research Group/CN=ISRG Root X2 -# Serial: 41D29DD172EAEEA780C12C6CE92F8752 -# SHA1 Fingerprint: BD:B1:B9:3C:D5:97:8D:45:C6:26:14:55:F8:DB:95:C7:5A:D1:53:AF -# SHA256 Fingerprint: 69:72:9B:8E:15:A8:6E:FC:17:7A:57:AF:B7:17:1D:FC:64:AD:D2:8C:2F:CA:8C:F1:50:7E:34:45:3C:CB:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- diff --git a/shotgun_api3/lib/httplib2/python3/certs.py b/shotgun_api3/lib/httplib2/python3/certs.py deleted file mode 100644 index 59d1ffc70..000000000 --- a/shotgun_api3/lib/httplib2/python3/certs.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Utilities for 
certificate management.""" - -import os - -certifi_available = False -certifi_where = None -try: - from certifi import where as certifi_where - certifi_available = True -except ImportError: - pass - -custom_ca_locater_available = False -custom_ca_locater_where = None -try: - from ca_certs_locater import get as custom_ca_locater_where - custom_ca_locater_available = True -except ImportError: - pass - - -BUILTIN_CA_CERTS = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "cacerts.txt" -) - - -def where(): - env = os.environ.get("HTTPLIB2_CA_CERTS") - if env is not None: - if os.path.isfile(env): - return env - else: - raise RuntimeError("Environment variable HTTPLIB2_CA_CERTS not a valid file") - if custom_ca_locater_available: - return custom_ca_locater_where() - if certifi_available: - return certifi_where() - return BUILTIN_CA_CERTS - - -if __name__ == "__main__": - print(where()) diff --git a/shotgun_api3/lib/httplib2/python3/error.py b/shotgun_api3/lib/httplib2/python3/error.py deleted file mode 100644 index 0e68c12a8..000000000 --- a/shotgun_api3/lib/httplib2/python3/error.py +++ /dev/null @@ -1,48 +0,0 @@ -# All exceptions raised here derive from HttpLib2Error -class HttpLib2Error(Exception): - pass - - -# Some exceptions can be caught and optionally -# be turned back into responses. 
-class HttpLib2ErrorWithResponse(HttpLib2Error): - def __init__(self, desc, response, content): - self.response = response - self.content = content - HttpLib2Error.__init__(self, desc) - - -class RedirectMissingLocation(HttpLib2ErrorWithResponse): - pass - - -class RedirectLimit(HttpLib2ErrorWithResponse): - pass - - -class FailedToDecompressContent(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class MalformedHeader(HttpLib2Error): - pass - - -class RelativeURIError(HttpLib2Error): - pass - - -class ServerNotFoundError(HttpLib2Error): - pass - - -class ProxiesUnavailableError(HttpLib2Error): - pass diff --git a/shotgun_api3/lib/httplib2/python3/iri2uri.py b/shotgun_api3/lib/httplib2/python3/iri2uri.py deleted file mode 100644 index 86e361e62..000000000 --- a/shotgun_api3/lib/httplib2/python3/iri2uri.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -"""Converts an IRI to a URI.""" - -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = [] -__version__ = "1.0.0" -__license__ = "MIT" - -import urllib.parse - -# Convert an IRI to a URI following the rules in RFC 3987 -# -# The characters we need to enocde and escape are defined in the spec: -# -# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD -# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF -# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD -# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD -# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD -# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD -# / %xD0000-DFFFD / %xE1000-EFFFD - -escape_range = [ - (0xA0, 0xD7FF), - (0xE000, 0xF8FF), - (0xF900, 0xFDCF), - (0xFDF0, 0xFFEF), - (0x10000, 0x1FFFD), - (0x20000, 0x2FFFD), - (0x30000, 0x3FFFD), - (0x40000, 0x4FFFD), - (0x50000, 0x5FFFD), - (0x60000, 0x6FFFD), - (0x70000, 0x7FFFD), - 
(0x80000, 0x8FFFD), - (0x90000, 0x9FFFD), - (0xA0000, 0xAFFFD), - (0xB0000, 0xBFFFD), - (0xC0000, 0xCFFFD), - (0xD0000, 0xDFFFD), - (0xE1000, 0xEFFFD), - (0xF0000, 0xFFFFD), - (0x100000, 0x10FFFD), -] - - -def encode(c): - retval = c - i = ord(c) - for low, high in escape_range: - if i < low: - break - if i >= low and i <= high: - retval = "".join(["%%%2X" % o for o in c.encode("utf-8")]) - break - return retval - - -def iri2uri(uri): - """Convert an IRI to a URI. Note that IRIs must be - passed in a unicode strings. That is, do not utf-8 encode - the IRI before passing it into the function.""" - if isinstance(uri, str): - (scheme, authority, path, query, fragment) = urllib.parse.urlsplit(uri) - authority = authority.encode("idna").decode("utf-8") - # For each character in 'ucschar' or 'iprivate' - # 1. encode as utf-8 - # 2. then %-encode each octet of that utf-8 - uri = urllib.parse.urlunsplit((scheme, authority, path, query, fragment)) - uri = "".join([encode(c) for c in uri]) - return uri - - -if __name__ == "__main__": - import unittest - - class Test(unittest.TestCase): - def test_uris(self): - """Test that URIs are invariant under the transformation.""" - invariant = [ - "ftp://ftp.is.co.za/rfc/rfc1808.txt", - "http://www.ietf.org/rfc/rfc2396.txt", - "ldap://[2001:db8::7]/c=GB?objectClass?one", - "mailto:John.Doe@example.com", - "news:comp.infosystems.www.servers.unix", - "tel:+1-816-555-1212", - "telnet://192.0.2.16:80/", - "urn:oasis:names:specification:docbook:dtd:xml:4.1.2", - ] - for uri in invariant: - self.assertEqual(uri, iri2uri(uri)) - - def test_iri(self): - """Test that the right type of escaping is done for each part of the URI.""" - self.assertEqual( - "http://xn--o3h.com/%E2%98%84", - iri2uri("http://\N{COMET}.com/\N{COMET}"), - ) - self.assertEqual( - "http://bitworking.org/?fred=%E2%98%84", - iri2uri("http://bitworking.org/?fred=\N{COMET}"), - ) - self.assertEqual( - "http://bitworking.org/#%E2%98%84", - 
iri2uri("http://bitworking.org/#\N{COMET}"), - ) - self.assertEqual("#%E2%98%84", iri2uri("#\N{COMET}")) - self.assertEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"), - ) - self.assertEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri(iri2uri("/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")), - ) - self.assertNotEqual( - "/fred?bar=%E2%98%9A#%E2%98%84", - iri2uri( - "/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode("utf-8") - ), - ) - - unittest.main() diff --git a/shotgun_api3/lib/httplib2/python3/socks.py b/shotgun_api3/lib/httplib2/python3/socks.py deleted file mode 100644 index cc68e634c..000000000 --- a/shotgun_api3/lib/httplib2/python3/socks.py +++ /dev/null @@ -1,518 +0,0 @@ -"""SocksiPy - Python SOCKS module. - -Version 1.00 - -Copyright 2006 Dan-Haim. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -3. Neither the name of Dan Haim nor the names of his contributors may be used - to endorse or promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA -OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. - -This module provides a standard socket-like interface for Python -for tunneling connections through SOCKS proxies. - -Minor modifications made by Christopher Gilbert (http://motomastyle.com/) for -use in PyLoris (http://pyloris.sourceforge.net/). - -Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) -mainly to merge bug fixes found in Sourceforge. -""" - -import base64 -import socket -import struct -import sys - -if getattr(socket, "socket", None) is None: - raise ImportError("socket.socket missing, proxy support unusable") - -PROXY_TYPE_SOCKS4 = 1 -PROXY_TYPE_SOCKS5 = 2 -PROXY_TYPE_HTTP = 3 -PROXY_TYPE_HTTP_NO_TUNNEL = 4 - -_defaultproxy = None -_orgsocket = socket.socket - - -class ProxyError(Exception): - pass - - -class GeneralProxyError(ProxyError): - pass - - -class Socks5AuthError(ProxyError): - pass - - -class Socks5Error(ProxyError): - pass - - -class Socks4Error(ProxyError): - pass - - -class HTTPError(ProxyError): - pass - - -_generalerrors = ( - "success", - "invalid data", - "not connected", - "not available", - "bad proxy type", - "bad input", -) - -_socks5errors = ( - "succeeded", - "general SOCKS server failure", - "connection not allowed by ruleset", - "Network unreachable", - "Host unreachable", - "Connection refused", - "TTL expired", - "Command not supported", - "Address type not supported", - "Unknown error", -) - -_socks5autherrors = ( - "succeeded", - "authentication is required", - "all offered authentication methods were rejected", - "unknown 
username or invalid password", - "unknown error", -) - -_socks4errors = ( - "request granted", - "request rejected or failed", - "request rejected because SOCKS server cannot connect to identd on the client", - "request rejected because the client program and identd report different " - "user-ids", - "unknown error", -) - - -def setdefaultproxy( - proxytype=None, addr=None, port=None, rdns=True, username=None, password=None -): - """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - Sets a default proxy which all further socksocket objects will use, - unless explicitly changed. - """ - global _defaultproxy - _defaultproxy = (proxytype, addr, port, rdns, username, password) - - -def wrapmodule(module): - """wrapmodule(module) - - Attempts to replace a module's socket library with a SOCKS socket. Must set - a default proxy using setdefaultproxy(...) first. - This will only work on modules that import socket directly into the - namespace; - most of the Python Standard Library falls into this category. - """ - if _defaultproxy != None: - module.socket.socket = socksocket - else: - raise GeneralProxyError((4, "no proxy specified")) - - -class socksocket(socket.socket): - """socksocket([family[, type[, proto]]]) -> socket object - Open a SOCKS enabled socket. The parameters are the same as - those of the standard socket init. In order for SOCKS to work, - you must specify family=AF_INET, type=SOCK_STREAM and proto=0. - """ - - def __init__( - self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None - ): - _orgsocket.__init__(self, family, type, proto, _sock) - if _defaultproxy != None: - self.__proxy = _defaultproxy - else: - self.__proxy = (None, None, None, None, None, None) - self.__proxysockname = None - self.__proxypeername = None - self.__httptunnel = True - - def __recvall(self, count): - """__recvall(count) -> data - Receive EXACTLY the number of bytes requested from the socket. 
- Blocks until the required number of bytes have been received. - """ - data = self.recv(count) - while len(data) < count: - d = self.recv(count - len(data)) - if not d: - raise GeneralProxyError((0, "connection closed unexpectedly")) - data = data + d - return data - - def sendall(self, content, *args): - """ override socket.socket.sendall method to rewrite the header - for non-tunneling proxies if needed - """ - if not self.__httptunnel: - content = self.__rewriteproxy(content) - return super(socksocket, self).sendall(content, *args) - - def __rewriteproxy(self, header): - """ rewrite HTTP request headers to support non-tunneling proxies - (i.e. those which do not support the CONNECT method). - This only works for HTTP (not HTTPS) since HTTPS requires tunneling. - """ - host, endpt = None, None - hdrs = header.split("\r\n") - for hdr in hdrs: - if hdr.lower().startswith("host:"): - host = hdr - elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): - endpt = hdr - if host and endpt: - hdrs.remove(host) - hdrs.remove(endpt) - host = host.split(" ")[1] - endpt = endpt.split(" ") - if self.__proxy[4] != None and self.__proxy[5] != None: - hdrs.insert(0, self.__getauthheader()) - hdrs.insert(0, "Host: %s" % host) - hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) - return "\r\n".join(hdrs) - - def __getauthheader(self): - auth = self.__proxy[4] + b":" + self.__proxy[5] - return "Proxy-Authorization: Basic " + base64.b64encode(auth).decode() - - def setproxy( - self, - proxytype=None, - addr=None, - port=None, - rdns=True, - username=None, - password=None, - headers=None, - ): - """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - - Sets the proxy to be used. - proxytype - The type of the proxy to be used. Three types - are supported: PROXY_TYPE_SOCKS4 (including socks4a), - PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP - addr - The address of the server (IP or DNS). - port - The port of the server. 
Defaults to 1080 for SOCKS - servers and 8080 for HTTP proxy servers. - rdns - Should DNS queries be preformed on the remote side - (rather than the local side). The default is True. - Note: This has no effect with SOCKS4 servers. - username - Username to authenticate with to the server. - The default is no authentication. - password - Password to authenticate with to the server. - Only relevant when username is also provided. - headers - Additional or modified headers for the proxy connect - request. - """ - self.__proxy = ( - proxytype, - addr, - port, - rdns, - username.encode() if username else None, - password.encode() if password else None, - headers, - ) - - def __negotiatesocks5(self, destaddr, destport): - """__negotiatesocks5(self,destaddr,destport) - Negotiates a connection through a SOCKS5 server. - """ - # First we'll send the authentication packages we support. - if (self.__proxy[4] != None) and (self.__proxy[5] != None): - # The username/password details were supplied to the - # setproxy method so we support the USERNAME/PASSWORD - # authentication (in addition to the standard none). - self.sendall(struct.pack("BBBB", 0x05, 0x02, 0x00, 0x02)) - else: - # No username/password were entered, therefore we - # only support connections with no authentication. - self.sendall(struct.pack("BBB", 0x05, 0x01, 0x00)) - # We'll receive the server's response to determine which - # method was selected - chosenauth = self.__recvall(2) - if chosenauth[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - # Check the chosen authentication method - if chosenauth[1:2] == chr(0x00).encode(): - # No authentication is required - pass - elif chosenauth[1:2] == chr(0x02).encode(): - # Okay, we need to perform a basic username/password - # authentication. 
- packet = bytearray() - packet.append(0x01) - packet.append(len(self.__proxy[4])) - packet.extend(self.__proxy[4]) - packet.append(len(self.__proxy[5])) - packet.extend(self.__proxy[5]) - self.sendall(packet) - authstat = self.__recvall(2) - if authstat[0:1] != chr(0x01).encode(): - # Bad response - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if authstat[1:2] != chr(0x00).encode(): - # Authentication failed - self.close() - raise Socks5AuthError((3, _socks5autherrors[3])) - # Authentication succeeded - else: - # Reaching here is always bad - self.close() - if chosenauth[1] == chr(0xFF).encode(): - raise Socks5AuthError((2, _socks5autherrors[2])) - else: - raise GeneralProxyError((1, _generalerrors[1])) - # Now we can request the actual connection - req = struct.pack("BBB", 0x05, 0x01, 0x00) - # If the given destination address is an IP address, we'll - # use the IPv4 address request even if remote resolving was specified. - try: - ipaddr = socket.inet_aton(destaddr) - req = req + chr(0x01).encode() + ipaddr - except socket.error: - # Well it's not an IP number, so it's probably a DNS name. 
- if self.__proxy[3]: - # Resolve remotely - ipaddr = None - req = ( - req - + chr(0x03).encode() - + chr(len(destaddr)).encode() - + destaddr.encode() - ) - else: - # Resolve locally - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - req = req + chr(0x01).encode() + ipaddr - req = req + struct.pack(">H", destport) - self.sendall(req) - # Get the response - resp = self.__recvall(4) - if resp[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - elif resp[1:2] != chr(0x00).encode(): - # Connection failed - self.close() - if ord(resp[1:2]) <= 8: - raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) - else: - raise Socks5Error((9, _socks5errors[9])) - # Get the bound address/port - elif resp[3:4] == chr(0x01).encode(): - boundaddr = self.__recvall(4) - elif resp[3:4] == chr(0x03).encode(): - resp = resp + self.recv(1) - boundaddr = self.__recvall(ord(resp[4:5])) - else: - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - boundport = struct.unpack(">H", self.__recvall(2))[0] - self.__proxysockname = (boundaddr, boundport) - if ipaddr != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def getproxysockname(self): - """getsockname() -> address info - Returns the bound IP address and port number at the proxy. - """ - return self.__proxysockname - - def getproxypeername(self): - """getproxypeername() -> address info - Returns the IP and port number of the proxy. - """ - return _orgsocket.getpeername(self) - - def getpeername(self): - """getpeername() -> address info - Returns the IP address and port number of the destination - machine (note: getproxypeername returns the proxy) - """ - return self.__proxypeername - - def __negotiatesocks4(self, destaddr, destport): - """__negotiatesocks4(self,destaddr,destport) - Negotiates a connection through a SOCKS4 server. 
- """ - # Check if the destination address provided is an IP address - rmtrslv = False - try: - ipaddr = socket.inet_aton(destaddr) - except socket.error: - # It's a DNS name. Check where it should be resolved. - if self.__proxy[3]: - ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) - rmtrslv = True - else: - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - # Construct the request packet - req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr - # The username parameter is considered userid for SOCKS4 - if self.__proxy[4] != None: - req = req + self.__proxy[4] - req = req + chr(0x00).encode() - # DNS name if remote resolving is required - # NOTE: This is actually an extension to the SOCKS4 protocol - # called SOCKS4A and may not be supported in all cases. - if rmtrslv: - req = req + destaddr + chr(0x00).encode() - self.sendall(req) - # Get the response from the server - resp = self.__recvall(8) - if resp[0:1] != chr(0x00).encode(): - # Bad data - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if resp[1:2] != chr(0x5A).encode(): - # Server returned an error - self.close() - if ord(resp[1:2]) in (91, 92, 93): - self.close() - raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) - else: - raise Socks4Error((94, _socks4errors[4])) - # Get the bound address/port - self.__proxysockname = ( - socket.inet_ntoa(resp[4:]), - struct.unpack(">H", resp[2:4])[0], - ) - if rmtrslv != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def __negotiatehttp(self, destaddr, destport): - """__negotiatehttp(self,destaddr,destport) - Negotiates a connection through an HTTP server. 
- """ - # If we need to resolve locally, we do this now - if not self.__proxy[3]: - addr = socket.gethostbyname(destaddr) - else: - addr = destaddr - headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] - wrote_host_header = False - wrote_auth_header = False - if self.__proxy[6] != None: - for key, val in self.__proxy[6].iteritems(): - headers += [key, ": ", val, "\r\n"] - wrote_host_header = key.lower() == "host" - wrote_auth_header = key.lower() == "proxy-authorization" - if not wrote_host_header: - headers += ["Host: ", destaddr, "\r\n"] - if not wrote_auth_header: - if self.__proxy[4] != None and self.__proxy[5] != None: - headers += [self.__getauthheader(), "\r\n"] - headers.append("\r\n") - self.sendall("".join(headers).encode()) - # We read the response until we get the string "\r\n\r\n" - resp = self.recv(1) - while resp.find("\r\n\r\n".encode()) == -1: - resp = resp + self.recv(1) - # We just need the first line to check if the connection - # was successful - statusline = resp.splitlines()[0].split(" ".encode(), 2) - if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - try: - statuscode = int(statusline[1]) - except ValueError: - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if statuscode != 200: - self.close() - raise HTTPError((statuscode, statusline[2])) - self.__proxysockname = ("0.0.0.0", 0) - self.__proxypeername = (addr, destport) - - def connect(self, destpair): - """connect(self, despair) - Connects to the specified destination through a proxy. - destpar - A tuple of the IP/DNS address and the port number. - (identical to socket's connect). - To select the proxy server use setproxy(). 
- """ - # Do a minimal input check first - if ( - (not type(destpair) in (list, tuple)) - or (len(destpair) < 2) - or (not isinstance(destpair[0], (str, bytes))) - or (type(destpair[1]) != int) - ): - raise GeneralProxyError((5, _generalerrors[5])) - if self.__proxy[0] == PROXY_TYPE_SOCKS5: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatesocks5(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_SOCKS4: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatesocks4(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatehttp(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - if destpair[1] == 443: - self.__negotiatehttp(destpair[0], destpair[1]) - else: - self.__httptunnel = False - elif self.__proxy[0] == None: - _orgsocket.connect(self, (destpair[0], destpair[1])) - else: - raise GeneralProxyError((4, _generalerrors[4])) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 778393309..4390fea17 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -58,7 +58,7 @@ from .lib import six from .lib import sgsix from .lib import sgutils -from .lib.httplib2 import Http, ProxyInfo, socks, ssl_error_classes +from .lib.httplib2 import Http, ProxyInfo, socks from .lib.sgtimezone import SgTimezone @@ -3873,7 +3873,7 @@ def _make_call(self, verb, path, body, headers): # otherwise and will not re-attempt. 
# When we drop support of Python 2 and we will probably drop the # next except, we might want to remove this except too. - except ssl_error_classes as e: + except (ssl.SSLError, ssl.CertificateError) as e: # Test whether the exception is due to the fact that this is an older version of # Python that cannot validate certificates encrypted with SHA-2. If it is, then # fall back on disabling the certificate validation and try again - unless the diff --git a/tests/test_api.py b/tests/test_api.py index d42328f3c..f4cb42cf7 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -3434,10 +3434,6 @@ class TestLibImports(base.LiveTestBase): def test_import_httplib(self): """ Ensure that httplib2 is importable and objects are available - - This is important, because httplib2 imports switch between - the Python 2 and 3 compatible versions, and the module imports are - proxied to allow this. """ from shotgun_api3.lib import httplib2 @@ -3446,17 +3442,6 @@ def test_import_httplib(self): self.assertTrue(hasattr(httplib2, "Http")) self.assertTrue(isinstance(httplib2.Http, object)) - # Ensure that the version of httplib2 compatible with the current Python - # version was imported. - # (The last module name for __module__ should be either python2 or - # python3, depending on what has been imported. Make sure we got the - # right one.) - httplib2_compat_version = httplib2.Http.__module__.split(".")[-1] - if six.PY2: - self.assertEqual(httplib2_compat_version, "python2") - elif six.PY3: - self.assertTrue(httplib2_compat_version, "python3") - # Ensure that socks submodule is present and importable using a from # import -- this is a good indication that external httplib2 imports # from shotgun_api3 will work as expected. diff --git a/tests/test_unit.py b/tests/test_unit.py index 445d1fe07..42f882af4 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -11,13 +11,14 @@ # not expressly granted therein are reserved by Shotgun Software Inc. 
import os +import ssl import unittest from unittest import mock import urllib.request import urllib.error import shotgun_api3 as api -from shotgun_api3.lib.httplib2 import Http, ssl_error_classes +from shotgun_api3.lib.httplib2 import Http class TestShotgunInit(unittest.TestCase): @@ -771,7 +772,7 @@ def test_httplib(self): """ # First check that we get an error when trying to connect to a known dummy bad URL self.assertRaises( - ssl_error_classes, + (ssl.SSLError, ssl.CertificateError), self._check_url_with_sg_api_httplib2, self.bad_url, self.certs, diff --git a/update_httplib2.py b/update_httplib2.py index 30422e0c2..7124860e6 100755 --- a/update_httplib2.py +++ b/update_httplib2.py @@ -63,7 +63,7 @@ def sanitize_file(self, file_path): contents = contents.replace("from httplib2.", "from .") contents = contents.replace("from httplib2", "from .") contents = contents.replace( - "import pyparsing as pp", "from ... import pyparsing as pp" + "import pyparsing as pp", "from .. import pyparsing as pp" ) with open(file_path, "w") as f: @@ -73,8 +73,6 @@ def sanitize_file(self, file_path): def main(temp_path, repo_root, version): # Paths and file names httplib2_dir = repo_root / "shotgun_api3" / "lib" / "httplib2" - python2_dir = str(httplib2_dir / "python2") - python3_dir = str(httplib2_dir / "python3") file_name = f"{version}.zip" file_path = temp_path / file_name @@ -88,20 +86,15 @@ def main(temp_path, repo_root, version): unzipped_folder.mkdir() utilities.unzip_archive(file_path, file_name, unzipped_folder) - # Remove current httplib2/python2 and httplib2/python3 folders - utilities.remove_folder(python2_dir) - utilities.remove_folder(python3_dir) - # Removes the previous version of httplib2 - utilities.git_remove([str(python2_dir), str(python3_dir)]) + utilities.git_remove([str(httplib2_dir)]) + utilities.remove_folder(httplib2_dir) # Copies a new version into place. 
print("Copying new version of httplib2") root_folder = unzipped_folder / f"httplib2-{version[1:]}" - utilities.copy_folder(str(root_folder / "python2" / "httplib2"), python2_dir) - utilities.copy_folder(str(root_folder / "python3" / "httplib2"), python3_dir) - utilities.remove_folder(f"{python2_dir}/test") - utilities.remove_folder(f"{python3_dir}/test") + utilities.copy_folder(str(root_folder / "python3" / "httplib2"), httplib2_dir) + utilities.remove_folder(f"{httplib2_dir}/test") # Patches the httplib2 imports so they are relative instead of absolute. print("Patching imports") @@ -110,7 +103,7 @@ def main(temp_path, repo_root, version): # Adding files to the git repo. print("Adding to git") - subprocess.check_output(["git", "add", str(python2_dir), str(python3_dir)]) + subprocess.check_output(["git", "add", str(httplib2_dir)]) # nosec B607 if __name__ == "__main__": From 59b8f059fff88f7be0f9f237685b073e88334dbd Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 11:11:17 -0700 Subject: [PATCH 37/59] SG-38306 Python2 Removal - Part 7 - various (#404) --- README.md | 2 +- azure-pipelines-templates/run-tests.yml | 10 ++--- shotgun_api3/shotgun.py | 59 +++++++------------------ tests/base.py | 11 ++--- tests/test_api.py | 51 ++------------------- tests/test_client.py | 5 +-- 6 files changed, 31 insertions(+), 107 deletions(-) diff --git a/README.md b/README.md index 33e493821..f37c5dc21 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ Integration and unit tests are provided. - (Note: Running `pip install -r tests/ci_requirements.txt` will install this package) - A `tests/config` file (you can copy an example from `tests/example_config`). - Tests can be run individually like this: `nosetests --config="nose.cfg" tests/test_client.py` - - Make sure to not forget the `--config="nose.cfg"` option. This option tells nose to use our config file. 
This will exclude python 2- and 3-specific files in the `/lib` directory, preventing a failure from being reported by nose for compilation due to incompatible syntax in those files. + - Make sure to not forget the `--config="nose.cfg"` option. This option tells nose to use our config file. - `test_client` and `tests_unit` use mock server interaction and do not require a Flow Production Tracking instance to be available (no modifications to `tests/config` are necessary). - `test_api` and `test_api_long` *do* require a Flow Production Tracking instance, with a script key available for the tests. The server and script user values must be supplied in the `tests/config` file. The tests will add test data to your server based on information in your config. This data will be manipulated by the tests, and should not be used for other purposes. - To run all of the tests, use the shell script `run-tests`. diff --git a/azure-pipelines-templates/run-tests.yml b/azure-pipelines-templates/run-tests.yml index 6c60b39c8..c1a1a4ef8 100644 --- a/azure-pipelines-templates/run-tests.yml +++ b/azure-pipelines-templates/run-tests.yml @@ -33,9 +33,9 @@ parameters: jobs: # The job will be named after the OS and Azure will suffix the strategy to make it unique - # so we'll have a job name "Windows Python 2.7" for example. What's a strategy? Strategies are the - # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python 2.7" and - # " Python 3.7". + # so we'll have a job name "Windows Python 3.9" for example. What's a strategy? Strategies are the + # name of the keys under the strategy.matrix scope. So for each OS we'll have " Python 3.9" and + # " Python 3.10". - job: ${{ parameters.name }} pool: vmImage: ${{ parameters.vm_image }} @@ -68,8 +68,8 @@ jobs: versionSpec: '$(python.version)' addToPath: True - # Install all dependencies needed for running the tests. 
This command is good for - # Python 2 and 3, but also for all OSes + # Install all dependencies needed for running the tests. This command is good + # for all OSes - script: | python -m pip install --upgrade pip setuptools wheel python -m pip install -r tests/ci_requirements.txt diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 4390fea17..38d68bab7 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -35,7 +35,7 @@ import json import http.client # Used for secure file upload import http.cookiejar # used for attachment upload -import io # used for attachment upload +import io import logging import mimetypes import os @@ -49,6 +49,7 @@ import urllib.parse import urllib.request import uuid # used for attachment upload +import xml.etree.ElementTree # Import Error and ResponseError (even though they're unused in this file) since they need # to be exposed as part of the API. @@ -56,7 +57,6 @@ # Python 2/3 compatibility from .lib import six -from .lib import sgsix from .lib import sgutils from .lib.httplib2 import Http, ProxyInfo, socks from .lib.sgtimezone import SgTimezone @@ -329,7 +329,7 @@ class ClientCapabilities(object): ``windows``, or ``None`` (if the current platform couldn't be determined). :ivar str local_path_field: The PTR field used for local file paths. This is calculated using the value of ``platform``. Ex. ``local_path_mac``. - :ivar str py_version: Simple version of Python executable as a string. Eg. ``2.7``. + :ivar str py_version: Simple version of Python executable as a string. Eg. ``3.9``. :ivar str ssl_version: Version of OpenSSL installed. Eg. ``OpenSSL 1.0.2g 1 Mar 2016``. This info is only available in Python 2.7+ if the ssl module was imported successfully. Defaults to ``unknown`` @@ -567,18 +567,6 @@ def __init__( :class:`~shotgun_api3.MissingTwoFactorAuthenticationFault` will be raised if the ``auth_token`` is invalid. .. todo: Add this info to the Authentication section of the docs - - .. 
note:: A note about proxy connections: If you are using Python <= v2.6.2, HTTPS - connections through a proxy server will not work due to a bug in the :mod:`urllib2` - library (see http://bugs.python.org/issue1424152). This will affect upload and - download-related methods in the Shotgun API (eg. :meth:`~shotgun_api3.Shotgun.upload`, - :meth:`~shotgun_api3.Shotgun.upload_thumbnail`, - :meth:`~shotgun_api3.Shotgun.upload_filmstrip_thumbnail`, - :meth:`~shotgun_api3.Shotgun.download_attachment`. Normal CRUD methods for passing JSON - data should still work fine. If you cannot upgrade your Python installation, you can see - the patch merged into Python v2.6.3 (http://hg.python.org/cpython/rev/0f57b30a152f/) and - try and hack it into your installation but YMMV. For older versions of Python there - are other patches that were proposed in the bug report that may help you as well. """ # verify authentication arguments @@ -617,13 +605,7 @@ def __init__( if script_name is not None or api_key is not None: raise ValueError("cannot provide an auth_code with script_name/api_key") - # Can't use 'all' with python 2.4 - if ( - len( - [x for x in [session_token, script_name, api_key, login, password] if x] - ) - == 0 - ): + if not any([session_token, script_name, api_key, login, password]): if connect: raise ValueError( "must provide login/password, session_token or script_name/api_key" @@ -2879,8 +2861,7 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No This parameter exists only for backwards compatibility for scripts specifying the parameter with keywords. :returns: If ``file_path`` is provided, returns the path to the file on disk. If - ``file_path`` is ``None``, returns the actual data of the file, as str in Python 2 or - bytes in Python 3. + ``file_path`` is ``None``, returns the actual data of the file, as bytes. 
:rtype: str | bytes """ # backwards compatibility when passed via keyword argument @@ -2941,12 +2922,13 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No ] if body: - xml = "".join(body) - # Once python 2.4 support is not needed we can think about using - # elementtree. The doc is pretty small so this shouldn't be an issue. - match = re.search("(.*)", xml) - if match: - err += " - %s" % (match.group(1)) + try: + root = xml.etree.ElementTree.fromstring("".join(body)) + message_elem = root.find(".//Message") + if message_elem is not None and message_elem.text: + err = f"{err} - {message_elem.text}" + except xml.etree.ElementTree.ParseError: + err = f"{err}\n{''.join(body)}\n" elif e.code == 409 or e.code == 410: # we may be dealing with a file that is pending/failed a malware scan, e.g: # 409: This file is undergoing a malware scan, please try again in a few minutes @@ -4693,17 +4675,12 @@ class FormPostHandler(urllib.request.BaseHandler): handler_order = urllib.request.HTTPHandler.handler_order - 10 # needs to run first def http_request(self, request): - # get_data was removed in 3.4. since we're testing against 3.6 and - # 3.7, this should be sufficient. - if six.PY3: - data = request.data - else: - data = request.get_data() + data = request.data if data is not None and not isinstance(data, str): files = [] params = [] for key, value in data.items(): - if isinstance(value, sgsix.file_types): + if isinstance(value, io.IOBase): files.append((key, value)) else: params.append((key, value)) @@ -4714,12 +4691,8 @@ def http_request(self, request): boundary, data = self.encode(params, files) content_type = "multipart/form-data; boundary=%s" % boundary request.add_unredirected_header("Content-Type", content_type) - # add_data was removed in 3.4. since we're testing against 3.6 and - # 3.7, this should be sufficient. 
- if six.PY3: - request.data = data - else: - request.add_data(data) + request.data = data + return request def encode(self, params, files, boundary=None, buffer=None): diff --git a/tests/base.py b/tests/base.py index d1f138f47..3795d93af 100644 --- a/tests/base.py +++ b/tests/base.py @@ -176,13 +176,10 @@ def _mock_http(self, data, headers=None, status=None): return if not isinstance(data, str): - if six.PY2: - data = json.dumps(data, ensure_ascii=False, encoding="utf-8") - else: - data = json.dumps( - data, - ensure_ascii=False, - ) + data = json.dumps( + data, + ensure_ascii=False, + ) resp_headers = { "cache-control": "no-cache", diff --git a/tests/test_api.py b/tests/test_api.py index f4cb42cf7..85ed97285 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -32,11 +32,6 @@ from shotgun_api3.lib import six from shotgun_api3.lib.httplib2 import Http -# To mock the correct exception when testion on Python 2 and 3, use the -# ShotgunSSLError variable from sgsix that contains the appropriate exception -# class for the current Python version. -from shotgun_api3.lib.sgsix import ShotgunSSLError - import shotgun_api3 from . import base @@ -272,44 +267,6 @@ def test_upload_download(self): "sg_uploaded_movie", tag_list="monkeys, everywhere, send, help", ) - if six.PY2: - # In Python2, make sure that non-utf-8 encoded paths raise when they - # can't be converted to utf-8. For Python3, we'll skip these tests - # since string encoding is handled differently. - - # We need to touch the file we're going to test with first. We can't - # bundle a file with this filename in the repo due to some pip install - # problems on Windows. Note that the path below is utf-8 encoding of - # what we'll eventually encode as shift-jis. 
- file_path_s = os.path.join(this_dir, "./\xe3\x81\x94.shift-jis") - file_path_u = file_path_s.decode("utf-8") - - with open( - file_path_u if sys.platform.startswith("win") else file_path_s, "w" - ) as fh: - fh.write("This is just a test file with some random data in it.") - - self.assertRaises( - shotgun_api3.ShotgunError, - self.sg.upload, - "Version", - self.version["id"], - file_path_u.encode("shift-jis"), - "sg_uploaded_movie", - tag_list="monkeys, everywhere, send, help", - ) - - # But it should work in all cases if a unicode string is used. - self.sg.upload( - "Version", - self.version["id"], - file_path_u, - "sg_uploaded_movie", - tag_list="monkeys, everywhere, send, help", - ) - - # cleanup - os.remove(file_path_u) # cleanup os.remove(file_path) @@ -2265,7 +2222,7 @@ def my_side_effect2(*args, **kwargs): @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_sha2_error(self, mock_request): # Simulate the exception raised with SHA-2 errors - mock_request.side_effect = ShotgunSSLError( + mock_request.side_effect = ssl.SSLError( "[Errno 1] _ssl.c:480: error:0D0C50A1:asn1 " "encoding routines:ASN1_item_verify: unknown message digest " "algorithm" @@ -2292,7 +2249,7 @@ def test_sha2_error(self, mock_request): try: self.sg.info() - except ShotgunSSLError: + except ssl.SSLError: # ensure the api has reset the values in the correct fallback behavior self.assertTrue(self.sg.config.no_ssl_validation) self.assertTrue(shotgun_api3.shotgun.NO_SSL_VALIDATION) @@ -2305,7 +2262,7 @@ def test_sha2_error(self, mock_request): @unittest.mock.patch("shotgun_api3.shotgun.Http.request") def test_sha2_error_with_strict(self, mock_request): # Simulate the exception raised with SHA-2 errors - mock_request.side_effect = ShotgunSSLError( + mock_request.side_effect = ssl.SSLError( "[Errno 1] _ssl.c:480: error:0D0C50A1:asn1 " "encoding routines:ASN1_item_verify: unknown message digest " "algorithm" @@ -2322,7 +2279,7 @@ def test_sha2_error_with_strict(self, mock_request): 
try: self.sg.info() - except ShotgunSSLError: + except ssl.SSLError: # ensure the api has NOT reset the values in the fallback behavior because we have # set the env variable to force validation self.assertFalse(self.sg.config.no_ssl_validation) diff --git a/tests/test_client.py b/tests/test_client.py index f99a79806..90fc94a9c 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -651,10 +651,7 @@ def _assert_decode_resonse(self, ensure_ascii, data): connect=False, ) - if six.PY3: - j = json.dumps(d, ensure_ascii=ensure_ascii) - else: - j = json.dumps(d, ensure_ascii=ensure_ascii, encoding="utf-8") + j = json.dumps(d, ensure_ascii=ensure_ascii) self.assertEqual(d, sg._decode_response(headers, j)) headers["content-type"] = "text/javascript" From 592377c2939a82cc662e1fdfe26fa54257bb4793 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 11:50:18 -0700 Subject: [PATCH 38/59] SG-38306 Python2 Removal - Part 8 - Remove deprecated ensure_ascii parameter from SG object (#405) --- shotgun_api3/lib/mockgun/mockgun.py | 1 - shotgun_api3/shotgun.py | 33 ----------------------- tests/test_api.py | 42 ++++++++++------------------- tests/test_client.py | 1 - 4 files changed, 14 insertions(+), 63 deletions(-) diff --git a/shotgun_api3/lib/mockgun/mockgun.py b/shotgun_api3/lib/mockgun/mockgun.py index 18e4a142c..45d0b2aa5 100644 --- a/shotgun_api3/lib/mockgun/mockgun.py +++ b/shotgun_api3/lib/mockgun/mockgun.py @@ -177,7 +177,6 @@ def __init__(self, api_key=None, convert_datetimes_to_utc=True, http_proxy=None, - ensure_ascii=True, connect=True, ca_certs=None, login=None, diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 38d68bab7..16c073d01 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -491,7 +491,6 @@ def __init__( api_key=None, convert_datetimes_to_utc=True, http_proxy=None, - ensure_ascii=True, connect=True, ca_certs=None, login=None, @@ -709,9 +708,6 @@ def 
__init__( {self.config.scheme: proxy_addr} ) - if ensure_ascii: - self._json_loads = self._json_loads_ascii - self.client_caps = ClientCapabilities() # this relies on self.client_caps being set first self.reset_user_agent() @@ -3981,35 +3977,6 @@ def _decode_response(self, headers, body): def _json_loads(self, body): return json.loads(body) - def _json_loads_ascii(self, body): - """ - See http://stackoverflow.com/questions/956867 - """ - - def _decode_list(lst): - newlist = [] - for i in lst: - if isinstance(i, str): - i = sgutils.ensure_str(i) - elif isinstance(i, list): - i = _decode_list(i) - newlist.append(i) - return newlist - - def _decode_dict(dct): - newdict = {} - for k, v in dct.items(): - if isinstance(k, str): - k = sgutils.ensure_str(k) - if isinstance(v, str): - v = sgutils.ensure_str(v) - elif isinstance(v, list): - v = _decode_list(v) - newdict[k] = v - return newdict - - return json.loads(body, object_hook=_decode_dict) - def _response_errors(self, sg_response): """ Raise any API errors specified in the response. diff --git a/tests/test_api.py b/tests/test_api.py index 85ed97285..cbbc115fe 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -29,7 +29,6 @@ import uuid import warnings -from shotgun_api3.lib import six from shotgun_api3.lib.httplib2 import Http import shotgun_api3 @@ -828,28 +827,24 @@ def test_summary_values(self): sorted(result["groups"], key=lambda x: x["group_name"]), groups ) - def test_ensure_ascii(self): - """test_ensure_ascii tests ensure_unicode flag.""" - sg_ascii = shotgun_api3.Shotgun( - self.config.server_url, ensure_ascii=True, **self.auth_args - ) + def test_json_dumps_default_ensure_ascii_disabled(self): + """Make sure SG's payload is not using ensure_ascii for json dumps""" + sg = shotgun_api3.Shotgun(self.config.server_url, **self.auth_args) - result = sg_ascii.find_one( - "Note", [["id", "is", self.note["id"]]], fields=["content"] - ) - if six.PY2: - # In Python3 there isn't a separate unicode type.
- self.assertFalse(_has_unicode(result)) + # Mock the _http_request method + sg._orig_http_request = sg._http_request + sg._http_request = unittest.mock.Mock(wraps=sg._orig_http_request) - def test_ensure_unicode(self): - """test_ensure_unicode tests ensure_unicode flag.""" - sg_unicode = shotgun_api3.Shotgun( - self.config.server_url, ensure_ascii=False, **self.auth_args + sg.find_one( + "Note", + [["content", "is", "Noëlご"]], # Force a non-ascii character ) - result = sg_unicode.find_one( - "Note", [["id", "is", self.note["id"]]], fields=["content"] + + sg._http_request.assert_called_once() + self.assertIn( + b"No\xc3\xabl\xe3\x81\x94", # utf-8 encoded version of Noëlご + sg._http_request.call_args.args[2], # Get the body of the request ) - self.assertTrue(_has_unicode(result)) def test_work_schedule(self): """test_work_schedule tests WorkDayRules api""" @@ -3409,15 +3404,6 @@ def test_import_httplib(self): self.assertTrue(hasattr(socks, "HTTPError")) -def _has_unicode(data): - for k, v in data.items(): - if isinstance(k, str): - return True - if isinstance(v, str): - return True - return False - - def _get_path(url): """Returns path component of a url without the sheme, host, query, anchor, or any other additional elements. 
diff --git a/tests/test_client.py b/tests/test_client.py index 90fc94a9c..65678cc18 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -647,7 +647,6 @@ def _assert_decode_resonse(self, ensure_ascii, data): self.config.script_name, self.config.api_key, http_proxy=self.config.http_proxy, - ensure_ascii=ensure_ascii, connect=False, ) From f7d076b1869d16c28587647c562965316bc18326 Mon Sep 17 00:00:00 2001 From: Eduardo Chauca <166560435+eduardoChaucaGallegos@users.noreply.github.com> Date: Wed, 10 Sep 2025 17:07:09 -0500 Subject: [PATCH 39/59] SG-38306 Python2 Removal - Part 9 - six module (#395) --- .coveragerc | 1 - .flake8 | 2 +- docs/cookbook/examples/ami_handler.rst | 1 - shotgun_api3/lib/README.md | 10 - shotgun_api3/lib/requirements.txt | 3 +- shotgun_api3/lib/sgsix.py | 87 --- shotgun_api3/lib/sgutils.py | 62 -- shotgun_api3/lib/six.py | 964 ------------------------- shotgun_api3/shotgun.py | 7 +- tests/base.py | 1 - tests/test_api_long.py | 1 - tests/test_client.py | 2 - 12 files changed, 4 insertions(+), 1137 deletions(-) delete mode 100644 shotgun_api3/lib/sgsix.py delete mode 100644 shotgun_api3/lib/sgutils.py delete mode 100644 shotgun_api3/lib/six.py diff --git a/.coveragerc b/.coveragerc index 97715b18d..21ce1f03d 100644 --- a/.coveragerc +++ b/.coveragerc @@ -16,6 +16,5 @@ source=shotgun_api3 omit= shotgun_api3/lib/httplib2/* - shotgun_api3/lib/six.py shotgun_api3/lib/certify/* shotgun_api3/lib/pyparsing.py diff --git a/.flake8 b/.flake8 index 4fc6605a0..50cb9acdc 100644 --- a/.flake8 +++ b/.flake8 @@ -10,4 +10,4 @@ [flake8] max-line-length = 120 -exclude = shotgun_api3/lib/httplib2/*,shotgun_api3/lib/six.py,tests/httplib2test.py +exclude = shotgun_api3/lib/httplib2/*,tests/httplib2test.py diff --git a/docs/cookbook/examples/ami_handler.rst b/docs/cookbook/examples/ami_handler.rst index 6b8f3384b..aee16f356 100644 --- a/docs/cookbook/examples/ami_handler.rst +++ b/docs/cookbook/examples/ami_handler.rst @@ -95,7 +95,6 @@ via ``POST``. 
If you're using a custom protocol the data is sent via ``GET``. # Imports # --------------------------------------------------------------------------------------------- import sys, os - import six import logging as logger # --------------------------------------------------------------------------------------------- diff --git a/shotgun_api3/lib/README.md b/shotgun_api3/lib/README.md index 7097b6c62..afdd28437 100644 --- a/shotgun_api3/lib/README.md +++ b/shotgun_api3/lib/README.md @@ -10,18 +10,8 @@ Some third-party modules are bundled with `python-api` inside lib. The version of `httplib2` bundled should be updated manually, however its version is included in the unused `shotgun_api3/lib/requirements.txt` to allow Github's automated CVE notifications to work. -### six - -Six is a Python 2/3 compatibility library. In python-api, it's used to make simultaneous support for Python on 2 and 3 easier to maintain and more readable, but allowing the use of common helper functions, unified interfaces for modules that changed, and variables to ease type comparisons. For more on six, see the [documentation](https://six.readthedocs.io/). - -The version of `six` bundled should be updated manually, however its version is included in the unused `shotgun_api3/lib/requirements.txt` to allow Github's automated CVE notifications to work. - ## Flow Production Tracking Modules -### sgsix - -`sgsix` is a module that contains extensions to `six`. These might be additional helper functions, variables, etc. that supplement six's functionality. It is intended that `sgsix` can be used within other packages that include or depend on the `python-api` package as well. - ### sgtimezone `sgtimezone` contains classes for easing the conversion between the server (UTC) timezone and client timezone. 
diff --git a/shotgun_api3/lib/requirements.txt b/shotgun_api3/lib/requirements.txt index f91a3ae19..a3bc11436 100644 --- a/shotgun_api3/lib/requirements.txt +++ b/shotgun_api3/lib/requirements.txt @@ -29,6 +29,5 @@ # This file is unused. It is left there so Github can warn us is a CVE is # released for our dependencies. httplib2==0.22.0 -six==1.13.0 -certifi==2025.7.14 +certifi==2024.7.4 pyparsing==2.4.7 diff --git a/shotgun_api3/lib/sgsix.py b/shotgun_api3/lib/sgsix.py deleted file mode 100644 index 6c2af1abc..000000000 --- a/shotgun_api3/lib/sgsix.py +++ /dev/null @@ -1,87 +0,0 @@ -""" - ----------------------------------------------------------------------------- - Copyright (c) 2009-2019, Shotgun Software Inc. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - - Neither the name of the Shotgun Software Inc nor the names of its - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" - -# This module contains addtional functions and variables to supplement the six -# module for python 2/3 compatibility. - -from . import six -import io -import sys - -# For python 3, the `file` type no longer exists, and open() returns an -# io.IOBase instance. We add file_types to allow comparison across python -# versions. See https://stackoverflow.com/questions/36321030#36321030 -# -# This means that to test if a variable contains a file in both Python 2 and 3 -# you can use an isinstance test like: -# isinstance(value, sgsix.file_types) -if six.PY3: - file_types = (io.IOBase, ) -else: - file_types = (file, io.IOBase) # noqa warning for undefined `file` in python 3 - -# For python-api calls that result in an SSL error, the exception raised is -# different on Python 2 and 3. Store the approriate exception class in a -# variable to allow easier exception handling across Python 2/3. -if six.PY3: - import ssl - ShotgunSSLError = ssl.SSLError -else: - from .httplib2 import SSLHandshakeError - ShotgunSSLError = SSLHandshakeError - - -def normalize_platform(platform, python2=True): - """ - Normalize the return of sys.platform between Python 2 and 3. - - On Python 2 on linux hosts, sys.platform was 'linux' appended with the - current kernel version that Python was built on. In Python3, this was - changed and sys.platform now returns 'linux' regardless of the kernel version. 
- See https://bugs.python.org/issue12326 - This function will normalize platform strings to always conform to Python2 or - Python3 behavior. - - :param str platform: The platform string to normalize - :param bool python2: The python version behavior to target. If True, a - Python2-style platform string will be returned (i.e. 'linux2'), otherwise - the modern 'linux' platform string will be returned. - - :returns: The normalized platform string. - :rtype: str - """ - if python2: - return "linux2" if platform.startswith("linux") else platform - return "linux" if platform.startswith("linux") else platform - - -# sgsix.platform will mimick the python2 sys.platform behavior to ensure -# compatibility with existing comparisons and dict keys. -platform = normalize_platform(sys.platform) diff --git a/shotgun_api3/lib/sgutils.py b/shotgun_api3/lib/sgutils.py deleted file mode 100644 index 0d49e4b39..000000000 --- a/shotgun_api3/lib/sgutils.py +++ /dev/null @@ -1,62 +0,0 @@ -""" - ----------------------------------------------------------------------------- - Copyright (c) 2009-2024, Shotgun Software Inc. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - - Neither the name of the Shotgun Software Inc nor the names of its - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. 
- - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """ - Coerce **s** to bytes. - - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, str): - return s.encode(encoding, errors) - elif isinstance(s, bytes): - return s - else: - raise TypeError(f"not expecting type '{type(s)}'") - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. 
- - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, str): - return s - - elif isinstance(s, bytes): - return s.decode(encoding, errors) - - raise TypeError(f"not expecting type '{type(s)}'") - - -ensure_text = ensure_str diff --git a/shotgun_api3/lib/six.py b/shotgun_api3/lib/six.py deleted file mode 100644 index b22d2e57d..000000000 --- a/shotgun_api3/lib/six.py +++ /dev/null @@ -1,964 +0,0 @@ -# Copyright (c) 2010-2019 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.13.0" - - -# Useful for very coarse version differentiation. 
-PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) -PY38 = sys.version_info[0:2] >= (3, 8) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. - try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. 
- delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." + fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." 
+ fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - 
MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - 
MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = 
_urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - 
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", 
"urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - 
except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = 
operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" -_add_doc(b, """Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, 
_assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] == (3, 2): - exec_("""def raise_from(value, from_value): - try: - if from_value is None: - raise value - raise value from from_value - finally: - value = None -""") -elif sys.version_info[:2] > (3, 2): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - def wrapper(f): - f = functools.wraps(wrapped, assigned, updated)(f) - f.__wrapped__ = wrapped - return f - return wrapper -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. 
- class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). - resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. 
- - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. - - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - - -def python_2_unicode_compatible(klass): - """ - A decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) 
-if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. - if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 16c073d01..54179cdfd 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -55,9 +55,6 @@ # to be exposed as part of the API. from xmlrpc.client import Error, ProtocolError, ResponseError # noqa -# Python 2/3 compatibility -from .lib import six -from .lib import sgutils from .lib.httplib2 import Http, ProxyInfo, socks from .lib.sgtimezone import SgTimezone @@ -734,7 +731,7 @@ def _split_url(self, base_url): In python 3.8 `urllib.parse.splituser` was deprecated warning devs to use `urllib.parse.urlparse`. """ - if six.PY38: + if (sys.version_info.major, sys.version_info.minor) >= (3, 8): auth = None results = urllib.parse.urlparse(base_url) server = results.hostname @@ -4603,7 +4600,7 @@ def connect(self): "Connect to a host on a given (SSL) port." super().connect(self) # Now that the regular HTTP socket has been created, wrap it with our SSL certs. 
- if six.PY38: + if (sys.version_info.major, sys.version_info.minor) >= (3, 8): context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) context.verify_mode = ssl.CERT_REQUIRED context.check_hostname = False diff --git a/tests/base.py b/tests/base.py index 3795d93af..b20b5f1ef 100644 --- a/tests/base.py +++ b/tests/base.py @@ -13,7 +13,6 @@ import shotgun_api3 as api from shotgun_api3.shotgun import ServerCapabilities -from shotgun_api3.lib import six THUMBNAIL_MAX_ATTEMPTS = 30 diff --git a/tests/test_api_long.py b/tests/test_api_long.py index 29a34e991..4b652fc4a 100644 --- a/tests/test_api_long.py +++ b/tests/test_api_long.py @@ -16,7 +16,6 @@ from . import base import random import shotgun_api3 -from shotgun_api3.lib import six class TestShotgunApiLong(base.LiveTestBase): diff --git a/tests/test_client.py b/tests/test_client.py index 65678cc18..6c254264b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -26,8 +26,6 @@ import urllib.parse import urllib.error -from shotgun_api3.lib import six, sgutils - import shotgun_api3.lib.httplib2 as httplib2 import shotgun_api3 as api from shotgun_api3.shotgun import ServerCapabilities, SG_TIMEZONE From 0a03a6b460fcf34867ecac06076a61f823c8b698 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 10 Sep 2025 15:50:58 -0700 Subject: [PATCH 40/59] SG-38306 Remove Python 2 - Part 10 - SSL (#372) * SSL cleanups --------- Co-authored-by: Eduardo Chauca --- docs/advanced/iron_python.rst | 12 ++-- docs/reference.rst | 2 +- shotgun_api3/shotgun.py | 111 ++++------------------------------ tests/test_api.py | 71 ---------------------- tests/test_client.py | 9 +-- 5 files changed, 21 insertions(+), 184 deletions(-) diff --git a/docs/advanced/iron_python.rst b/docs/advanced/iron_python.rst index 62ad6d791..06786ab96 100644 --- a/docs/advanced/iron_python.rst +++ b/docs/advanced/iron_python.rst @@ -7,6 +7,12 @@ that we will be compatible with future releases of IronPython. 
While we don't of IronPython, we certainly will do our best to figure out any issues that come up while using it and how to avoid them. + +Legacy Information +------------------ + +This following information is provided for historical purposes only. + As of July 9, 2015 you can look at this fork of the repo to see what changes were needed as of that date to make things work. The original fork was as of v3.0.20 of the API. Big thanks to our awesome clients Pixomondo for making their work public and letting us refer to it: @@ -20,12 +26,6 @@ v3.0.20 can be used with IronPython with a little bit of added work: https://bitbucket.org/jdhardy/ironpythonzlib/src/. And the blog post about it here http://blog.jdhardy.ca/2008/12/solving-zlib-problem-ironpythonzlib.html -- If you encounter any SSL errors like - ``unknown field: SERIALNUMBER=0123456789`` or ``:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed``. - For now you can workaround this problem by disabling ssl certificate validation which we've - encountered some intermittent issues with. Set ``NO_SSL_VALIDATION = True`` for either case. - See :const:`shotgun_api3.shotgun.NO_SSL_VALIDATION` - - If you encounter ``LookupError: unknown encoding: idna``, you can force utf-8 by changing iri2uri.py ~ln 71 from ``authority = authority.encode('idna')`` to ``authority = authority.encode('utf-8')`` diff --git a/docs/reference.rst b/docs/reference.rst index 96c917469..6b16a37bd 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -15,7 +15,7 @@ The :mod:`~shotgun_api3.shotgun` module is a container for the :class:`~shotgun. class. There are a couple of useful attributes to note. .. 
automodule:: shotgun_api3.shotgun - :members: NO_SSL_VALIDATION, LOG + :members: LOG :private-members: :special-members: diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 54179cdfd..128896cc7 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -78,14 +78,6 @@ SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION = False -NO_SSL_VALIDATION = False -""" -Turns off hostname matching validation for SSL certificates - -Sometimes there are cases where certificate validation should be disabled. For example, if you -have a self-signed internal certificate that isn't included in our certificate bundle, you may -not require the added security provided by enforcing this. -""" # ---------------------------------------------------------------------------- # Version @@ -327,9 +319,7 @@ class ClientCapabilities(object): :ivar str local_path_field: The PTR field used for local file paths. This is calculated using the value of ``platform``. Ex. ``local_path_mac``. :ivar str py_version: Simple version of Python executable as a string. Eg. ``3.9``. - :ivar str ssl_version: Version of OpenSSL installed. Eg. ``OpenSSL 1.0.2g 1 Mar 2016``. This - info is only available in Python 2.7+ if the ssl module was imported successfully. - Defaults to ``unknown`` + :ivar str ssl_version: Version of OpenSSL installed. Eg. ``OpenSSL 1.0.2g 1 Mar 2016``. """ def __init__(self): @@ -350,14 +340,7 @@ def __init__(self): self.local_path_field = None self.py_version = ".".join(str(x) for x in sys.version_info[:2]) - - # extract the OpenSSL version if we can. 
The version is only available in Python 2.7 and - # only if we successfully imported ssl - self.ssl_version = "unknown" - try: - self.ssl_version = ssl.OPENSSL_VERSION - except (AttributeError, NameError): - pass + self.ssl_version = ssl.OPENSSL_VERSION def __str__(self): return ( @@ -425,7 +408,6 @@ def __init__(self, sg): self.proxy_pass = None self.session_token = None self.authorization = None - self.no_ssl_validation = False self.localized = False def set_server_params(self, base_url): @@ -616,7 +598,6 @@ def __init__( self.config.session_token = session_token self.config.sudo_as_login = sudo_as_login self.config.convert_datetimes_to_utc = convert_datetimes_to_utc - self.config.no_ssl_validation = NO_SSL_VALIDATION self.config.raw_http_proxy = http_proxy try: @@ -2264,14 +2245,10 @@ def reset_user_agent(self): ua_platform = self.client_caps.platform.capitalize() # create ssl validation string based on settings - validation_str = "validate" - if self.config.no_ssl_validation: - validation_str = "no-validate" - self._user_agents = [ "shotgun-json (%s)" % __version__, "Python %s (%s)" % (self.client_caps.py_version, ua_platform), - "ssl %s (%s)" % (self.client_caps.ssl_version, validation_str), + "ssl %s" % (self.client_caps.ssl_version), ] def set_session_uuid(self, session_uuid): @@ -3543,8 +3520,14 @@ def _build_opener(self, handler): Build urllib2 opener with appropriate proxy handler. """ handlers = [] - if self.__ca_certs and not NO_SSL_VALIDATION: - handlers.append(CACertsHTTPSHandler(self.__ca_certs)) + if self.__ca_certs: + handlers.append( + urllib.request.HTTPSHandler( + context=ssl.create_default_context( + cafile=self.__ca_certs, + ), + ), + ) if self.config.proxy_handler: handlers.append(self.config.proxy_handler) @@ -3613,23 +3596,6 @@ def _get_certs_file(cls, ca_certs): cert_file = os.path.join(cur_dir, "lib", "certifi", "cacert.pem") return cert_file - def _turn_off_ssl_validation(self): - """ - Turn off SSL certificate validation. 
- """ - global NO_SSL_VALIDATION - self.config.no_ssl_validation = True - NO_SSL_VALIDATION = True - # reset ssl-validation in user-agents - self._user_agents = [ - ( - "ssl %s (no-validate)" % self.client_caps.ssl_version - if ua.startswith("ssl ") - else ua - ) - for ua in self._user_agents - ] - # Deprecated methods from old wrapper def schema(self, entity_type): """ @@ -3843,44 +3809,7 @@ def _make_call(self, verb, path, body, headers): if attempt == max_rpc_attempts: LOG.debug("Request failed. Giving up after %d attempts." % attempt) raise - # This is the exact same block as the "except Exception" bellow. - # We need to do it here because the next except will match it - # otherwise and will not re-attempt. - # When we drop support of Python 2 and we will probably drop the - # next except, we might want to remove this except too. except (ssl.SSLError, ssl.CertificateError) as e: - # Test whether the exception is due to the fact that this is an older version of - # Python that cannot validate certificates encrypted with SHA-2. If it is, then - # fall back on disabling the certificate validation and try again - unless the - # SHOTGUN_FORCE_CERTIFICATE_VALIDATION environment variable has been set by the - # user. In that case we simply raise the exception. Any other exceptions simply - # get raised as well. - # - # For more info see: - # https://www.shotgridsoftware.com/blog/important-ssl-certificate-renewal-and-sha-2/ - # - # SHA-2 errors look like this: - # [Errno 1] _ssl.c:480: error:0D0C50A1:asn1 encoding routines:ASN1_item_verify: - # unknown message digest algorithm - # - # Any other exceptions simply get raised. - if ( - "unknown message digest algorithm" not in str(e) - or "SHOTGUN_FORCE_CERTIFICATE_VALIDATION" in os.environ - ): - raise - - if self.config.no_ssl_validation is False: - LOG.warning( - "SSL Error: this Python installation is incompatible with " - "certificates signed with SHA-2. Disabling certificate validation. 
" - "For more information, see https://www.shotgridsoftware.com/blog/" - "important-ssl-certificate-renewal-and-sha-2/" - ) - self._turn_off_ssl_validation() - # reload user agent to reflect that we have turned off ssl validation - req_headers["user-agent"] = "; ".join(self._user_agents) - self._close_connection() if attempt == max_rpc_attempts: LOG.debug("Request failed. Giving up after %d attempts." % attempt) @@ -4142,14 +4071,12 @@ def _get_connection(self): timeout=self.config.timeout_secs, ca_certs=self.__ca_certs, proxy_info=pi, - disable_ssl_certificate_validation=self.config.no_ssl_validation, ) else: self._connection = Http( timeout=self.config.timeout_secs, ca_certs=self.__ca_certs, proxy_info=None, - disable_ssl_certificate_validation=self.config.no_ssl_validation, ) return self._connection @@ -4613,22 +4540,6 @@ def connect(self): ) -class CACertsHTTPSHandler(urllib.request.HTTPHandler): - """ - Handler that ensures https connections are created with the custom CA certs. - """ - - def __init__(self, cacerts): - super().__init__(self) - self.__ca_certs = cacerts - - def https_open(self, req): - return self.do_open(self.create_https_connection, req) - - def create_https_connection(self, *args, **kwargs): - return CACertsHTTPSConnection(*args, ca_certs=self.__ca_certs, **kwargs) - - # Helpers from the previous API, left as is. 
# Based on http://code.activestate.com/recipes/146306/ class FormPostHandler(urllib.request.BaseHandler): diff --git a/tests/test_api.py b/tests/test_api.py index cbbc115fe..19b738f60 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -2214,77 +2214,6 @@ def my_side_effect2(*args, **kwargs): finally: self.sg.config.rpc_attempt_interval = bak_rpc_attempt_interval - @unittest.mock.patch("shotgun_api3.shotgun.Http.request") - def test_sha2_error(self, mock_request): - # Simulate the exception raised with SHA-2 errors - mock_request.side_effect = ssl.SSLError( - "[Errno 1] _ssl.c:480: error:0D0C50A1:asn1 " - "encoding routines:ASN1_item_verify: unknown message digest " - "algorithm" - ) - - # save the original state - original_env_val = os.environ.pop("SHOTGUN_FORCE_CERTIFICATE_VALIDATION", None) - - # ensure we're starting with the right values - self.sg.reset_user_agent() - - # ensure the initial settings are correct. These will be different depending on whether - # the ssl module imported successfully or not. 
- if "ssl" in sys.modules: - self.assertFalse(self.sg.config.no_ssl_validation) - self.assertFalse(shotgun_api3.shotgun.NO_SSL_VALIDATION) - self.assertTrue("(validate)" in " ".join(self.sg._user_agents)) - self.assertFalse("(no-validate)" in " ".join(self.sg._user_agents)) - else: - self.assertTrue(self.sg.config.no_ssl_validation) - self.assertTrue(shotgun_api3.shotgun.NO_SSL_VALIDATION) - self.assertFalse("(validate)" in " ".join(self.sg._user_agents)) - self.assertTrue("(no-validate)" in " ".join(self.sg._user_agents)) - - try: - self.sg.info() - except ssl.SSLError: - # ensure the api has reset the values in the correct fallback behavior - self.assertTrue(self.sg.config.no_ssl_validation) - self.assertTrue(shotgun_api3.shotgun.NO_SSL_VALIDATION) - self.assertFalse("(validate)" in " ".join(self.sg._user_agents)) - self.assertTrue("(no-validate)" in " ".join(self.sg._user_agents)) - - if original_env_val is not None: - os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - - @unittest.mock.patch("shotgun_api3.shotgun.Http.request") - def test_sha2_error_with_strict(self, mock_request): - # Simulate the exception raised with SHA-2 errors - mock_request.side_effect = ssl.SSLError( - "[Errno 1] _ssl.c:480: error:0D0C50A1:asn1 " - "encoding routines:ASN1_item_verify: unknown message digest " - "algorithm" - ) - - # save the original state - original_env_val = os.environ.pop("SHOTGUN_FORCE_CERTIFICATE_VALIDATION", None) - os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = "1" - - # ensure we're starting with the right values - self.sg.config.no_ssl_validation = False - shotgun_api3.shotgun.NO_SSL_VALIDATION = False - self.sg.reset_user_agent() - - try: - self.sg.info() - except ssl.SSLError: - # ensure the api has NOT reset the values in the fallback behavior because we have - # set the env variable to force validation - self.assertFalse(self.sg.config.no_ssl_validation) - self.assertFalse(shotgun_api3.shotgun.NO_SSL_VALIDATION) - 
self.assertFalse("(no-validate)" in " ".join(self.sg._user_agents)) - self.assertTrue("(validate)" in " ".join(self.sg._user_agents)) - - if original_env_val is not None: - os.environ["SHOTGUN_FORCE_CERTIFICATE_VALIDATION"] = original_env_val - @unittest.mock.patch.object(urllib.request.OpenerDirector, "open") def test_sanitized_auth_params(self, mock_open): # Simulate the server blowing up and giving us a 500 error diff --git a/tests/test_client.py b/tests/test_client.py index 6c254264b..1e41ea914 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -269,12 +269,11 @@ def test_user_agent(self): args, _ = self.sg._http_request.call_args (_, _, _, headers) = args ssl_validate_lut = {True: "no-validate", False: "validate"} - expected = "shotgun-json (%s); Python %s (%s); ssl %s (%s)" % ( + expected = "shotgun-json (%s); Python %s (%s); ssl %s" % ( api.__version__, client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, headers.get("user-agent")) @@ -283,12 +282,11 @@ def test_user_agent(self): self.sg.info() args, _ = self.sg._http_request.call_args (_, _, _, headers) = args - expected = "shotgun-json (%s); Python %s (%s); ssl %s (%s); test-agent" % ( + expected = "shotgun-json (%s); Python %s (%s); ssl %s; test-agent" % ( api.__version__, client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, headers.get("user-agent")) @@ -297,12 +295,11 @@ def test_user_agent(self): self.sg.info() args, _ = self.sg._http_request.call_args (_, _, _, headers) = args - expected = "shotgun-json (%s); Python %s (%s); ssl %s (%s)" % ( + expected = "shotgun-json (%s); Python %s (%s); ssl %s" % ( api.__version__, client_caps.py_version, client_caps.platform.capitalize(), client_caps.ssl_version, - ssl_validate_lut[config.no_ssl_validation], ) self.assertEqual(expected, 
headers.get("user-agent")) From 09dd4cbd395fc1c8a67bdb99bf14111b296a1ab9 Mon Sep 17 00:00:00 2001 From: Eduardo Chauca <166560435+eduardoChaucaGallegos@users.noreply.github.com> Date: Wed, 10 Sep 2025 18:34:01 -0500 Subject: [PATCH 41/59] Packaging for the v3.9.0 release (#413) * packaging for the v3.9.0 release --- HISTORY.rst | 8 +++++++- setup.py | 2 +- shotgun_api3/shotgun.py | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index c19b61fec..30b68b792 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,7 +4,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. -v3.8.5 (2025 Xxx X) +v3.9.0 (2025 Sep 10) +=================== + +- Removed Python 2 code. +- Removed the six module. Note: if your code depends on the six library previously included in this package, you will need to update it, as it is no longer supported. + +v3.8.5 (2025 Jul 31) =================== - We don't want to retry on general exceptions (e.g. 
timeout or remote disconnection) diff --git a/setup.py b/setup.py index 9240486b5..647661096 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.8.4", + version="3.9.0", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 128896cc7..67996c9f3 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -81,7 +81,7 @@ # ---------------------------------------------------------------------------- # Version -__version__ = "3.8.4" +__version__ = "3.9.0" # ---------------------------------------------------------------------------- # Errors From c1d99f5d0533fa732f78aeca1af66f9550b68fb9 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 24 Sep 2025 11:50:02 -0700 Subject: [PATCH 42/59] Fixup "Title underline too short" warning messages (#414) Those happened when building the documentation --- HISTORY.rst | 4 ++-- docs/cookbook/attachments.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 30b68b792..7b9fc4018 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -5,13 +5,13 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. v3.9.0 (2025 Sep 10) -=================== +==================== - Removed Python 2 code. - Removed the six module. Note: if your code depends on the six library previously included in this package, you will need to update it, as it is no longer supported. v3.8.5 (2025 Jul 31) -=================== +==================== - We don't want to retry on general exceptions (e.g. 
timeout or remote disconnection) because we might send a resource modification request (create, batch create, etc) and diff --git a/docs/cookbook/attachments.rst b/docs/cookbook/attachments.rst index 934ed58e8..de992431d 100644 --- a/docs/cookbook/attachments.rst +++ b/docs/cookbook/attachments.rst @@ -304,7 +304,7 @@ defaults. Any other keys that are provided will be ignored. Alternative to ``local_path`` Example 1: Using ``local_path`` ------------------------------- +------------------------------- :: @@ -345,7 +345,7 @@ the most appropriate specific LocalStorage match and assigned it to local_storag Example 2: Using ``relative_path`` ---------------------------------- +---------------------------------- :: From 5112d082b6fa9e017529898617ff2ff3d0a46e48 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Thu, 2 Oct 2025 15:34:07 -0700 Subject: [PATCH 43/59] SG-40165 Update support URL (#411) --- docs/advanced.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/advanced.rst b/docs/advanced.rst index f10437ff5..b4bc73a35 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -5,7 +5,7 @@ Advanced Topics ############### Below are some more advanced topics regarding usage of the Python API. If you would like to see -something that's missing here, please feel free to contact support at https://knowledge.autodesk.com/contact-support +something that's missing here, please feel free to contact support at https://knowledge.autodesk.com/support with your suggestions and we'll get it added! .. 
toctree:: From c7c3bc7b82fd773d0ccbae87d6bfe891471a8fa6 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Mon, 6 Oct 2025 18:05:02 -0700 Subject: [PATCH 44/59] SG-40165 Update Autodesk support URL (#416) --- README.md | 2 +- docs/advanced.rst | 2 +- docs/reference.rst | 2 +- setup.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f37c5dc21..000e37d84 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ # Flow Production Tracking Python API -Autodesk provides a simple Python-based API for accessing Flow Production Tracking and integrating with other tools. This is the official API that is maintained by Autodesk (https://knowledge.autodesk.com/contact-support) +Autodesk provides a simple Python-based API for accessing Flow Production Tracking and integrating with other tools. This is the official API that is maintained by Autodesk (https://www.autodesk.com/support) The latest version can always be found at http://github.com/shotgunsoftware/python-api diff --git a/docs/advanced.rst b/docs/advanced.rst index b4bc73a35..762f851ea 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -5,7 +5,7 @@ Advanced Topics ############### Below are some more advanced topics regarding usage of the Python API. If you would like to see -something that's missing here, please feel free to contact support at https://knowledge.autodesk.com/support +something that's missing here, please feel free to contact support at https://www.autodesk.com/support with your suggestions and we'll get it added! .. toctree:: diff --git a/docs/reference.rst b/docs/reference.rst index 6b16a37bd..28bfabf1b 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -931,7 +931,7 @@ usage. 
This **does not** mean your Flow Production Tracking server performance will suffer in general, just any pages that are specifically displaying EventLogEntries in the web application, or API queries on the event log that are run. We are always looking for ways to improve this in the future. If you have any -immediate concerns, please `reach out to our support team `_ +immediate concerns, please `reach out to our support team `_ ********************* Environment Variables diff --git a/setup.py b/setup.py index 647661096..8d903f5f1 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", - author_email="https://www.autodesk.com/support/contact-support", + author_email="https://www.autodesk.com/support", url="https://github.com/shotgunsoftware/python-api", license=license, packages=find_packages(exclude=("tests",)), From 36724cf42cd27eedd727586a7f1cd9106236fbce Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Thu, 16 Oct 2025 13:22:59 -0700 Subject: [PATCH 45/59] SG-40026 Remove unused CACertsHTTPSConnection class (#418) * Remove unused CACertsHTTPSConnection class Should have been removed in #372 * Update shotgun_api3/shotgun.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- shotgun_api3/shotgun.py | 39 ++------------------------------------- tests/test_client.py | 4 ++-- 2 files changed, 4 insertions(+), 39 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 67996c9f3..6400ff373 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -4382,7 +4382,7 @@ def _upload_data_to_storage(self, data, content_type, size, storage_url): else: break else: - raise ShotgunError("Max attemps limit reached.") + raise ShotgunError("Max attempts limit reached.") etag = result.info()["Etag"]
LOG.debug("Part upload completed successfully.") @@ -4502,42 +4502,7 @@ def _send_form(self, url, params): return result else: - raise ShotgunError("Max attemps limit reached.") - - -class CACertsHTTPSConnection(http.client.HTTPConnection): - """ " - This class allows to create an HTTPS connection that uses the custom certificates - passed in. - """ - - default_port = http.client.HTTPS_PORT - - def __init__(self, *args, **kwargs): - """ - :param args: Positional arguments passed down to the base class. - :param ca_certs: Path to the custom CA certs file. - :param kwargs: Keyword arguments passed down to the bas class - """ - # Pop that argument, - self.__ca_certs = kwargs.pop("ca_certs") - super().__init__(self, *args, **kwargs) - - def connect(self): - "Connect to a host on a given (SSL) port." - super().connect(self) - # Now that the regular HTTP socket has been created, wrap it with our SSL certs. - if (sys.version_info.major, sys.version_info.minor) >= (3, 8): - context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - context.verify_mode = ssl.CERT_REQUIRED - context.check_hostname = False - if self.__ca_certs: - context.load_verify_locations(self.__ca_certs) - self.sock = context.wrap_socket(self.sock) - else: - self.sock = ssl.wrap_socket( - self.sock, ca_certs=self.__ca_certs, cert_reqs=ssl.CERT_REQUIRED - ) + raise ShotgunError("Max attempts limit reached.") # Helpers from the previous API, left as is. 
diff --git a/tests/test_client.py b/tests/test_client.py index 1e41ea914..a43e3ed4a 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -523,7 +523,7 @@ def test_upload_s3_urlerror__get_attachment_upload_info(self): # Test the exception message the_exception = cm.exception - self.assertEqual(str(the_exception), "Max attemps limit reached.") + self.assertEqual(str(the_exception), "Max attempts limit reached.") def test_upload_s3_urlerror__upload_to_storage(self): """ @@ -557,7 +557,7 @@ def test_upload_s3_urlerror__upload_to_storage(self): # Test the exception message the_exception = cm.exception - self.assertEqual(str(the_exception), "Max attemps limit reached.") + self.assertEqual(str(the_exception), "Max attempts limit reached.") def test_transform_data(self): """Outbound data is transformed""" From 304a1534ac4d7758ba97d354f27a734fbd625622 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Mon, 27 Oct 2025 11:11:53 -0700 Subject: [PATCH 46/59] SG-39039 Issue a deprecation warning if using Python version <3.9 (#417) --- shotgun_api3/__init__.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/shotgun_api3/__init__.py b/shotgun_api3/__init__.py index d296aa97a..943a9fa8b 100644 --- a/shotgun_api3/__init__.py +++ b/shotgun_api3/__init__.py @@ -8,6 +8,19 @@ # agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. +import sys +import warnings + +if sys.version_info < (3, 9): + warnings.warn( + "Python versions older than 3.9 are no longer supported since 2025-03 " + "and compatibility will be removed at any time after 2026-01. 
" + "Please update to Python 3.9 or a newer supported version.", + DeprecationWarning, + stacklevel=2, + ) + + from .shotgun import ( Shotgun, ShotgunError, From 03811c10914710b60934c0095444abbaafa70a91 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio Date: Thu, 6 Nov 2025 14:34:17 -0500 Subject: [PATCH 47/59] SG-39414 Add type annotations (#422) * Add type annotations (from #393) * Add more fixes * Remove unused types --- setup.py | 6 +- shotgun_api3/lib/mockgun/mockgun.py | 5 +- shotgun_api3/lib/mockgun/schema.py | 2 +- shotgun_api3/py.typed | 0 shotgun_api3/shotgun.py | 585 +++++++++++++++++----------- 5 files changed, 371 insertions(+), 227 deletions(-) create mode 100644 shotgun_api3/py.typed diff --git a/setup.py b/setup.py index 8d903f5f1..0ddda9d79 100644 --- a/setup.py +++ b/setup.py @@ -30,17 +30,17 @@ packages=find_packages(exclude=("tests",)), script_args=sys.argv[1:], include_package_data=True, - package_data={"": ["cacerts.txt", "cacert.pem"]}, + package_data={"": ["cacerts.txt", "cacert.pem", "py.typed"]}, zip_safe=False, - python_requires=">=3.7.0", + python_requires=">=3.9.0", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", ], ) diff --git a/shotgun_api3/lib/mockgun/mockgun.py b/shotgun_api3/lib/mockgun/mockgun.py index 45d0b2aa5..522e162d9 100644 --- a/shotgun_api3/lib/mockgun/mockgun.py +++ b/shotgun_api3/lib/mockgun/mockgun.py @@ -115,6 +115,7 @@ """ import datetime +from typing import Any from ... 
import ShotgunError from ...shotgun import _Config @@ -580,7 +581,7 @@ def _get_new_row(self, entity_type): row[field] = default_value return row - def _compare(self, field_type, lval, operator, rval): + def _compare(self, field_type: str, lval: Any, operator: str, rval: Any) -> bool: """ Compares a field using the operator and value provide by the filter. @@ -797,7 +798,7 @@ def _row_matches_filter(self, entity_type, row, sg_filter, retired_only): return self._compare(field_type, lval, operator, rval) - def _rearrange_filters(self, filters): + def _rearrange_filters(self, filters: list) -> None: """ Modifies the filter syntax to turn it into a list of three items regardless of the actual filter. Most of the filters are list of three elements, so this doesn't change much. diff --git a/shotgun_api3/lib/mockgun/schema.py b/shotgun_api3/lib/mockgun/schema.py index ab671629d..f5d9312cc 100644 --- a/shotgun_api3/lib/mockgun/schema.py +++ b/shotgun_api3/lib/mockgun/schema.py @@ -47,7 +47,7 @@ class SchemaFactory(object): _schema_cache_path = None @classmethod - def get_schemas(cls, schema_path, schema_entity_path): + def get_schemas(cls, schema_path: str, schema_entity_path: str) -> tuple: """ Retrieves the schemas from disk. diff --git a/shotgun_api3/py.typed b/shotgun_api3/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 6400ff373..0c0c9cd5c 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -50,6 +50,16 @@ import urllib.request import uuid # used for attachment upload import xml.etree.ElementTree +from typing import ( + Any, + BinaryIO, + Iterable, + Literal, + Optional, + TypedDict, + TypeVar, + Union, +) # Import Error and ResponseError (even though they're unused in this file) since they need # to be exposed as part of the API. 
@@ -83,6 +93,30 @@ # Version __version__ = "3.9.0" + +# ---------------------------------------------------------------------------- +# Types + + +T = TypeVar("T") + + +class OrderItem(TypedDict): + field_name: str + direction: str + + +class GroupingItem(TypedDict): + field: str + type: str + direction: str + + +class BaseEntity(TypedDict, total=False): + id: int + type: str + + # ---------------------------------------------------------------------------- # Errors @@ -168,7 +202,7 @@ class ServerCapabilities(object): the future. Therefore, usage of this class is discouraged. """ - def __init__(self, host, meta): + def __init__(self, host: str, meta: dict[str, Any]) -> None: """ ServerCapabilities.__init__ @@ -208,14 +242,14 @@ def __init__(self, host, meta): self.version = tuple(self.version[:3]) self._ensure_json_supported() - def _ensure_python_version_supported(self): + def _ensure_python_version_supported(self) -> None: """ Checks the if current Python version is supported. """ if sys.version_info < (3, 7): raise ShotgunError("This module requires Python version 3.7 or higher.") - def _ensure_support(self, feature, raise_hell=True): + def _ensure_support(self, feature: dict[str, Any], raise_hell: bool = True) -> bool: """ Checks the server version supports a given feature, raises an exception if it does not. @@ -243,13 +277,13 @@ def _ensure_support(self, feature, raise_hell=True): else: return True - def _ensure_json_supported(self): + def _ensure_json_supported(self) -> None: """ Ensures server has support for JSON API endpoint added in v2.4.0. """ self._ensure_support({"version": (2, 4, 0), "label": "JSON API"}) - def ensure_include_archived_projects(self): + def ensure_include_archived_projects(self) -> None: """ Ensures server has support for archived Projects feature added in v5.3.14. 
""" @@ -257,7 +291,7 @@ def ensure_include_archived_projects(self): {"version": (5, 3, 14), "label": "include_archived_projects parameter"} ) - def ensure_per_project_customization(self): + def ensure_per_project_customization(self) -> bool: """ Ensures server has support for per-project customization feature added in v5.4.4. """ @@ -265,7 +299,7 @@ def ensure_per_project_customization(self): {"version": (5, 4, 4), "label": "project parameter"}, True ) - def ensure_support_for_additional_filter_presets(self): + def ensure_support_for_additional_filter_presets(self) -> bool: """ Ensures server has support for additional filter presets feature added in v7.0.0. """ @@ -273,7 +307,7 @@ def ensure_support_for_additional_filter_presets(self): {"version": (7, 0, 0), "label": "additional_filter_presets parameter"}, True ) - def ensure_user_following_support(self): + def ensure_user_following_support(self) -> bool: """ Ensures server has support for listing items a user is following, added in v7.0.12. """ @@ -281,7 +315,7 @@ def ensure_user_following_support(self): {"version": (7, 0, 12), "label": "user_following parameter"}, True ) - def ensure_paging_info_without_counts_support(self): + def ensure_paging_info_without_counts_support(self) -> bool: """ Ensures server has support for optimized pagination, added in v7.4.0. """ @@ -289,7 +323,7 @@ def ensure_paging_info_without_counts_support(self): {"version": (7, 4, 0), "label": "optimized pagination"}, False ) - def ensure_return_image_urls_support(self): + def ensure_return_image_urls_support(self) -> bool: """ Ensures server has support for returning thumbnail URLs without additional round-trips, added in v3.3.0. 
""" @@ -297,7 +331,7 @@ def ensure_return_image_urls_support(self): {"version": (3, 3, 0), "label": "return thumbnail URLs"}, False ) - def __str__(self): + def __str__(self) -> str: return "ServerCapabilities: host %s, version %s, is_dev %s" % ( self.host, self.version, @@ -355,7 +389,7 @@ class _Config(object): Container for the client configuration. """ - def __init__(self, sg): + def __init__(self, sg: "Shotgun"): """ :param sg: Shotgun connection. """ @@ -376,41 +410,41 @@ def __init__(self, sg): # If the optional timeout parameter is given, blocking operations # (like connection attempts) will timeout after that many seconds # (if it is not given, the global default timeout setting is used) - self.timeout_secs = None + self.timeout_secs: Optional[float] = None self.api_ver = "api3" self.convert_datetimes_to_utc = True - self._records_per_page = None - self.api_key = None - self.script_name = None - self.user_login = None - self.user_password = None - self.auth_token = None - self.sudo_as_login = None + self._records_per_page: Optional[int] = None + self.api_key: Optional[str] = None + self.script_name: Optional[str] = None + self.user_login: Optional[str] = None + self.user_password: Optional[str] = None + self.auth_token: Optional[str] = None + self.sudo_as_login: Optional[str] = None # Authentication parameters to be folded into final auth_params dict - self.extra_auth_params = None + self.extra_auth_params: Optional[dict[str, Any]] = None # uuid as a string - self.session_uuid = None - self.scheme = None - self.server = None - self.api_path = None + self.session_uuid: Optional[str] = None + self.scheme: Optional[str] = None + self.server: Optional[str] = None + self.api_path: Optional[str] = None # The raw_http_proxy reflects the exact string passed in # to the Shotgun constructor. This can be useful if you # need to construct a Shotgun API instance based on # another Shotgun API instance. 
- self.raw_http_proxy = None + self.raw_http_proxy: Optional[str] = None # if a proxy server is being used, the proxy_handler # below will contain a urllib2.ProxyHandler instance # which can be used whenever a request needs to be made. - self.proxy_handler = None - self.proxy_server = None + self.proxy_handler: Optional["urllib.request.ProxyHandler"] = None + self.proxy_server: Optional[str] = None self.proxy_port = 8080 - self.proxy_user = None - self.proxy_pass = None - self.session_token = None - self.authorization = None + self.proxy_user: Optional[str] = None + self.proxy_pass: Optional[str] = None + self.session_token: Optional[str] = None + self.authorization: Optional[str] = None self.localized = False - def set_server_params(self, base_url): + def set_server_params(self, base_url: str) -> None: """ Set the different server related fields based on the passed in URL. @@ -432,7 +466,7 @@ def set_server_params(self, base_url): ) @property - def records_per_page(self): + def records_per_page(self) -> int: """ The records per page value from the server. """ @@ -465,19 +499,19 @@ class Shotgun(object): def __init__( self, - base_url, - script_name=None, - api_key=None, - convert_datetimes_to_utc=True, - http_proxy=None, - connect=True, - ca_certs=None, - login=None, - password=None, - sudo_as_login=None, - session_token=None, - auth_token=None, - ): + base_url: str, + script_name: Optional[str] = None, + api_key: Optional[str] = None, + convert_datetimes_to_utc: bool = True, + http_proxy: Optional[str] = None, + connect: bool = True, + ca_certs: Optional[str] = None, + login: Optional[str] = None, + password: Optional[str] = None, + sudo_as_login: Optional[str] = None, + session_token: Optional[str] = None, + auth_token: Optional[str] = None, + ) -> None: """ Initializes a new instance of the Shotgun client. 
@@ -589,7 +623,7 @@ def __init__( "must provide login/password, session_token or script_name/api_key" ) - self.config = _Config(self) + self.config: _Config = _Config(self) self.config.api_key = api_key self.config.script_name = script_name self.config.user_login = login @@ -625,7 +659,7 @@ def __init__( ): SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION = True - self._connection = None + self._connection: Optional[Http] = None self.__ca_certs = self._get_certs_file(ca_certs) @@ -690,7 +724,7 @@ def __init__( # this relies on self.client_caps being set first self.reset_user_agent() - self._server_caps = None + self._server_caps: Optional[ServerCapabilities] = None # test to ensure the the server supports the json API # call to server will only be made once and will raise error if connect: @@ -704,7 +738,7 @@ def __init__( self.config.user_password = None self.config.auth_token = None - def _split_url(self, base_url): + def _split_url(self, base_url: str) -> tuple[Optional[str], Optional[str]]: """ Extract the hostname:port and username/password/token from base_url sent when connect to the API. @@ -736,7 +770,7 @@ def _split_url(self, base_url): # API Functions @property - def server_info(self): + def server_info(self) -> dict[str, Any]: """ Property containing server information. @@ -754,7 +788,7 @@ def server_info(self): return self.server_caps.server_info @property - def server_caps(self): + def server_caps(self) -> ServerCapabilities: """ Property containing :class:`ServerCapabilities` object. @@ -769,7 +803,7 @@ def server_caps(self): self._server_caps = ServerCapabilities(self.config.server, self.info()) return self._server_caps - def connect(self): + def connect(self) -> None: """ Connect client to the server if it is not already connected. @@ -780,7 +814,7 @@ def connect(self): self.info() return - def close(self): + def close(self) -> None: """ Close the current connection to the server. 
@@ -789,7 +823,7 @@ def close(self): self._close_connection() return - def info(self): + def info(self) -> dict[str, Any]: """ Get API-related metadata from the Shotgun server. @@ -822,15 +856,15 @@ def info(self): def find_one( self, - entity_type, - filters, - fields=None, - order=None, - filter_operator=None, - retired_only=False, - include_archived_projects=True, - additional_filter_presets=None, - ): + entity_type: str, + filters: Union[list, tuple, dict[str, Any]], + fields: Optional[list[str]] = None, + order: Optional[list[OrderItem]] = None, + filter_operator: Optional[Literal["all", "any"]] = None, + retired_only: bool = False, + include_archived_projects: bool = True, + additional_filter_presets: Optional[list[dict[str, Any]]] = None, + ) -> Optional[BaseEntity]: """ Shortcut for :meth:`~shotgun_api3.Shotgun.find` with ``limit=1`` so it returns a single result. @@ -845,7 +879,7 @@ def find_one( :param list fields: Optional list of fields to include in each entity record returned. Defaults to ``["id"]``. - :param int order: Optional list of fields to order the results by. List has the format:: + :param list order: Optional list of fields to order the results by. List has the format:: [ {'field_name':'foo', 'direction':'asc'}, @@ -862,7 +896,7 @@ def find_one( same query. :param bool include_archived_projects: Optional boolean flag to include entities whose projects have been archived. Defaults to ``True``. 
- :param additional_filter_presets: Optional list of presets to further filter the result + :param list additional_filter_presets: Optional list of presets to further filter the result set, list has the form:: [{ @@ -902,17 +936,17 @@ def find_one( def find( self, - entity_type, - filters, - fields=None, - order=None, - filter_operator=None, - limit=0, - retired_only=False, - page=0, - include_archived_projects=True, - additional_filter_presets=None, - ): + entity_type: str, + filters: Union[list, tuple, dict[str, Any]], + fields: Optional[list[str]] = None, + order: Optional[list[OrderItem]] = None, + filter_operator: Optional[Literal["all", "any"]] = None, + limit: int = 0, + retired_only: bool = False, + page: int = 0, + include_archived_projects: bool = True, + additional_filter_presets: Optional[list[dict[str, Any]]] = None, + ) -> list[BaseEntity]: """ Find entities matching the given filters. @@ -990,7 +1024,7 @@ def find( same query. :param bool include_archived_projects: Optional boolean flag to include entities whose projects have been archived. Defaults to ``True``. 
- :param additional_filter_presets: Optional list of presets to further filter the result + :param list additional_filter_presets: Optional list of presets to further filter the result set, list has the form:: [{ @@ -1101,15 +1135,15 @@ def find( def _construct_read_parameters( self, - entity_type, - fields, - filters, - retired_only, - order, - include_archived_projects, - additional_filter_presets, - ): - params = {} + entity_type: str, + fields: Optional[list[str]], + filters: dict[str, Any], + retired_only: bool, + order: Optional[list[dict[str, Any]]], + include_archived_projects: bool, + additional_filter_presets: Optional[list[dict[str, Any]]], + ) -> dict[str, Any]: + params: dict[str, Any] = {} params["type"] = entity_type params["return_fields"] = fields or ["id"] params["filters"] = filters @@ -1139,7 +1173,9 @@ def _construct_read_parameters( params["sorts"] = sort_list return params - def _add_project_param(self, params, project_entity): + def _add_project_param( + self, params: dict[str, Any], project_entity + ) -> dict[str, Any]: if project_entity and self.server_caps.ensure_per_project_customization(): params["project"] = project_entity @@ -1147,8 +1183,12 @@ def _add_project_param(self, params, project_entity): return params def _translate_update_params( - self, entity_type, entity_id, data, multi_entity_update_modes - ): + self, + entity_type: str, + entity_id: int, + data: dict, + multi_entity_update_modes: Optional[dict], + ) -> dict[str, Any]: global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION def optimize_field(field_dict): @@ -1170,13 +1210,13 @@ def optimize_field(field_dict): def summarize( self, - entity_type, - filters, - summary_fields, - filter_operator=None, - grouping=None, - include_archived_projects=True, - ): + entity_type: str, + filters: Union[list, dict[str, Any]], + summary_fields: list[dict[str, str]], + filter_operator: Optional[str] = None, + grouping: Optional[list[GroupingItem]] = None, + include_archived_projects: bool = 
True, + ) -> dict[str, Any]: """ Summarize field data returned by a query. @@ -1376,7 +1416,12 @@ def summarize( records = self._call_rpc("summarize", params) return records - def create(self, entity_type, data, return_fields=None): + def create( + self, + entity_type: str, + data: dict[str, Any], + return_fields: Optional[list] = None, + ) -> dict[str, Any]: """ Create a new entity of the specified ``entity_type``. @@ -1459,7 +1504,13 @@ def create(self, entity_type, data, return_fields=None): return result - def update(self, entity_type, entity_id, data, multi_entity_update_modes=None): + def update( + self, + entity_type: str, + entity_id: int, + data: dict[str, Any], + multi_entity_update_modes: Optional[dict[str, Any]] = None, + ) -> BaseEntity: """ Update the specified entity with the supplied data. @@ -1538,7 +1589,7 @@ def update(self, entity_type, entity_id, data, multi_entity_update_modes=None): return result - def delete(self, entity_type, entity_id): + def delete(self, entity_type: str, entity_id: int) -> bool: """ Retire the specified entity. @@ -1562,7 +1613,7 @@ def delete(self, entity_type, entity_id): return self._call_rpc("delete", params) - def revive(self, entity_type, entity_id): + def revive(self, entity_type: str, entity_id: int) -> bool: """ Revive an entity that has previously been deleted. @@ -1580,7 +1631,7 @@ def revive(self, entity_type, entity_id): return self._call_rpc("revive", params) - def batch(self, requests): + def batch(self, requests: list[dict[str, Any]]) -> list[dict[str, Any]]: """ Make a batch request of several :meth:`~shotgun_api3.Shotgun.create`, :meth:`~shotgun_api3.Shotgun.update`, and :meth:`~shotgun_api3.Shotgun.delete` calls. 
@@ -1695,7 +1746,13 @@ def _required_keys(message, required_keys, data): records = self._call_rpc("batch", calls) return self._parse_records(records) - def work_schedule_read(self, start_date, end_date, project=None, user=None): + def work_schedule_read( + self, + start_date: str, + end_date: str, + project: Optional[dict[str, Any]] = None, + user: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: """ Return the work day rules for a given date range. @@ -1766,13 +1823,13 @@ def work_schedule_read(self, start_date, end_date, project=None, user=None): def work_schedule_update( self, - date, - working, - description=None, - project=None, - user=None, - recalculate_field=None, - ): + date: str, + working: bool, + description: Optional[str] = None, + project: Optional[dict[str, Any]] = None, + user: Optional[dict[str, Any]] = None, + recalculate_field: Optional[str] = None, + ) -> dict[str, Any]: """ Update the work schedule for a given date. @@ -1826,7 +1883,7 @@ def work_schedule_update( return self._call_rpc("work_schedule_update", params) - def follow(self, user, entity): + def follow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, Any]: """ Add the entity to the user's followed entities. @@ -1854,7 +1911,7 @@ def follow(self, user, entity): return self._call_rpc("follow", params) - def unfollow(self, user, entity): + def unfollow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, Any]: """ Remove entity from the user's followed entities. @@ -1881,7 +1938,7 @@ def unfollow(self, user, entity): return self._call_rpc("unfollow", params) - def followers(self, entity): + def followers(self, entity: dict[str, Any]) -> list[dict[str, Any]]: """ Return all followers for an entity. 
@@ -1909,7 +1966,12 @@ def followers(self, entity): return self._call_rpc("followers", params) - def following(self, user, project=None, entity_type=None): + def following( + self, + user: dict[str, Any], + project: Optional[dict[str, Any]] = None, + entity_type: Optional[str] = None, + ) -> list[BaseEntity]: """ Return all entity instances a user is following. @@ -1940,7 +2002,9 @@ def following(self, user, project=None, entity_type=None): return self._call_rpc("following", params) - def schema_entity_read(self, project_entity=None): + def schema_entity_read( + self, project_entity: Optional[BaseEntity] = None + ) -> dict[str, dict[str, Any]]: """ Return all active entity types, their display names, and their visibility. @@ -1975,7 +2039,7 @@ def schema_entity_read(self, project_entity=None): The returned display names for this method will be localized when the ``localize`` Shotgun config property is set to ``True``. See :ref:`localization` for more information. """ - params = {} + params: dict[str, Any] = {} params = self._add_project_param(params, project_entity) @@ -1984,7 +2048,9 @@ def schema_entity_read(self, project_entity=None): else: return self._call_rpc("schema_entity_read", None) - def schema_read(self, project_entity=None): + def schema_read( + self, project_entity: Optional[BaseEntity] = None + ) -> dict[str, dict[str, Any]]: """ Get the schema for all fields on all entities. @@ -2047,7 +2113,7 @@ def schema_read(self, project_entity=None): The returned display names for this method will be localized when the ``localize`` Shotgun config property is set to ``True``. See :ref:`localization` for more information. 
""" - params = {} + params: dict[str, Any] = {} params = self._add_project_param(params, project_entity) @@ -2056,7 +2122,12 @@ def schema_read(self, project_entity=None): else: return self._call_rpc("schema_read", None) - def schema_field_read(self, entity_type, field_name=None, project_entity=None): + def schema_field_read( + self, + entity_type: str, + field_name: Optional[str] = None, + project_entity: Optional[BaseEntity] = None, + ) -> dict[str, dict[str, Any]]: """ Get schema for all fields on the specified entity type or just the field name specified if provided. @@ -2121,8 +2192,12 @@ def schema_field_read(self, entity_type, field_name=None, project_entity=None): return self._call_rpc("schema_field_read", params) def schema_field_create( - self, entity_type, data_type, display_name, properties=None - ): + self, + entity_type: str, + data_type: str, + display_name: str, + properties: Optional[dict[str, Any]] = None, + ) -> str: """ Create a field for the specified entity type. @@ -2160,8 +2235,12 @@ def schema_field_create( return self._call_rpc("schema_field_create", params) def schema_field_update( - self, entity_type, field_name, properties, project_entity=None - ): + self, + entity_type: str, + field_name: str, + properties: dict[str, Any], + project_entity: Optional[BaseEntity] = None, + ) -> bool: """ Update the properties for the specified field on an entity. @@ -2175,9 +2254,9 @@ def schema_field_update( >>> sg.schema_field_update("Asset", "sg_test_number", properties) True - :param entity_type: Entity type of field to update. - :param field_name: Internal Shotgun name of the field to update. - :param properties: Dictionary with key/value pairs where the key is the property to be + :param str entity_type: Entity type of field to update. + :param str field_name: Internal Shotgun name of the field to update. + :param dict properties: Dictionary with key/value pairs where the key is the property to be updated and the value is the new value. 
:param dict project_entity: Optional Project entity specifying which project to modify the ``visible`` property for. If ``visible`` is present in ``properties`` and @@ -2202,7 +2281,7 @@ def schema_field_update( params = self._add_project_param(params, project_entity) return self._call_rpc("schema_field_update", params) - def schema_field_delete(self, entity_type, field_name): + def schema_field_delete(self, entity_type: str, field_name: str) -> bool: """ Delete the specified field from the entity type. @@ -2219,7 +2298,7 @@ def schema_field_delete(self, entity_type, field_name): return self._call_rpc("schema_field_delete", params) - def add_user_agent(self, agent): + def add_user_agent(self, agent: str) -> None: """ Add agent to the user-agent header. @@ -2231,7 +2310,7 @@ def add_user_agent(self, agent): """ self._user_agents.append(agent) - def reset_user_agent(self): + def reset_user_agent(self) -> None: """ Reset user agent to the default value. @@ -2251,7 +2330,7 @@ def reset_user_agent(self): "ssl %s" % (self.client_caps.ssl_version), ] - def set_session_uuid(self, session_uuid): + def set_session_uuid(self, session_uuid: str) -> None: """ Set the browser session_uuid in the current Shotgun API instance. @@ -2269,12 +2348,12 @@ def set_session_uuid(self, session_uuid): def share_thumbnail( self, - entities, - thumbnail_path=None, - source_entity=None, - filmstrip_thumbnail=False, - **kwargs, - ): + entities: list[dict[str, Any]], + thumbnail_path: Optional[str] = None, + source_entity: Optional[BaseEntity] = None, + filmstrip_thumbnail: bool = False, + **kwargs: Any, + ) -> int: """ Associate a thumbnail with more than one Shotgun entity. 
@@ -2413,7 +2492,9 @@ def share_thumbnail( return attachment_id - def upload_thumbnail(self, entity_type, entity_id, path, **kwargs): + def upload_thumbnail( + self, entity_type: str, entity_id: int, path: str, **kwargs: Any + ) -> int: """ Upload a file from a local path and assign it as the thumbnail for the specified entity. @@ -2438,12 +2519,15 @@ def upload_thumbnail(self, entity_type, entity_id, path, **kwargs): :param int entity_id: Id of the entity to set the thumbnail for. :param str path: Full path to the thumbnail file on disk. :returns: Id of the new attachment + :rtype: int """ return self.upload( entity_type, entity_id, path, field_name="thumb_image", **kwargs ) - def upload_filmstrip_thumbnail(self, entity_type, entity_id, path, **kwargs): + def upload_filmstrip_thumbnail( + self, entity_type: str, entity_id: int, path: str, **kwargs: Any + ) -> int: """ Upload filmstrip thumbnail to specified entity. @@ -2494,13 +2578,13 @@ def upload_filmstrip_thumbnail(self, entity_type, entity_id, path, **kwargs): def upload( self, - entity_type, - entity_id, - path, - field_name=None, - display_name=None, - tag_list=None, - ): + entity_type: str, + entity_id: int, + path: str, + field_name: Optional[str] = None, + display_name: Optional[str] = None, + tag_list: Optional[str] = None, + ) -> int: """ Upload a file to the specified entity. @@ -2583,14 +2667,14 @@ def upload( def _upload_to_storage( self, - entity_type, - entity_id, - path, - field_name, - display_name, - tag_list, - is_thumbnail, - ): + entity_type: str, + entity_id: int, + path: str, + field_name: Optional[str], + display_name: Optional[str], + tag_list: Optional[str], + is_thumbnail: bool, + ) -> int: """ Internal function to upload a file to the Cloud storage and link it to the specified entity. 
@@ -2673,14 +2757,14 @@ def _upload_to_storage( def _upload_to_sg( self, - entity_type, - entity_id, - path, - field_name, - display_name, - tag_list, - is_thumbnail, - ): + entity_type: str, + entity_id: int, + path: str, + field_name: Optional[str], + display_name: Optional[str], + tag_list: Optional[str], + is_thumbnail: bool, + ) -> int: """ Internal function to upload a file to Shotgun and link it to the specified entity. @@ -2752,7 +2836,9 @@ def _upload_to_sg( attachment_id = int(result.split(":", 2)[1].split("\n", 1)[0]) return attachment_id - def _get_attachment_upload_info(self, is_thumbnail, filename, is_multipart_upload): + def _get_attachment_upload_info( + self, is_thumbnail: bool, filename: str, is_multipart_upload: bool + ) -> dict[str, Any]: """ Internal function to get the information needed to upload a file to Cloud storage. @@ -2799,7 +2885,12 @@ def _get_attachment_upload_info(self, is_thumbnail, filename, is_multipart_uploa "upload_info": upload_info, } - def download_attachment(self, attachment=False, file_path=None, attachment_id=None): + def download_attachment( + self, + attachment: Union[dict[str, Any], Literal[False]] = False, + file_path: Optional[str] = None, + attachment_id: Optional[int] = None, + ) -> Union[str, bytes, None]: """ Download the file associated with a Shotgun Attachment. @@ -2915,7 +3006,7 @@ def download_attachment(self, attachment=False, file_path=None, attachment_id=No else: return attachment - def get_auth_cookie_handler(self): + def get_auth_cookie_handler(self) -> urllib.request.HTTPCookieProcessor: """ Return an urllib cookie handler containing a cookie for FPTR authentication. @@ -2947,7 +3038,9 @@ def get_auth_cookie_handler(self): cj.set_cookie(c) return urllib.request.HTTPCookieProcessor(cj) - def get_attachment_download_url(self, attachment): + def get_attachment_download_url( + self, attachment: Optional[Union[int, dict[str, Any]]] + ) -> str: """ Return the URL for downloading provided Attachment. 
@@ -3005,7 +3098,9 @@ def get_attachment_download_url(self, attachment): ) return url - def authenticate_human_user(self, user_login, user_password, auth_token=None): + def authenticate_human_user( + self, user_login: str, user_password: str, auth_token: Optional[str] = None + ) -> Union[dict[str, Any], None]: """ Authenticate Shotgun HumanUser. @@ -3064,7 +3159,9 @@ def authenticate_human_user(self, user_login, user_password, auth_token=None): self.config.auth_token = original_auth_token raise - def update_project_last_accessed(self, project, user=None): + def update_project_last_accessed( + self, project: dict[str, Any], user: Optional[dict[str, Any]] = None + ) -> None: """ Update a Project's ``last_accessed_by_current_user`` field to the current timestamp. @@ -3110,7 +3207,9 @@ def update_project_last_accessed(self, project, user=None): record = self._call_rpc("update_project_last_accessed_by_current_user", params) self._parse_records(record)[0] - def note_thread_read(self, note_id, entity_fields=None): + def note_thread_read( + self, note_id: int, entity_fields: Optional[dict[str, Any]] = None + ) -> list[dict[str, Any]]: """ Return the full conversation for a given note, including Replies and Attachments. @@ -3185,7 +3284,13 @@ def note_thread_read(self, note_id, entity_fields=None): result = self._parse_records(record) return result - def text_search(self, text, entity_types, project_ids=None, limit=None): + def text_search( + self, + text: str, + entity_types: dict[str, Any], + project_ids: Optional[list] = None, + limit: Optional[int] = None, + ) -> dict[str, Any]: """ Search across the specified entity types for the given text. 
@@ -3279,13 +3384,13 @@ def text_search(self, text, entity_types, project_ids=None, limit=None): def activity_stream_read( self, - entity_type, - entity_id, - entity_fields=None, - min_id=None, - max_id=None, - limit=None, - ): + entity_type: str, + entity_id: int, + entity_fields: Optional[dict[str, Any]] = None, + min_id: Optional[int] = None, + max_id: Optional[int] = None, + limit: Optional[int] = None, + ) -> dict[str, Any]: """ Retrieve activity stream data from Shotgun. @@ -3375,7 +3480,7 @@ def activity_stream_read( result = self._parse_records(record)[0] return result - def nav_expand(self, path, seed_entity_field=None, entity_fields=None): + def nav_expand(self, path: str, seed_entity_field=None, entity_fields=None): """ Expand the navigation hierarchy for the supplied path. @@ -3395,7 +3500,9 @@ def nav_expand(self, path, seed_entity_field=None, entity_fields=None): }, ) - def nav_search_string(self, root_path, search_string, seed_entity_field=None): + def nav_search_string( + self, root_path: str, search_string: str, seed_entity_field=None + ): """ Search function adapted to work with the navigation hierarchy. @@ -3414,7 +3521,12 @@ def nav_search_string(self, root_path, search_string, seed_entity_field=None): }, ) - def nav_search_entity(self, root_path, entity, seed_entity_field=None): + def nav_search_entity( + self, + root_path: str, + entity: dict[str, Any], + seed_entity_field: Optional[dict[str, Any]] = None, + ): """ Search function adapted to work with the navigation hierarchy. @@ -3434,7 +3546,7 @@ def nav_search_entity(self, root_path, entity, seed_entity_field=None): }, ) - def get_session_token(self): + def get_session_token(self) -> str: """ Get the session token associated with the current session. @@ -3458,7 +3570,7 @@ def get_session_token(self): return session_token - def preferences_read(self, prefs=None): + def preferences_read(self, prefs: Optional[list] = None) -> dict[str, Any]: """ Get a subset of the site preferences. 
@@ -3481,7 +3593,7 @@ def preferences_read(self, prefs=None): return self._call_rpc("preferences_read", {"prefs": prefs}) - def user_subscriptions_read(self): + def user_subscriptions_read(self) -> list: """ Get the list of user subscriptions. @@ -3493,8 +3605,9 @@ def user_subscriptions_read(self): return self._call_rpc("user_subscriptions_read", None) - def user_subscriptions_create(self, users): - # type: (list[dict[str, Union[str, list[str], None]) -> bool + def user_subscriptions_create( + self, users: list[dict[str, Union[str, list[str], None]]] + ) -> bool: """ Assign subscriptions to users. @@ -3515,7 +3628,7 @@ def user_subscriptions_create(self, users): return response.get("status") == "success" - def _build_opener(self, handler): + def _build_opener(self, handler) -> urllib.request.OpenerDirector: """ Build urllib2 opener with appropriate proxy handler. """ @@ -3616,7 +3729,13 @@ def entity_types(self): # ======================================================================== # RPC Functions - def _call_rpc(self, method, params, include_auth_params=True, first=False): + def _call_rpc( + self, + method: str, + params: Any, + include_auth_params: bool = True, + first: bool = False, + ) -> Any: """ Call the specified method on the Shotgun Server sending the supplied payload. """ @@ -3680,7 +3799,7 @@ def _call_rpc(self, method, params, include_auth_params=True, first=False): return results[0] return results - def _auth_params(self): + def _auth_params(self) -> dict[str, Any]: """ Return a dictionary of the authentication parameters being used. """ @@ -3735,7 +3854,7 @@ def _auth_params(self): return auth_params - def _sanitize_auth_params(self, params): + def _sanitize_auth_params(self, params: dict[str, Any]) -> dict[str, Any]: """ Given an authentication parameter dictionary, sanitize any sensitive information and return the sanitized dict copy. 
@@ -3746,7 +3865,9 @@ def _sanitize_auth_params(self, params): sanitized_params[k] = "********" return sanitized_params - def _build_payload(self, method, params, include_auth_params=True): + def _build_payload( + self, method: str, params, include_auth_params: bool = True + ) -> dict[str, Any]: """ Build the payload to be send to the rpc endpoint. """ @@ -3764,7 +3885,7 @@ def _build_payload(self, method, params, include_auth_params=True): return {"method_name": method, "params": call_params} - def _encode_payload(self, payload): + def _encode_payload(self, payload) -> bytes: """ Encode the payload to a string to be passed to the rpc endpoint. @@ -3775,7 +3896,9 @@ def _encode_payload(self, payload): return json.dumps(payload, ensure_ascii=False).encode("utf-8") - def _make_call(self, verb, path, body, headers): + def _make_call( + self, verb: str, path: str, body, headers: Optional[dict[str, Any]] + ) -> tuple[tuple[int, str], dict[str, Any], str]: """ Make an HTTP call to the server. @@ -3825,7 +3948,9 @@ def _make_call(self, verb, path, body, headers): ) time.sleep(rpc_attempt_interval) - def _http_request(self, verb, path, body, headers): + def _http_request( + self, verb: str, path: str, body, headers: dict[str, Any] + ) -> tuple[tuple[int, str], dict[str, Any], str]: """ Make the actual HTTP request. """ @@ -3849,7 +3974,9 @@ def _http_request(self, verb, path, body, headers): return (http_status, resp_headers, resp_body) - def _make_upload_request(self, request, opener): + def _make_upload_request( + self, request, opener: "urllib.request.OpenerDirector" + ) -> "urllib.request._UrlopenRet": """ Open the given request object, return the response, raises URLError on protocol errors. @@ -3861,7 +3988,7 @@ def _make_upload_request(self, request, opener): raise return result - def _parse_http_status(self, status): + def _parse_http_status(self, status: tuple) -> None: """ Parse the status returned from the http request. 
@@ -3879,7 +4006,9 @@ def _parse_http_status(self, status): return - def _decode_response(self, headers, body): + def _decode_response( + self, headers: dict[str, Any], body: str + ) -> Union[str, dict[str, Any]]: """ Decode the response from the server from the wire format to a python data structure. @@ -3900,7 +4029,7 @@ def _decode_response(self, headers, body): return self._json_loads(body) return body - def _json_loads(self, body): + def _json_loads(self, body: str) -> Any: return json.loads(body) def _response_errors(self, sg_response): @@ -3949,7 +4078,7 @@ def _response_errors(self, sg_response): raise Fault(sg_response.get("message", "Unknown Error")) return - def _visit_data(self, data, visitor): + def _visit_data(self, data: T, visitor) -> T: """ Walk the data (simple python types) and call the visitor. """ @@ -3959,17 +4088,17 @@ def _visit_data(self, data, visitor): recursive = self._visit_data if isinstance(data, list): - return [recursive(i, visitor) for i in data] + return [recursive(i, visitor) for i in data] # type: ignore[return-value] if isinstance(data, tuple): - return tuple(recursive(i, visitor) for i in data) + return tuple(recursive(i, visitor) for i in data) # type: ignore[return-value] if isinstance(data, dict): - return dict((k, recursive(v, visitor)) for k, v in data.items()) + return dict((k, recursive(v, visitor)) for k, v in data.items()) # type: ignore[return-value] return visitor(data) - def _transform_outbound(self, data): + def _transform_outbound(self, data: T) -> T: """ Transform data types or values before they are sent by the client. @@ -4016,7 +4145,7 @@ def _outbound_visitor(value): return self._visit_data(data, _outbound_visitor) - def _transform_inbound(self, data): + def _transform_inbound(self, data: T) -> T: """ Transforms data types or values after they are received from the server. 
""" @@ -4052,7 +4181,7 @@ def _inbound_visitor(value): # ======================================================================== # Connection Functions - def _get_connection(self): + def _get_connection(self) -> Http: """ Return the current connection or creates a new connection to the current server. """ @@ -4081,7 +4210,7 @@ def _get_connection(self): return self._connection - def _close_connection(self): + def _close_connection(self) -> None: """ Close the current connection. """ @@ -4100,7 +4229,7 @@ def _close_connection(self): # ======================================================================== # Utility - def _parse_records(self, records): + def _parse_records(self, records: list) -> list: """ Parse 'records' returned from the api to do local modifications: @@ -4156,7 +4285,7 @@ def _parse_records(self, records): return records - def _build_thumb_url(self, entity_type, entity_id): + def _build_thumb_url(self, entity_type: str, entity_id: int) -> str: """ Return the URL for the thumbnail of an entity given the entity type and the entity id. @@ -4204,8 +4333,12 @@ def _build_thumb_url(self, entity_type, entity_id): raise RuntimeError("Unknown code %s %s" % (code, thumb_url)) def _dict_to_list( - self, d, key_name="field_name", value_name="value", extra_data=None - ): + self, + d: Optional[dict[str, Any]], + key_name: str = "field_name", + value_name: str = "value", + extra_data=None, + ) -> list[dict[str, Any]]: """ Utility function to convert a dict into a list dicts using the key_name and value_name keys. @@ -4222,7 +4355,7 @@ def _dict_to_list( ret.append(d) return ret - def _dict_to_extra_data(self, d, key_name="value"): + def _dict_to_extra_data(self, d: Optional[dict], key_name="value") -> dict: """ Utility function to convert a dict into a dict compatible with the extra_data arg of _dict_to_list. 
@@ -4231,7 +4364,7 @@ def _dict_to_extra_data(self, d, key_name="value"): """ return dict([(k, {key_name: v}) for (k, v) in (d or {}).items()]) - def _upload_file_to_storage(self, path, storage_url): + def _upload_file_to_storage(self, path: str, storage_url: str) -> None: """ Internal function to upload an entire file to the Cloud storage. @@ -4251,7 +4384,9 @@ def _upload_file_to_storage(self, path, storage_url): LOG.debug("File uploaded to Cloud storage: %s", filename) - def _multipart_upload_file_to_storage(self, path, upload_info): + def _multipart_upload_file_to_storage( + self, path: str, upload_info: dict[str, Any] + ) -> None: """ Internal function to upload a file to the Cloud storage in multiple parts. @@ -4293,7 +4428,9 @@ def _multipart_upload_file_to_storage(self, path, upload_info): LOG.debug("File uploaded in multiple parts to Cloud storage: %s", path) - def _get_upload_part_link(self, upload_info, filename, part_number): + def _get_upload_part_link( + self, upload_info: dict[str, Any], filename: str, part_number: int + ) -> str: """ Internal function to get the url to upload the next part of a file to the Cloud storage, in a multi-part upload process. @@ -4333,7 +4470,9 @@ def _get_upload_part_link(self, upload_info, filename, part_number): LOG.debug("Got next upload link from server for multipart upload.") return result.split("\n", 2)[1] - def _upload_data_to_storage(self, data, content_type, size, storage_url): + def _upload_data_to_storage( + self, data: BinaryIO, content_type: str, size: int, storage_url: str + ) -> str: """ Internal function to upload data to Cloud storage. 
@@ -4388,13 +4527,15 @@ def _upload_data_to_storage(self, data, content_type, size, storage_url): LOG.debug("Part upload completed successfully.") return etag - def _complete_multipart_upload(self, upload_info, filename, etags): + def _complete_multipart_upload( + self, upload_info: dict[str, Any], filename: str, etags: Iterable[str] + ) -> None: """ Internal function to complete a multi-part upload to the Cloud storage. :param dict upload_info: Contains details received from the server, about the upload. :param str filename: Name of the file for which we want to complete the upload. - :param tupple etags: Contains the etag of each uploaded file part. + :param tuple etags: Contains the etag of each uploaded file part. """ params = { @@ -4421,7 +4562,9 @@ def _complete_multipart_upload(self, upload_info, filename, etags): if not result.startswith("1"): raise ShotgunError("Unable get upload part link: %s" % result) - def _requires_direct_s3_upload(self, entity_type, field_name): + def _requires_direct_s3_upload( + self, entity_type: str, field_name: Optional[str] + ) -> bool: """ Internal function that determines if an entity_type + field_name combination should be uploaded to cloud storage. @@ -4462,7 +4605,7 @@ def _requires_direct_s3_upload(self, entity_type, field_name): else: return False - def _send_form(self, url, params): + def _send_form(self, url: str, params: dict[str, Any]) -> str: """ Utility function to send a Form to Shotgun and process any HTTP errors that could occur. @@ -4594,7 +4737,7 @@ def https_request(self, request): return self.http_request(request) -def _translate_filters(filters, filter_operator): +def _translate_filters(filters: Union[list, tuple], filter_operator) -> dict[str, Any]: """ Translate filters params into data structure expected by rpc call. 
""" @@ -4603,7 +4746,7 @@ def _translate_filters(filters, filter_operator): return _translate_filters_dict(wrapped_filters) -def _translate_filters_dict(sg_filter): +def _translate_filters_dict(sg_filter: dict[str, Any]) -> dict[str, Any]: new_filters = {} filter_operator = sg_filter.get("filter_operator") @@ -4663,7 +4806,7 @@ def _translate_filters_simple(sg_filter): return condition -def _version_str(version): +def _version_str(version) -> str: """ Convert a tuple of int's to a '.' separated str. """ From b0df9fe683b4510e69f5b77c50d5d0ce233be03e Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Wed, 12 Nov 2025 12:01:53 -0800 Subject: [PATCH 48/59] SG-40841 Add information about relative_path field (#420) * Add information about relative_path field * Update docs/cookbook/attachments.rst * Add missing trailing comma for consistence --- docs/cookbook/attachments.rst | 63 ++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 23 deletions(-) diff --git a/docs/cookbook/attachments.rst b/docs/cookbook/attachments.rst index de992431d..cfe609de8 100644 --- a/docs/cookbook/attachments.rst +++ b/docs/cookbook/attachments.rst @@ -124,17 +124,22 @@ will vary. 
:: - { 'content_type': 'video/quicktime', + { + 'content_type': 'video/quicktime', 'link_type': 'local', + 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov', + 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_002.mov', + 'local_path_mac': '/Users/kp/Movies/testing/test_movie_002.mov', + 'local_path_windows': 'M:\\macusers\\kp\\Movies\\testing\\test_movie_002.mov', + 'local_storage': { + 'id': 1, + 'name': 'Dailies Directories', + 'type': 'LocalStorage', + }, 'name': 'my_test_movie.mov', - 'local_path': '/Users/kp/Movies/testing/test_movie_002.mov' - 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_002.mov' - 'local_path_mac': '/Users/kp/Movies/testing/test_movie_002.mov' - 'local_path_windows': 'M:\\macusers\kp\Movies\testing\test_movie_002.mov' - 'local_storage': {'id': 1, - 'name': 'Dailies Directories', - 'type': 'LocalStorage'}, - 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov'} + 'relative_path': 'testing/test_movie_002.mov', + 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov', + } ******************** @@ -237,6 +242,9 @@ are available: - **local_storage** (:obj:`dict`) *read-only*: A dictionary representing which LocalStorage entity is applied for this local file link. +- **relative_path** (:obj:`str`) *read-only*: + The path to the file relative to the ``local_storage`` root. 
+ - **url** (:obj:`str`) *read-only*: A file URI (``file://``) path provided for convenience pointing to the value in the ``local_path`` @@ -250,19 +258,26 @@ Reading Local File Fields Returns:: - {'id':123, - 'sg_uploaded_movie': { 'content_type': None, - 'link_type': 'local', - 'name': 'my_test_movie.mov', - 'local_path': '/Users/kp/Movies/testing/test_movie_001_.mov' - 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_001_.mov' - 'local_path_mac': '/Users/kp/Movies/testing/test_movie_001_.mov' - 'local_path_windows': 'M:\\macusers\kp\Movies\testing\test_movie_001_.mov' - 'local_storage': {'id': 1, - 'name': 'Dailies Directories', - 'type': 'LocalStorage'}, - 'url': 'file:///Users/kp/Movies/testing/test_movie_001_.mov'}, - 'type': 'Version'} + { + 'id': 123, + 'sg_uploaded_movie': { + 'content_type': None, + 'link_type': 'local', + 'local_path': '/Users/kp/Movies/testing/test_movie_001_.mov', + 'local_path_linux': '/home/users/macusers/kp/Movies/testing/test_movie_001_.mov', + 'local_path_mac': '/Users/kp/Movies/testing/test_movie_001_.mov', + 'local_path_windows': 'M:\\macusers\\kp\\Movies\\testing\\test_movie_001_.mov', + 'local_storage': { + 'id': 1, + 'name': 'Dailies Directories', + 'type': 'LocalStorage', + }, + 'relative_path': 'testing/test_movie_001_.mov', + 'name': 'my_test_movie.mov', + 'url': 'file:///Users/kp/Movies/testing/test_movie_001_.mov', + }, + 'type': 'Version', + } .. 
note:: When viewing results that include file/link fields with local file link values, all of the @@ -335,7 +350,8 @@ Returns:: 'name': 'Dailies Directories', 'type': 'LocalStorage' }, - 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov' + 'relative_path': 'testing/test_movie_002.mov', + 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov', }, 'type': 'Version', } @@ -379,6 +395,7 @@ Returns:: 'name': 'Dailies Directories', 'type': 'LocalStorage' }, + 'relative_path': 'testing/test_movie_002.mov', 'url': 'file:///Users/kp/Movies/testing/test_movie_002.mov' }, 'type': 'Version', From 3b30ce4d6f7c34b2d0e38009421b652ff66351f4 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio Date: Thu, 20 Nov 2025 14:09:27 -0500 Subject: [PATCH 49/59] SG-39225 Support specific keys for entity optimization (#423) * Support specific keys for entity optimization * Update conditions * Update test * Support lists without dicts on optimization * Improved readability * Improvements by feedback * More granular optimization * Use simple conditions instead of set intersection * Include id and type checks * Make recursive and add more test cases --- shotgun_api3/shotgun.py | 41 ++++++++++++++++--------- tests/test_unit.py | 67 +++++++++++++++++++++++++++++++++++++---- 2 files changed, 88 insertions(+), 20 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 0c0c9cd5c..2a1108aac 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -1194,7 +1194,7 @@ def _translate_update_params( def optimize_field(field_dict): if SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION: return field_dict - return {k: _get_type_and_id_from_value(v) for k, v in field_dict.items()} + return {k: _optimize_filter_field(v) for k, v in field_dict.items()} full_fields = self._dict_to_list( data, @@ -4799,7 +4799,7 @@ def _translate_filters_simple(sg_filter): and condition["relation"] in ["is", "is_not", "in", "not_in"] and isinstance(values[0], dict) ): - values = 
[_get_type_and_id_from_value(v) for v in values] + values = [_optimize_filter_field(v) for v in values] condition["values"] = values @@ -4813,17 +4813,30 @@ def _version_str(version) -> str: return ".".join(map(str, version)) -def _get_type_and_id_from_value(value): +def _optimize_filter_field( + field_value: Union[dict, list], recursive: bool = True +) -> Union[dict, list]: """ - For an entity dictionary, returns a new dictionary with only the type and id keys. - If any of these keys are not present, the original dictionary is returned. + For an FPT entity, returns a new dictionary with only the type, + id, and other allowed keys. + If case of any processing error, the original dictionary is returned. + + At least `type` and `id` keys are required to do the optimization """ - try: - if isinstance(value, dict): - return {"type": value["type"], "id": value["id"]} - elif isinstance(value, list): - return [{"type": v["type"], "id": v["id"]} for v in value] - except (KeyError, TypeError): - LOG.debug(f"Could not optimize entity value {value}") - - return value + allowed_keys = { + "id", + "type", + "url", + "name", + "content_type", + "local_path", + "storage", + "relative_path", + } + if isinstance(field_value, dict) and "id" in field_value and "type" in field_value: + return {key: field_value[key] for key in allowed_keys if key in field_value} + + elif recursive and isinstance(field_value, list): + return [_optimize_filter_field(fv, recursive=False) for fv in field_value] + + return field_value diff --git a/tests/test_unit.py b/tests/test_unit.py index 42f882af4..786a83f02 100644 --- a/tests/test_unit.py +++ b/tests/test_unit.py @@ -673,15 +673,41 @@ def test_related_object_update_optimization_entity_multi(self): entity_id = 6626 data = { "sg_status_list": "ip", - "project": {"id": 70, "type": "Project", "name": "disposable name 70"}, + "project": {"id": 70, "type": "Project", "name": "important name 70"}, "sg_vvv": [ - {"id": 6441, "type": "Asset", "name": 
"disposable name 6441"}, {"id": 6440, "type": "Asset"}, + {"id": 6441, "type": "Asset", "custom_name": "disposable name 6441"}, + { + # To be kept + "id": 6442, + "type": "Asset", + "url": "http://test.com/asset/6442", + # to be removed + "custom_name": "disposable name 1", + "custom_name2": "disposable name 2", + "custom_name3": "disposable name 3", + "custom_name4": "disposable name 4", + }, + { + "sg_nested": { + "level1": { + "level2": {"id": 123, "type": "Entity", "foo": "bar"} + } + } + }, ], "sg_class": { + # To be kept "id": 1, "type": "CustomEntity53", - "name": "disposable name 1", + "url": "http://test.com", + "name": "important class name", + "local_path": "/some/local/path", + # to be removed + "custom_name": "disposable name 1", + "custom_name2": "disposable name 2", + "custom_name3": "disposable name 3", + "custom_name4": "disposable name 4", }, } expected = { @@ -689,17 +715,46 @@ def test_related_object_update_optimization_entity_multi(self): "id": 6626, "fields": [ {"field_name": "sg_status_list", "value": "ip"}, - {"field_name": "project", "value": {"type": "Project", "id": 70}}, + { + "field_name": "project", + "value": { + "type": "Project", + "id": 70, + "name": "important name 70", + }, + }, { "field_name": "sg_vvv", "value": [ - {"id": 6441, "type": "Asset"}, {"id": 6440, "type": "Asset"}, + {"id": 6441, "type": "Asset"}, + { + "id": 6442, + "type": "Asset", + "url": "http://test.com/asset/6442", + }, + { + "sg_nested": { + "level1": { + "level2": { + "id": 123, + "type": "Entity", + "foo": "bar", + } + } + } + }, ], }, { "field_name": "sg_class", - "value": {"type": "CustomEntity53", "id": 1}, + "value": { + "type": "CustomEntity53", + "id": 1, + "name": "important class name", + "url": "http://test.com", + "local_path": "/some/local/path", + }, }, ], } From 9e359839b1a09de152e9ddecd2540716e1ea0297 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio Date: Tue, 25 Nov 2025 12:43:15 -0500 Subject: [PATCH 50/59] SG-41239 Make type annotations 
compatible with py37 (#427) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Make type annotations compatible with py37 🫠 * Fake type check on CI --- azure-pipelines-templates/type_checking.yml | 48 ++++ azure-pipelines.yml | 1 + shotgun_api3/shotgun.py | 229 ++++++++++---------- 3 files changed, 168 insertions(+), 110 deletions(-) create mode 100644 azure-pipelines-templates/type_checking.yml diff --git a/azure-pipelines-templates/type_checking.yml b/azure-pipelines-templates/type_checking.yml new file mode 100644 index 000000000..dbabd6570 --- /dev/null +++ b/azure-pipelines-templates/type_checking.yml @@ -0,0 +1,48 @@ +# Copyright (c) 2025, Shotgun Software Inc. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# - Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# +# - Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# - Neither the name of the Shotgun Software Inc nor the names of its +# contributors may be used to endorse or promote products derived from this +# software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +jobs: +- job: type_checking + displayName: Type Checking (beta) + pool: + vmImage: 'ubuntu-latest' + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: 3.9 + addToPath: True + architecture: 'x64' + + - script: | + pip install --upgrade pip setuptools wheel + pip install --upgrade mypy + displayName: Install dependencies + + # Placeholder to future static type checking. For now we just run mypy and skip all known errors. + - bash: mypy shotgun_api3/shotgun.py --follow-imports skip --pretty --no-strict-optional --disable-error-code arg-type --disable-error-code assignment --disable-error-code return --disable-error-code return-value --disable-error-code attr-defined + displayName: Run type checking diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 52e6cfa9c..0e465bf22 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -52,6 +52,7 @@ pr: # Jobs run in parallel. jobs: - template: azure-pipelines-templates/code_style_validation.yml +- template: azure-pipelines-templates/type_checking.yml # These are jobs templates, they allow to reduce the redundancy between # variations of the same build. We pass in the image name diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 2a1108aac..39167971c 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -29,6 +29,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" +from __future__ import annotations # Requried for 3.7 + import base64 import copy import datetime @@ -53,10 +55,11 @@ from typing import ( Any, BinaryIO, + Dict, Iterable, - Literal, + List, Optional, - TypedDict, + Tuple, TypeVar, Union, ) @@ -100,21 +103,25 @@ T = TypeVar("T") +if sys.version_info < (3, 9): + OrderItem = Dict + GroupingItem = Dict + BaseEntity = Dict +else: + from typing import TypedDict -class OrderItem(TypedDict): - field_name: str - direction: str - - -class GroupingItem(TypedDict): - field: str - type: str - direction: str + class OrderItem(TypedDict): + field_name: str + direction: str + class GroupingItem(TypedDict): + field: str + type: str + direction: str -class BaseEntity(TypedDict, total=False): - id: int - type: str + class BaseEntity(TypedDict, total=False): + id: int + type: str # ---------------------------------------------------------------------------- @@ -202,7 +209,7 @@ class ServerCapabilities(object): the future. Therefore, usage of this class is discouraged. """ - def __init__(self, host: str, meta: dict[str, Any]) -> None: + def __init__(self, host: str, meta: Dict[str, Any]) -> None: """ ServerCapabilities.__init__ @@ -249,7 +256,7 @@ def _ensure_python_version_supported(self) -> None: if sys.version_info < (3, 7): raise ShotgunError("This module requires Python version 3.7 or higher.") - def _ensure_support(self, feature: dict[str, Any], raise_hell: bool = True) -> bool: + def _ensure_support(self, feature: Dict[str, Any], raise_hell: bool = True) -> bool: """ Checks the server version supports a given feature, raises an exception if it does not. 
@@ -421,7 +428,7 @@ def __init__(self, sg: "Shotgun"): self.auth_token: Optional[str] = None self.sudo_as_login: Optional[str] = None # Authentication parameters to be folded into final auth_params dict - self.extra_auth_params: Optional[dict[str, Any]] = None + self.extra_auth_params: Optional[Dict[str, Any]] = None # uuid as a string self.session_uuid: Optional[str] = None self.scheme: Optional[str] = None @@ -738,7 +745,7 @@ def __init__( self.config.user_password = None self.config.auth_token = None - def _split_url(self, base_url: str) -> tuple[Optional[str], Optional[str]]: + def _split_url(self, base_url: str) -> Tuple[Optional[str], Optional[str]]: """ Extract the hostname:port and username/password/token from base_url sent when connect to the API. @@ -770,7 +777,7 @@ def _split_url(self, base_url: str) -> tuple[Optional[str], Optional[str]]: # API Functions @property - def server_info(self) -> dict[str, Any]: + def server_info(self) -> Dict[str, Any]: """ Property containing server information. @@ -823,7 +830,7 @@ def close(self) -> None: self._close_connection() return - def info(self) -> dict[str, Any]: + def info(self) -> Dict[str, Any]: """ Get API-related metadata from the Shotgun server. 
@@ -857,13 +864,13 @@ def info(self) -> dict[str, Any]: def find_one( self, entity_type: str, - filters: Union[list, tuple, dict[str, Any]], - fields: Optional[list[str]] = None, - order: Optional[list[OrderItem]] = None, - filter_operator: Optional[Literal["all", "any"]] = None, + filters: Union[List, Tuple, Dict[str, Any]], + fields: Optional[List[str]] = None, + order: Optional[List[OrderItem]] = None, + filter_operator: Optional[str] = None, retired_only: bool = False, include_archived_projects: bool = True, - additional_filter_presets: Optional[list[dict[str, Any]]] = None, + additional_filter_presets: Optional[List[Dict[str, Any]]] = None, ) -> Optional[BaseEntity]: """ Shortcut for :meth:`~shotgun_api3.Shotgun.find` with ``limit=1`` so it returns a single @@ -937,16 +944,16 @@ def find_one( def find( self, entity_type: str, - filters: Union[list, tuple, dict[str, Any]], - fields: Optional[list[str]] = None, - order: Optional[list[OrderItem]] = None, - filter_operator: Optional[Literal["all", "any"]] = None, + filters: Union[List, Tuple, Dict[str, Any]], + fields: Optional[List[str]] = None, + order: Optional[List[OrderItem]] = None, + filter_operator: Optional[str] = None, limit: int = 0, retired_only: bool = False, page: int = 0, include_archived_projects: bool = True, - additional_filter_presets: Optional[list[dict[str, Any]]] = None, - ) -> list[BaseEntity]: + additional_filter_presets: Optional[List[Dict[str, Any]]] = None, + ) -> List[BaseEntity]: """ Find entities matching the given filters. 
@@ -1136,14 +1143,14 @@ def find( def _construct_read_parameters( self, entity_type: str, - fields: Optional[list[str]], - filters: dict[str, Any], + fields: Optional[List[str]], + filters: Dict[str, Any], retired_only: bool, - order: Optional[list[dict[str, Any]]], + order: Optional[List[Dict[str, Any]]], include_archived_projects: bool, - additional_filter_presets: Optional[list[dict[str, Any]]], - ) -> dict[str, Any]: - params: dict[str, Any] = {} + additional_filter_presets: Optional[List[Dict[str, Any]]], + ) -> Dict[str, Any]: + params: Dict[str, Any] = {} params["type"] = entity_type params["return_fields"] = fields or ["id"] params["filters"] = filters @@ -1174,8 +1181,8 @@ def _construct_read_parameters( return params def _add_project_param( - self, params: dict[str, Any], project_entity - ) -> dict[str, Any]: + self, params: Dict[str, Any], project_entity + ) -> Dict[str, Any]: if project_entity and self.server_caps.ensure_per_project_customization(): params["project"] = project_entity @@ -1186,9 +1193,9 @@ def _translate_update_params( self, entity_type: str, entity_id: int, - data: dict, - multi_entity_update_modes: Optional[dict], - ) -> dict[str, Any]: + data: Dict, + multi_entity_update_modes: Optional[Dict], + ) -> Dict[str, Any]: global SHOTGUN_API_DISABLE_ENTITY_OPTIMIZATION def optimize_field(field_dict): @@ -1211,12 +1218,12 @@ def optimize_field(field_dict): def summarize( self, entity_type: str, - filters: Union[list, dict[str, Any]], - summary_fields: list[dict[str, str]], + filters: Union[List, Dict[str, Any]], + summary_fields: List[Dict[str, str]], filter_operator: Optional[str] = None, - grouping: Optional[list[GroupingItem]] = None, + grouping: Optional[List[GroupingItem]] = None, include_archived_projects: bool = True, - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Summarize field data returned by a query. 
@@ -1419,9 +1426,9 @@ def summarize( def create( self, entity_type: str, - data: dict[str, Any], - return_fields: Optional[list] = None, - ) -> dict[str, Any]: + data: Dict[str, Any], + return_fields: Optional[List[str]] = None, + ) -> Dict[str, Any]: """ Create a new entity of the specified ``entity_type``. @@ -1508,8 +1515,8 @@ def update( self, entity_type: str, entity_id: int, - data: dict[str, Any], - multi_entity_update_modes: Optional[dict[str, Any]] = None, + data: Dict[str, Any], + multi_entity_update_modes: Optional[Dict[str, Any]] = None, ) -> BaseEntity: """ Update the specified entity with the supplied data. @@ -1631,7 +1638,7 @@ def revive(self, entity_type: str, entity_id: int) -> bool: return self._call_rpc("revive", params) - def batch(self, requests: list[dict[str, Any]]) -> list[dict[str, Any]]: + def batch(self, requests: List[Dict[str, Any]]) -> List[Dict[str, Any]]: """ Make a batch request of several :meth:`~shotgun_api3.Shotgun.create`, :meth:`~shotgun_api3.Shotgun.update`, and :meth:`~shotgun_api3.Shotgun.delete` calls. @@ -1750,9 +1757,9 @@ def work_schedule_read( self, start_date: str, end_date: str, - project: Optional[dict[str, Any]] = None, - user: Optional[dict[str, Any]] = None, - ) -> dict[str, Any]: + project: Optional[Dict[str, Any]] = None, + user: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: """ Return the work day rules for a given date range. @@ -1826,10 +1833,10 @@ def work_schedule_update( date: str, working: bool, description: Optional[str] = None, - project: Optional[dict[str, Any]] = None, - user: Optional[dict[str, Any]] = None, + project: Optional[Dict[str, Any]] = None, + user: Optional[Dict[str, Any]] = None, recalculate_field: Optional[str] = None, - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Update the work schedule for a given date. 
@@ -1883,7 +1890,7 @@ def work_schedule_update( return self._call_rpc("work_schedule_update", params) - def follow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, Any]: + def follow(self, user: Dict[str, Any], entity: Dict[str, Any]) -> Dict[str, Any]: """ Add the entity to the user's followed entities. @@ -1911,7 +1918,7 @@ def follow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, Any] return self._call_rpc("follow", params) - def unfollow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, Any]: + def unfollow(self, user: Dict[str, Any], entity: Dict[str, Any]) -> Dict[str, Any]: """ Remove entity from the user's followed entities. @@ -1938,7 +1945,7 @@ def unfollow(self, user: dict[str, Any], entity: dict[str, Any]) -> dict[str, An return self._call_rpc("unfollow", params) - def followers(self, entity: dict[str, Any]) -> list[dict[str, Any]]: + def followers(self, entity: Dict[str, Any]) -> List[Dict[str, Any]]: """ Return all followers for an entity. @@ -1968,10 +1975,10 @@ def followers(self, entity: dict[str, Any]) -> list[dict[str, Any]]: def following( self, - user: dict[str, Any], - project: Optional[dict[str, Any]] = None, + user: Dict[str, Any], + project: Optional[Dict[str, Any]] = None, entity_type: Optional[str] = None, - ) -> list[BaseEntity]: + ) -> List[BaseEntity]: """ Return all entity instances a user is following. @@ -2004,7 +2011,7 @@ def following( def schema_entity_read( self, project_entity: Optional[BaseEntity] = None - ) -> dict[str, dict[str, Any]]: + ) -> Dict[str, Dict[str, Any]]: """ Return all active entity types, their display names, and their visibility. @@ -2039,7 +2046,7 @@ def schema_entity_read( The returned display names for this method will be localized when the ``localize`` Shotgun config property is set to ``True``. See :ref:`localization` for more information. 
""" - params: dict[str, Any] = {} + params: Dict[str, Any] = {} params = self._add_project_param(params, project_entity) @@ -2050,7 +2057,7 @@ def schema_entity_read( def schema_read( self, project_entity: Optional[BaseEntity] = None - ) -> dict[str, dict[str, Any]]: + ) -> Dict[str, Dict[str, Any]]: """ Get the schema for all fields on all entities. @@ -2113,7 +2120,7 @@ def schema_read( The returned display names for this method will be localized when the ``localize`` Shotgun config property is set to ``True``. See :ref:`localization` for more information. """ - params: dict[str, Any] = {} + params: Dict[str, Any] = {} params = self._add_project_param(params, project_entity) @@ -2127,7 +2134,7 @@ def schema_field_read( entity_type: str, field_name: Optional[str] = None, project_entity: Optional[BaseEntity] = None, - ) -> dict[str, dict[str, Any]]: + ) -> Dict[str, Dict[str, Any]]: """ Get schema for all fields on the specified entity type or just the field name specified if provided. @@ -2196,7 +2203,7 @@ def schema_field_create( entity_type: str, data_type: str, display_name: str, - properties: Optional[dict[str, Any]] = None, + properties: Optional[Dict[str, Any]] = None, ) -> str: """ Create a field for the specified entity type. 
@@ -2238,7 +2245,7 @@ def schema_field_update( self, entity_type: str, field_name: str, - properties: dict[str, Any], + properties: Dict[str, Any], project_entity: Optional[BaseEntity] = None, ) -> bool: """ @@ -2348,7 +2355,7 @@ def set_session_uuid(self, session_uuid: str) -> None: def share_thumbnail( self, - entities: list[dict[str, Any]], + entities: List[Dict[str, Any]], thumbnail_path: Optional[str] = None, source_entity: Optional[BaseEntity] = None, filmstrip_thumbnail: bool = False, @@ -2838,7 +2845,7 @@ def _upload_to_sg( def _get_attachment_upload_info( self, is_thumbnail: bool, filename: str, is_multipart_upload: bool - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Internal function to get the information needed to upload a file to Cloud storage. @@ -2887,7 +2894,7 @@ def _get_attachment_upload_info( def download_attachment( self, - attachment: Union[dict[str, Any], Literal[False]] = False, + attachment: Union[Dict[str, Any], bool] = False, file_path: Optional[str] = None, attachment_id: Optional[int] = None, ) -> Union[str, bytes, None]: @@ -3100,7 +3107,7 @@ def get_attachment_download_url( def authenticate_human_user( self, user_login: str, user_password: str, auth_token: Optional[str] = None - ) -> Union[dict[str, Any], None]: + ) -> Union[Dict[str, Any], None]: """ Authenticate Shotgun HumanUser. @@ -3160,7 +3167,7 @@ def authenticate_human_user( raise def update_project_last_accessed( - self, project: dict[str, Any], user: Optional[dict[str, Any]] = None + self, project: Dict[str, Any], user: Optional[Dict[str, Any]] = None ) -> None: """ Update a Project's ``last_accessed_by_current_user`` field to the current timestamp. 
@@ -3208,8 +3215,8 @@ def update_project_last_accessed( self._parse_records(record)[0] def note_thread_read( - self, note_id: int, entity_fields: Optional[dict[str, Any]] = None - ) -> list[dict[str, Any]]: + self, note_id: int, entity_fields: Optional[Dict[str, Any]] = None + ) -> List[Dict[str, Any]]: """ Return the full conversation for a given note, including Replies and Attachments. @@ -3287,10 +3294,10 @@ def note_thread_read( def text_search( self, text: str, - entity_types: dict[str, Any], - project_ids: Optional[list] = None, + entity_types: Dict[str, Any], + project_ids: Optional[List] = None, limit: Optional[int] = None, - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Search across the specified entity types for the given text. @@ -3386,11 +3393,11 @@ def activity_stream_read( self, entity_type: str, entity_id: int, - entity_fields: Optional[dict[str, Any]] = None, + entity_fields: Optional[Dict[str, Any]] = None, min_id: Optional[int] = None, max_id: Optional[int] = None, limit: Optional[int] = None, - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Retrieve activity stream data from Shotgun. @@ -3524,8 +3531,8 @@ def nav_search_string( def nav_search_entity( self, root_path: str, - entity: dict[str, Any], - seed_entity_field: Optional[dict[str, Any]] = None, + entity: Dict[str, Any], + seed_entity_field: Optional[Dict[str, Any]] = None, ): """ Search function adapted to work with the navigation hierarchy. @@ -3570,7 +3577,7 @@ def get_session_token(self) -> str: return session_token - def preferences_read(self, prefs: Optional[list] = None) -> dict[str, Any]: + def preferences_read(self, prefs: Optional[List] = None) -> Dict[str, Any]: """ Get a subset of the site preferences. 
@@ -3593,7 +3600,7 @@ def preferences_read(self, prefs: Optional[list] = None) -> dict[str, Any]: return self._call_rpc("preferences_read", {"prefs": prefs}) - def user_subscriptions_read(self) -> list: + def user_subscriptions_read(self) -> List: """ Get the list of user subscriptions. @@ -3606,7 +3613,7 @@ def user_subscriptions_read(self) -> list: return self._call_rpc("user_subscriptions_read", None) def user_subscriptions_create( - self, users: list[dict[str, Union[str, list[str], None]]] + self, users: List[Dict[str, Union[str, List[str], None]]] ) -> bool: """ Assign subscriptions to users. @@ -3799,7 +3806,7 @@ def _call_rpc( return results[0] return results - def _auth_params(self) -> dict[str, Any]: + def _auth_params(self) -> Dict[str, Any]: """ Return a dictionary of the authentication parameters being used. """ @@ -3854,7 +3861,7 @@ def _auth_params(self) -> dict[str, Any]: return auth_params - def _sanitize_auth_params(self, params: dict[str, Any]) -> dict[str, Any]: + def _sanitize_auth_params(self, params: Dict[str, Any]) -> Dict[str, Any]: """ Given an authentication parameter dictionary, sanitize any sensitive information and return the sanitized dict copy. @@ -3867,7 +3874,7 @@ def _sanitize_auth_params(self, params: dict[str, Any]) -> dict[str, Any]: def _build_payload( self, method: str, params, include_auth_params: bool = True - ) -> dict[str, Any]: + ) -> Dict[str, Any]: """ Build the payload to be send to the rpc endpoint. """ @@ -3897,8 +3904,8 @@ def _encode_payload(self, payload) -> bytes: return json.dumps(payload, ensure_ascii=False).encode("utf-8") def _make_call( - self, verb: str, path: str, body, headers: Optional[dict[str, Any]] - ) -> tuple[tuple[int, str], dict[str, Any], str]: + self, verb: str, path: str, body, headers: Optional[Dict[str, Any]] + ) -> Tuple[Tuple[int, str], Dict[str, Any], str]: """ Make an HTTP call to the server. 
@@ -3949,8 +3956,8 @@ def _make_call( time.sleep(rpc_attempt_interval) def _http_request( - self, verb: str, path: str, body, headers: dict[str, Any] - ) -> tuple[tuple[int, str], dict[str, Any], str]: + self, verb: str, path: str, body, headers: Dict[str, Any] + ) -> Tuple[Tuple[int, str], Dict[str, Any], str]: """ Make the actual HTTP request. """ @@ -3988,7 +3995,7 @@ def _make_upload_request( raise return result - def _parse_http_status(self, status: tuple) -> None: + def _parse_http_status(self, status: Tuple) -> None: """ Parse the status returned from the http request. @@ -4007,8 +4014,8 @@ def _parse_http_status(self, status: tuple) -> None: return def _decode_response( - self, headers: dict[str, Any], body: str - ) -> Union[str, dict[str, Any]]: + self, headers: Dict[str, Any], body: str + ) -> Union[str, Dict[str, Any]]: """ Decode the response from the server from the wire format to a python data structure. @@ -4229,7 +4236,7 @@ def _close_connection(self) -> None: # ======================================================================== # Utility - def _parse_records(self, records: list) -> list: + def _parse_records(self, records: List) -> List: """ Parse 'records' returned from the api to do local modifications: @@ -4334,11 +4341,11 @@ def _build_thumb_url(self, entity_type: str, entity_id: int) -> str: def _dict_to_list( self, - d: Optional[dict[str, Any]], + d: Optional[Dict[str, Any]], key_name: str = "field_name", value_name: str = "value", extra_data=None, - ) -> list[dict[str, Any]]: + ) -> List[Dict[str, Any]]: """ Utility function to convert a dict into a list dicts using the key_name and value_name keys. 
@@ -4355,7 +4362,9 @@ def _dict_to_list( ret.append(d) return ret - def _dict_to_extra_data(self, d: Optional[dict], key_name="value") -> dict: + def _dict_to_extra_data( + self, d: Optional[Dict[str, Any]], key_name="value" + ) -> Dict[str, Any]: """ Utility function to convert a dict into a dict compatible with the extra_data arg of _dict_to_list. @@ -4385,7 +4394,7 @@ def _upload_file_to_storage(self, path: str, storage_url: str) -> None: LOG.debug("File uploaded to Cloud storage: %s", filename) def _multipart_upload_file_to_storage( - self, path: str, upload_info: dict[str, Any] + self, path: str, upload_info: Dict[str, Any] ) -> None: """ Internal function to upload a file to the Cloud storage in multiple parts. @@ -4429,7 +4438,7 @@ def _multipart_upload_file_to_storage( LOG.debug("File uploaded in multiple parts to Cloud storage: %s", path) def _get_upload_part_link( - self, upload_info: dict[str, Any], filename: str, part_number: int + self, upload_info: Dict[str, Any], filename: str, part_number: int ) -> str: """ Internal function to get the url to upload the next part of a file to the @@ -4528,7 +4537,7 @@ def _upload_data_to_storage( return etag def _complete_multipart_upload( - self, upload_info: dict[str, Any], filename: str, etags: Iterable[str] + self, upload_info: Dict[str, Any], filename: str, etags: Iterable[str] ) -> None: """ Internal function to complete a multi-part upload to the Cloud storage. @@ -4605,7 +4614,7 @@ def _requires_direct_s3_upload( else: return False - def _send_form(self, url: str, params: dict[str, Any]) -> str: + def _send_form(self, url: str, params: Dict[str, Any]) -> str: """ Utility function to send a Form to Shotgun and process any HTTP errors that could occur. 
@@ -4737,7 +4746,7 @@ def https_request(self, request): return self.http_request(request) -def _translate_filters(filters: Union[list, tuple], filter_operator) -> dict[str, Any]: +def _translate_filters(filters: Union[List, Tuple], filter_operator) -> Dict[str, Any]: """ Translate filters params into data structure expected by rpc call. """ @@ -4746,7 +4755,7 @@ def _translate_filters(filters: Union[list, tuple], filter_operator) -> dict[str return _translate_filters_dict(wrapped_filters) -def _translate_filters_dict(sg_filter: dict[str, Any]) -> dict[str, Any]: +def _translate_filters_dict(sg_filter: Dict[str, Any]) -> Dict[str, Any]: new_filters = {} filter_operator = sg_filter.get("filter_operator") @@ -4814,8 +4823,8 @@ def _version_str(version) -> str: def _optimize_filter_field( - field_value: Union[dict, list], recursive: bool = True -) -> Union[dict, list]: + field_value: Union[Dict[str, Any], List], recursive: bool = True +) -> Union[Dict, List]: """ For an FPT entity, returns a new dictionary with only the type, id, and other allowed keys. 
From a5675898758be0cca45896f544c32d502411b973 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 25 Nov 2025 12:40:03 -0800 Subject: [PATCH 51/59] SG-40994 Update deprecation warning message about end of compat with Py 3.7 (#424) * Update deprecation warning message about end of compat with Py 3.7 * Make module not importable * Address review and update documentation * Update shotgun_api3/__init__.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Better for review * Apply suggestions from code review Co-authored-by: Martin Chesnay <104032692+mchesnay@users.noreply.github.com> * Apply suggestion from review * Unify variable name with tk-core --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> Co-authored-by: Martin Chesnay <104032692+mchesnay@users.noreply.github.com> --- docs/installation.rst | 12 ++++++++++-- docs/reference.rst | 13 +++++++++++++ shotgun_api3/__init__.py | 29 +++++++++++++++++++++++++---- shotgun_api3/shotgun.py | 8 -------- 4 files changed, 48 insertions(+), 14 deletions(-) diff --git a/docs/installation.rst b/docs/installation.rst index b082b1669..fe64d2844 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -6,8 +6,6 @@ Installation Minimum Requirements ******************** -- Python 3.7 - .. note:: Some features of the API are only supported by more recent versions of the Flow Production Tracking server. These features are added to the Python API in a backwards compatible way so that existing @@ -15,6 +13,16 @@ Minimum Requirements your version of Flow Production Tracking will raise an appropriate exception. In general, we attempt to document these where possible. +Python versions +=============== + +The Python API library supports the following Python versions: `3.9 - 3.11`. We recommend using Python 3.11. + +.. 
important:: + Python versions older than 3.9 are no longer supported as of March 2025 and compatibility will be discontinued after + March 2026. + + ****************************** Installing into ``PYTHONPATH`` ****************************** diff --git a/docs/reference.rst b/docs/reference.rst index 28bfabf1b..ce2c92cc6 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -994,6 +994,19 @@ Will internally be transformed as if you invoked something like this: sg.find('Asset', [['project', 'is', {'id': 999, 'type': 'Project'}]]) +SHOTGUN_ALLOW_OLD_PYTHON +======================== + +When set to ``1``, ``shotgun_api3`` will allow being imported from Python versions that are no longer supported. +Otherwise, when unset (or set to any other value), importing the module will raise an exception. + +This is not recommended and should only be used for testing purposes. + +.. important:: + The ability to import the module does not guarantee that the module will work properly on the unsupported Python + version. In fact, it is very likely that it will not work properly. + + ************ Localization ************ diff --git a/shotgun_api3/__init__.py b/shotgun_api3/__init__.py index 943a9fa8b..553097a75 100644 --- a/shotgun_api3/__init__.py +++ b/shotgun_api3/__init__.py @@ -8,14 +8,35 @@ # agreement to the Shotgun Pipeline Toolkit Source Code License. All rights # not expressly granted therein are reserved by Shotgun Software Inc. +import os import sys import warnings -if sys.version_info < (3, 9): +if sys.version_info < (3, 7): + if os.environ.get("SHOTGUN_ALLOW_OLD_PYTHON", "0") != "1": + # This is our preferred default behavior when using an old + # unsupported Python version. + # This way, we can control where the exception is raised, and it provides a + # comprehensive error message rather than having users facing a random + # Python traceback and trying to understand this is due to using an + # unsupported Python version. 
+ + raise RuntimeError("This module requires Python version 3.7 or higher.") + + warnings.warn( + "Python versions older than 3.7 are no longer supported as of January " + "2023. Since the SHOTGUN_ALLOW_OLD_PYTHON variable is enabled, this " + "module is raising a warning instead of an exception. " + "However, it is very likely that this module will not be able to work " + "on this Python version.", + RuntimeWarning, + stacklevel=2, + ) +elif sys.version_info < (3, 9): warnings.warn( - "Python versions older than 3.9 are no longer supported since 2025-03 " - "and compatibility will be removed at any time after 2026-01. " - "Please update to Python 3.9 or a newer supported version.", + "Python versions older than 3.9 are no longer supported as of March " + "2025 and compatibility will be discontinued after March 2026. " + "Please update to Python 3.11 or any other supported version.", DeprecationWarning, stacklevel=2, ) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 39167971c..cd381e2ca 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -222,7 +222,6 @@ def __init__(self, host: str, meta: Dict[str, Any]) -> None: :ivar bool is_dev: ``True`` if server is running a development version of the Shotgun codebase. """ - self._ensure_python_version_supported() # Server host name self.host = host self.server_info = meta @@ -249,13 +248,6 @@ def __init__(self, host: str, meta: Dict[str, Any]) -> None: self.version = tuple(self.version[:3]) self._ensure_json_supported() - def _ensure_python_version_supported(self) -> None: - """ - Checks the if current Python version is supported. - """ - if sys.version_info < (3, 7): - raise ShotgunError("This module requires Python version 3.7 or higher.") - def _ensure_support(self, feature: Dict[str, Any], raise_hell: bool = True) -> bool: """ Checks the server version supports a given feature, raises an exception if it does not. 
From 24fa0e1d2354170b3e28d3370ba6e7e9df20b5d4 Mon Sep 17 00:00:00 2001 From: Carlos Villavicencio Date: Tue, 25 Nov 2025 17:34:03 -0500 Subject: [PATCH 52/59] Packaging for v3.9.1 (#428) --- HISTORY.rst | 10 ++++++++++ setup.py | 2 +- shotgun_api3/shotgun.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index 7b9fc4018..cf9cd4304 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,16 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.9.1 (2025 Nov 25) +==================== + +- Removed the deprecated ``CACertsHTTPSConnection`` class, which was no longer needed after dropping Python 2 support. +- Added basic type annotations throughout the package to improve IDE support and code completion. Note: Some typing improvements are still in progress and will be refined in future releases. Special thanks to @chadrik for this contribution! +- Introduced a new environment variable ``SHOTGUN_ALLOW_OLD_PYTHON`` to temporarily bypass Python version warnings for users still on Python 3.7 or 3.8. While this provides flexibility during transition, we strongly recommend upgrading to Python 3.9 or newer for continued support and security updates. +- Enhanced payload optimization for entity dictionaries, making it more flexible and preventing potential issues when working with special fields like ``type`` and ``url``. +- Updated attachment documentation with detailed information about the ``relative_path`` field and its usage. +- Python versions older than 3.9 are now deprecated. A runtime warning will be displayed during initialization if you're using Python 3.7 or 3.8. Please plan to upgrade to Python 3.9 or newer as these older versions will not be supported in future releases. 
+ v3.9.0 (2025 Sep 10) ==================== diff --git a/setup.py b/setup.py index 0ddda9d79..c0f416985 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.9.0", + version="3.9.1", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index cd381e2ca..fb8803568 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -94,7 +94,7 @@ # ---------------------------------------------------------------------------- # Version -__version__ = "3.9.0" +__version__ = "3.9.1" # ---------------------------------------------------------------------------- From 89e083d4dbcaeccddb627eb9fcda77d9cbddaf6d Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 9 Dec 2025 08:53:25 -0800 Subject: [PATCH 53/59] SG-41382 Switch Python version from 3.9 to 3.10 for running pre-commit (#430) --- azure-pipelines-templates/code_style_validation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/azure-pipelines-templates/code_style_validation.yml b/azure-pipelines-templates/code_style_validation.yml index 69e82b7e0..9808f1d7a 100644 --- a/azure-pipelines-templates/code_style_validation.yml +++ b/azure-pipelines-templates/code_style_validation.yml @@ -34,7 +34,7 @@ jobs: steps: - task: UsePythonVersion@0 inputs: - versionSpec: 3.9 + versionSpec: 3.10 addToPath: True architecture: 'x64' From feddaa09eb38f76da7f03a5e26216db0f37dda4f Mon Sep 17 00:00:00 2001 From: kuldeepgudekar <49712593+kuldeepgudekar@users.noreply.github.com> Date: Wed, 10 Dec 2025 14:48:30 +0530 Subject: [PATCH 54/59] SG-35561 export csv python changes (#386) * changes to export page from the api * uncommented other test cases * doc string added in export_page function * args positioning changed * updated according to new export page params * update the doc string * removed extra line added * test 
cases updated * test case fixed * test case updated * update in test cases * update for human_user authmode * update in mock test cases * update in docs, and version number updated * update in doc string * update the documentation link * removing merge conflicts with update in documentation * update the test cases * release notes updated * small doc changes added * packaging for the v3.9.2 release * Revert "packaging for the v3.9.2 release" This reverts commit d359d054a525bde57f186a7ae8dc8efeb1cf8a68. * packaging for the v3.9.2 release * updated history for release info --- HISTORY.rst | 6 ++++ docs/reference.rst | 2 ++ setup.py | 2 +- shotgun_api3/shotgun.py | 25 ++++++++++++- tests/test_api.py | 77 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 110 insertions(+), 2 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index cf9cd4304..b9d279bcb 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -4,6 +4,12 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. +v3.9.2 (2025 Dec 10) +=================== + +- Add ``export_page`` method to Shotgun class. +- Documentation has been updated to reflect this addition. + v3.9.1 (2025 Nov 25) ==================== diff --git a/docs/reference.rst b/docs/reference.rst index ce2c92cc6..0c7f248f1 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -66,6 +66,7 @@ The documentation for all of the methods you'll need in your scripts lives in he Shotgun.work_schedule_read Shotgun.work_schedule_update Shotgun.preferences_read + Shotgun.export_page .. rubric:: Working With Files @@ -150,6 +151,7 @@ also some specialized convenience methods for accessing particular types of info .. automethod:: Shotgun.work_schedule_read .. automethod:: Shotgun.work_schedule_update .. automethod:: Shotgun.preferences_read +.. 
automethod:: Shotgun.export_page Working With Files ================== diff --git a/setup.py b/setup.py index c0f416985..19fd90115 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ setup( name="shotgun_api3", - version="3.9.1", + version="3.9.2", description="Flow Production Tracking Python API", long_description=readme, author="Autodesk", diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index fb8803568..9d2d22f26 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -94,7 +94,7 @@ # ---------------------------------------------------------------------------- # Version -__version__ = "3.9.1" +__version__ = "3.9.2" # ---------------------------------------------------------------------------- @@ -1882,6 +1882,29 @@ def work_schedule_update( return self._call_rpc("work_schedule_update", params) + def export_page(self, page_id, format, layout_name=None): + """ + Export the specified page to the given format. + This method allows you to export a page to CSV. + Respective layout or page should be marked as API Exportable in the Flow Production Tracking UI. + For more information, see documentation_. + .. _documentation: https://help.autodesk.com/view/SGSUB/ENU/?guid=SG_Tutorials_tu_export_csv_html#enable-api-export-for-a-page + If ``layout_name`` is not passed in, the default layout name will be used. + >>> sg.export_page(12345, "csv", layout_name="My Layout") + "ID,Name,Status\\n1,Shot 001,ip\\n2, Shot 002,rev\\n" + >>> sg.export_page(12345, "csv") + "ID,Name,Status\\n1,Shot 001,ip\\n2,Shot 002,rev\\n" + :param int page_id: The ID of the page to export. + :param str format: The format to export the page to. Supported format is ``"csv"``. + :param str layout_name: Optional layout name. This should be the name of the layout seen in the Flow Production Tracking UI. + :returns: string containing data of the given page. 
+ :rtype: string + """ + + params = dict(format=format, page_id=page_id, layout_name=layout_name) + + return self._call_rpc("export_page", params) + def follow(self, user: Dict[str, Any], entity: Dict[str, Any]) -> Dict[str, Any]: """ Add the entity to the user's followed entities. diff --git a/tests/test_api.py b/tests/test_api.py index 19b738f60..d9a1a8691 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1947,6 +1947,83 @@ def test_include_archived_projects(self): self.sg.update("Project", self.project["id"], {"archived": False}) +class TestExportPage(base.LiveTestBase): + + def setUp(self): + super(TestExportPage, self).setUp("HumanUser") + + def test_export_page_unavailable(self): + """ + Test export_page raises when report does not exist. + """ + if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): + return + + page_entity = self.sg.create("Page", {"entity_type": "Shot"}) + + with self.assertRaises(Exception) as cm: + self.sg.export_page(page_entity["id"], "csv") + self.assertIn( + f"This functionality is currently not available", str(cm.exception) + ) + + with self.assertRaises(Exception) as cm: + self.sg.export_page(page_entity["id"], "csv", layout_name="My Layout") + self.assertIn( + f"This functionality is currently not available", str(cm.exception) + ) + + def test_export_page_format_missing(self): + """ + Test export_page raises for invalid format. + """ + if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): + return + + with self.assertRaises(Exception) as cm: + self.sg.export_page(11, None) + self.assertIn("'format' missing", str(cm.exception)) + + with self.assertRaises(Exception) as cm: + self.sg.export_page(11, None, layout_name="My Layout") + self.assertIn("'format' missing", str(cm.exception)) + + def test_export_page_missing_page_id(self): + """ + Test export_page raises for missing page id. 
+ """ + if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): + return + + with self.assertRaises(Exception) as cm: + self.sg.export_page(None, "csv") + self.assertIn("'page_id' missing", str(cm.exception)) + + with self.assertRaises(Exception) as cm: + self.sg.export_page(None, "csv", layout_name="My Layout") + self.assertIn("'page_id' missing", str(cm.exception)) + + @unittest.mock.patch("shotgun_api3.shotgun.Http.request") + def test_export_page_without_layout_name(self, mock_request): + """ + Test export_page works when layout_name is not provided. + """ + + if not self.sg.server_caps.version or self.sg.server_caps.version < (5, 1, 22): + return + + # Mock the underlying Http.request to return CSV content with appropriate headers + csv_body = "ID,Name,Status\n1,Shot 001,ip\n2,Shot 002,rev\n" + response = unittest.mock.MagicMock(name="response mock") + response.status = 200 + response.reason = "OK" + response.items.return_value = [("content-type", "text/csv; charset=utf-8")] + mock_request.return_value = (response, csv_body) + result = self.sg.export_page(11, "csv") + self.assertIsInstance(result, str) + self.assertTrue(result.startswith("ID,Name,Status")) + + class TestFollow(base.LiveTestBase): def test_follow_unfollow(self): From d054f5d93e6c98c845335c9bb9d0160d50c54765 Mon Sep 17 00:00:00 2001 From: kuldeepgudekar <49712593+kuldeepgudekar@users.noreply.github.com> Date: Thu, 11 Dec 2025 13:49:32 +0530 Subject: [PATCH 55/59] Fixed Title underline in history rst file (#431) --- HISTORY.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HISTORY.rst b/HISTORY.rst index b9d279bcb..eea6dc053 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -5,7 +5,7 @@ Flow Production Tracking Python API Changelog Here you can see the full list of changes between each Python API release. v3.9.2 (2025 Dec 10) -=================== +==================== - Add ``export_page`` method to Shotgun class. 
- Documentation has been updated to reflect this addition. From ac46ff1db00a990e92d5c19302b9e10389de78be Mon Sep 17 00:00:00 2001 From: kuldeepgudekar <49712593+kuldeepgudekar@users.noreply.github.com> Date: Thu, 11 Dec 2025 17:12:12 +0530 Subject: [PATCH 56/59] fixing the documentation link (#432) --- shotgun_api3/shotgun.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 9d2d22f26..2d06823c8 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -1885,11 +1885,15 @@ def work_schedule_update( def export_page(self, page_id, format, layout_name=None): """ Export the specified page to the given format. + This method allows you to export a page to CSV. + Respective layout or page should be marked as API Exportable in the Flow Production Tracking UI. - For more information, see documentation_. - .. _documentation: https://help.autodesk.com/view/SGSUB/ENU/?guid=SG_Tutorials_tu_export_csv_html#enable-api-export-for-a-page + If ``layout_name`` is not passed in, the default layout name will be used. + + For more information, see `documentation `_ . 
+ >>> sg.export_page(12345, "csv", layout_name="My Layout") "ID,Name,Status\\n1,Shot 001,ip\\n2, Shot 002,rev\\n" >>> sg.export_page(12345, "csv") From 24b459132fd09d6c4a167c05ffb325f85044de19 Mon Sep 17 00:00:00 2001 From: kuldeepgudekar <49712593+kuldeepgudekar@users.noreply.github.com> Date: Tue, 16 Dec 2025 00:57:31 +0530 Subject: [PATCH 57/59] SG-35561 Update the test case for permissions and availability (#437) --- tests/test_api.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index d9a1a8691..d0e8407ea 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1960,18 +1960,20 @@ def test_export_page_unavailable(self): return page_entity = self.sg.create("Page", {"entity_type": "Shot"}) + error_messages = [ + "This functionality is currently not available", + f"Export for Page id={page_entity['id']} not available", + ] with self.assertRaises(Exception) as cm: self.sg.export_page(page_entity["id"], "csv") - self.assertIn( - f"This functionality is currently not available", str(cm.exception) - ) + msg = str(cm.exception) + self.assertIn(msg, error_messages) with self.assertRaises(Exception) as cm: self.sg.export_page(page_entity["id"], "csv", layout_name="My Layout") - self.assertIn( - f"This functionality is currently not available", str(cm.exception) - ) + msg = str(cm.exception) + self.assertIn(msg, error_messages) def test_export_page_format_missing(self): """ From 81ffb8be6f4da216bb150856706826fdc73bc462 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 16 Dec 2025 09:18:39 -0800 Subject: [PATCH 58/59] SG-41463 Fixup docs warning (#435) docstring of shotgun_api3.shotgun.Shotgun.export_page:15: Block quote ends without a blank line; unexpected unindent. 
--- shotgun_api3/shotgun.py | 1 + 1 file changed, 1 insertion(+) diff --git a/shotgun_api3/shotgun.py b/shotgun_api3/shotgun.py index 2d06823c8..9b6a91a9d 100644 --- a/shotgun_api3/shotgun.py +++ b/shotgun_api3/shotgun.py @@ -1898,6 +1898,7 @@ def export_page(self, page_id, format, layout_name=None): "ID,Name,Status\\n1,Shot 001,ip\\n2, Shot 002,rev\\n" >>> sg.export_page(12345, "csv") "ID,Name,Status\\n1,Shot 001,ip\\n2,Shot 002,rev\\n" + :param int page_id: The ID of the page to export. :param str format: The format to export the page to. Supported format is ``"csv"``. :param str layout_name: Optional layout name. This should be the name of the layout seen in the Flow Production Tracking UI. From e56d41c14f8efb6bddf8f24085e19007e71b9e26 Mon Sep 17 00:00:00 2001 From: Julien Langlois <16244608+julien-lang@users.noreply.github.com> Date: Tue, 16 Dec 2025 12:03:00 -0800 Subject: [PATCH 59/59] SG-41463 Improve CI Pipeline by re-using tk-ci-tools and build Sphinx documentation (#434) --- .../code_style_validation.yml | 50 ------------------- azure-pipelines.yml | 19 ++++++- 2 files changed, 18 insertions(+), 51 deletions(-) delete mode 100644 azure-pipelines-templates/code_style_validation.yml diff --git a/azure-pipelines-templates/code_style_validation.yml b/azure-pipelines-templates/code_style_validation.yml deleted file mode 100644 index 9808f1d7a..000000000 --- a/azure-pipelines-templates/code_style_validation.yml +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2024, Shotgun Software Inc. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# - Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. 
-# -# - Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# - Neither the name of the Shotgun Software Inc nor the names of its -# contributors may be used to endorse or promote products derived from this -# software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -jobs: -- job: code_style_validation - displayName: Code Style Validation - pool: - vmImage: 'ubuntu-latest' - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: 3.10 - addToPath: True - architecture: 'x64' - - - script: | - pip install --upgrade pip setuptools wheel - pip install --upgrade pre-commit - displayName: Install dependencies - - - bash: pre-commit autoupdate - displayName: Update pre-commit hook versions - - - bash: pre-commit run --all - displayName: Validate code with pre-commit diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0e465bf22..b9be5ab7e 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -26,6 +26,16 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +resources: + repositories: + - repository: templates + type: github + name: shotgunsoftware/tk-ci-tools + ref: refs/heads/master + endpoint: shotgunsoftware + # Despite using the "tk-" prefix, tk-ci-tools is not a Toolkit only tool. + # We use it to avoid duplicating and maintaining CI pipeline code. + # We've stored some variables in Azure. They contain credentials # and are encrypted. They are also not available to clients. # This statement says which variable groups this repo requires. @@ -51,7 +61,14 @@ pr: # This here is the list of jobs we want to run for our build. # Jobs run in parallel. jobs: -- template: azure-pipelines-templates/code_style_validation.yml +- template: build-pipeline.yml@templates + parameters: + # Python API does not follow the exact same Python version lifecycle than + # Toolkit. So we prefer to control the test execution here instead. + has_unit_tests: false + + has_ui_resources: false + - template: azure-pipelines-templates/type_checking.yml # These are jobs templates, they allow to reduce the redundancy between