diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 4eaf808f..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,92 +0,0 @@ ---- -version: 2.1 - -executors: - python: - docker: - - image: cimg/python:3.9 - -jobs: - flake8_lint: - executor: python - steps: - - checkout - - run: pip install tox - - run: tox -e flake8 - isort_lint: - executor: python - steps: - - checkout - - run: pip install tox - - run: tox -e isort - mypy_lint: - executor: python - steps: - - checkout - - run: pip install tox - - run: tox -e mypy - test: - parameters: - python: - type: string - docker: - - image: cimg/python:<< parameters.python >> - environment: - TOXENV: "py<< parameters.python >>" - steps: - - checkout - - run: echo 'export PATH=$HOME/.local/bin:$PATH' >> $BASH_ENV - - run: pip install --user tox "virtualenv<20.22.0" - - run: tox - test_nooptionals: - parameters: - python: - type: string - docker: - - image: cimg/python:<< parameters.python >> - environment: - TOXENV: "py<< parameters.python >>-nooptionals" - steps: - - checkout - - run: pip install tox - - run: tox - test_pypy: - parameters: - python: - type: string - docker: - - image: pypy:<< parameters.python >> - environment: - TOXENV: "pypy<< parameters.python >>" - steps: - - checkout - - run: pip install tox - - run: tox - - -workflows: - version: 2 - client_python: - jobs: - - flake8_lint - - isort_lint - - mypy_lint - - test: - matrix: - parameters: - python: - - "3.9.18" - - "3.10" - - "3.11" - - "3.12" - - "3.13" - - test_nooptionals: - matrix: - parameters: - python: - - "3.9" - - test_pypy: - matrix: - parameters: - python: - - "3.9" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..a7e4e094 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,110 @@ +name: CI + +on: + push: + branches: [ master ] + pull_request: + +permissions: + contents: read + +jobs: + flake8_lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up Python + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: '3.9' + - name: Install tox + run: pip install tox + - name: Run flake8 + run: tox -e flake8 + + isort_lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up Python + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: '3.9' + - name: Install tox + run: pip install tox + - name: Run isort + run: tox -e isort + + mypy_lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up Python + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: '3.9' + - name: Install tox + run: pip install tox + - name: Run mypy + run: tox -e mypy + + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + pip install --user tox "virtualenv<20.22.0" + echo "$HOME/.local/bin" >> $GITHUB_PATH + - name: Set tox environment + id: toxenv + run: | + 
VERSION="${{ matrix.python-version }}" + # Extract major.minor version (strip patch if present) + TOX_VERSION=$(echo "$VERSION" | cut -d. -f1,2) + echo "toxenv=py${TOX_VERSION}" >> $GITHUB_OUTPUT + - name: Run tests + run: tox + env: + TOXENV: ${{ steps.toxenv.outputs.toxenv }} + + test_nooptionals: + runs-on: ubuntu-latest + env: + PYTHON_VERSION: '3.9' + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up Python + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install tox + run: pip install tox + - name: Run tests without optional dependencies + run: tox + env: + TOXENV: py${{ env.PYTHON_VERSION }}-nooptionals + + test_pypy: + runs-on: ubuntu-latest + env: + PYTHON_VERSION: '3.9' + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Set up PyPy + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: pypy-${{ env.PYTHON_VERSION }} + - name: Install tox + run: pip install tox + - name: Run tests with PyPy + run: tox + env: + TOXENV: pypy${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/github-pages.yaml b/.github/workflows/github-pages.yaml index 621f2d73..d8db8cbc 100644 --- a/.github/workflows/github-pages.yaml +++ b/.github/workflows/github-pages.yaml @@ -11,9 +11,6 @@ on: # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages permissions: contents: read - pages: write - id-token: write - actions: read # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. 
@@ -32,6 +29,9 @@ jobs: runs-on: ubuntu-latest env: HUGO_VERSION: 0.145.0 + permissions: + pages: write + id-token: write steps: - name: Install Hugo CLI run: | @@ -40,13 +40,13 @@ #- name: Install Dart Sass # run: sudo snap install dart-sass - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: submodules: recursive fetch-depth: 0 - name: Setup Pages id: pages - uses: actions/configure-pages@v5 + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5.0.0 - name: Install Node.js dependencies run: "[[ -f package-lock.json || -f npm-shrinkwrap.json ]] && npm ci || true" working-directory: ./docs @@ -62,7 +62,7 @@ --baseURL "${{ steps.pages.outputs.base_url }}/" working-directory: ./docs - name: Upload artifact - uses: actions/upload-pages-artifact@v3 + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4.0.0 with: path: ./docs/public @@ -73,7 +73,11 @@ url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest needs: build + permissions: + pages: write + id-token: write + actions: read steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v4 + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4.0.5 diff --git a/docs/content/exporting/http/_index.md b/docs/content/exporting/http/_index.md index dc1b8f2c..f7a6aac6 100644 --- a/docs/content/exporting/http/_index.md +++ b/docs/content/exporting/http/_index.md @@ -24,6 +24,7 @@ to shutdown the server gracefully: ```python server, t = start_http_server(8000) server.shutdown() +server.server_close() t.join() ``` diff --git a/docs/content/exporting/http/aiohttp.md b/docs/content/exporting/http/aiohttp.md new file mode 100644 index 00000000..726b92cb --- /dev/null +++ b/docs/content/exporting/http/aiohttp.md @@ -0,0 +1,23 @@ +--- +title: AIOHTTP +weight: 6 +--- + +To use Prometheus with an [AIOHTTP server](https://docs.aiohttp.org/en/stable/web.html), +use `make_aiohttp_handler`, which creates a handler. + +```python +from aiohttp import web +from prometheus_client.aiohttp import make_aiohttp_handler + +app = web.Application() +app.router.add_get("/metrics", make_aiohttp_handler()) +``` + +By default, this handler will instruct AIOHTTP to automatically compress the +response if requested by the client. This behaviour can be disabled by passing +`disable_compression=True` when creating the handler, like this: + +```python +app.router.add_get("/metrics", make_aiohttp_handler(disable_compression=True)) +``` diff --git a/docs/content/exporting/http/django.md b/docs/content/exporting/http/django.md new file mode 100644 index 00000000..a900a3a2 --- /dev/null +++ b/docs/content/exporting/http/django.md @@ -0,0 +1,47 @@ +--- +title: Django +weight: 5 +--- + +To use Prometheus with [Django](https://www.djangoproject.com/), you can use the provided view class +to add a metrics endpoint to your app. + +```python +# urls.py + +from django.urls import path +from prometheus_client.django import PrometheusDjangoView + +urlpatterns = [ + # ... any other urls that you want + path("metrics/", PrometheusDjangoView.as_view(), name="prometheus-metrics"), + # ... still more urls +] +``` + +By default, multiprocess support is activated if the environment variable `PROMETHEUS_MULTIPROC_DIR` is set.
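+When multiprocess mode is active, the view does not serve the global `REGISTRY`;
+instead it builds a fresh registry per request, roughly equivalent to this sketch:
+
+```python
+from prometheus_client import CollectorRegistry, multiprocess
+
+registry = CollectorRegistry()
+multiprocess.MultiProcessCollector(registry)  # reads PROMETHEUS_MULTIPROC_DIR
+```
+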
+You can override this through the view arguments: + +```python +from django.conf import settings + +urlpatterns = [ + path( + "metrics/", + PrometheusDjangoView.as_view( + multiprocess_mode=settings.YOUR_SETTING # or any boolean value + ), + name="prometheus-metrics", + ), +] +``` + +Full multiprocessing instructions are provided [here]({{< ref "/multiprocess" >}}). + +# django-prometheus + +The included `PrometheusDjangoView` is useful if you want to define your own metrics from scratch. + +An external package called [django-prometheus](https://github.com/django-commons/django-prometheus/) +can be used instead if you want a set of ready-made monitoring metrics for your Django application +and utilities such as model monitoring. diff --git a/docs/content/exporting/pushgateway.md b/docs/content/exporting/pushgateway.md index bf5eb112..d9f9a945 100644 --- a/docs/content/exporting/pushgateway.md +++ b/docs/content/exporting/pushgateway.md @@ -54,6 +54,20 @@ g.set_to_current_time() push_to_gateway('localhost:9091', job='batchA', registry=registry, handler=my_auth_handler) ``` +# Compressing data before sending to pushgateway +Pushgateway supports gzip compression (version >= 1.5.0) and snappy compression (version > 1.6.0). This can help in network-constrained environments. +To compress a push request, set the `compression` argument to `'gzip'` or `'snappy'`: +```python +push_to_gateway( + 'localhost:9091', + job='batchA', + registry=registry, + handler=my_auth_handler, + compression='gzip', +) +``` +Snappy compression requires the optional [`python-snappy`](https://github.com/andrix/python-snappy) package. + TLS Auth is also supported when using the push gateway with a special handler. ```python diff --git a/docs/content/multiprocess/_index.md b/docs/content/multiprocess/_index.md index 33507cd9..42ea6a67 100644 --- a/docs/content/multiprocess/_index.md +++ b/docs/content/multiprocess/_index.md @@ -10,9 +10,12 @@ it's common to have processes rather than threads to handle large workloads. To handle this the client library can be put in multiprocess mode. This comes with a number of limitations: -- Registries can not be used as normal, all instantiated metrics are exported +- Registries cannot be used as normal: + - all instantiated metrics are collected - Registering metrics to a registry later used by a `MultiProcessCollector` may cause duplicate metrics to be exported + - Filtering on metrics works if and only if the constructor was called with + `support_collectors_without_names=True`, but it might be inefficient (see the note in the example below). - Custom collectors do not work (e.g. cpu and memory metrics) - Gauges cannot use `set_function` - Info and Enum metrics do not work @@ -49,7 +52,7 @@ MY_COUNTER = Counter('my_counter', 'Description of my counter') # Expose metrics.
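+# Note: support_collectors_without_names=True is what allows metric filtering
+# (restricted_registry() / the 'name[]' query parameter) to reach collectors
+# such as MultiProcessCollector, which register no metric names up front.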
def app(environ, start_response): - registry = CollectorRegistry() + registry = CollectorRegistry(support_collectors_without_names=True) multiprocess.MultiProcessCollector(registry) data = generate_latest(registry) status = '200 OK' diff --git a/mypy.ini b/mypy.ini index fe372d07..3aa142c1 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,5 +1,5 @@ [mypy] -exclude = prometheus_client/decorator.py|prometheus_client/twisted|tests/test_twisted.py +exclude = prometheus_client/decorator.py|prometheus_client/twisted|tests/test_twisted.py|prometheus_client/django|tests/test_django.py implicit_reexport = False disallow_incomplete_defs = True diff --git a/prometheus_client/aiohttp/__init__.py b/prometheus_client/aiohttp/__init__.py new file mode 100644 index 00000000..9e5da157 --- /dev/null +++ b/prometheus_client/aiohttp/__init__.py @@ -0,0 +1,5 @@ +from .exposition import make_aiohttp_handler + +__all__ = [ + "make_aiohttp_handler", +] diff --git a/prometheus_client/aiohttp/exposition.py b/prometheus_client/aiohttp/exposition.py new file mode 100644 index 00000000..c1ae254d --- /dev/null +++ b/prometheus_client/aiohttp/exposition.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from aiohttp import hdrs, web +from aiohttp.typedefs import Handler + +from ..exposition import _bake_output +from ..registry import Collector, REGISTRY + + +def make_aiohttp_handler( + registry: Collector = REGISTRY, + disable_compression: bool = False, +) -> Handler: + """Create an aiohttp handler which serves the metrics from a registry.""" + + async def prometheus_handler(request: web.Request) -> web.Response: + # Prepare parameters + params = {key: request.query.getall(key) for key in request.query.keys()} + accept_header = ",".join(request.headers.getall(hdrs.ACCEPT, [])) + accept_encoding_header = "" + # Bake output + status, headers, output = _bake_output( + registry, + accept_header, + accept_encoding_header, + params, + # always disable _bake_output's gzip; AIOHTTP's own compression is applied below + disable_compression=True, + ) + response = web.Response( + status=int(status.split(" ")[0]), + headers=headers, + body=output, + ) + if not disable_compression: + response.enable_compression() + return response + + return prometheus_handler diff --git a/prometheus_client/asgi.py b/prometheus_client/asgi.py index affd9844..6e527ca9 100644 --- a/prometheus_client/asgi.py +++ b/prometheus_client/asgi.py @@ -2,10 +2,10 @@ from urllib.parse import parse_qs from .exposition import _bake_output -from .registry import CollectorRegistry, REGISTRY +from .registry import Collector, REGISTRY -def make_asgi_app(registry: CollectorRegistry = REGISTRY, disable_compression: bool = False) -> Callable: +def make_asgi_app(registry: Collector = REGISTRY, disable_compression: bool = False) -> Callable: """Create a ASGI app which serves the metrics from a registry.""" async def prometheus_app(scope, receive, send): diff --git a/prometheus_client/bridge/graphite.py b/prometheus_client/bridge/graphite.py index 8cadbedc..235324b2 100755 --- a/prometheus_client/bridge/graphite.py +++ b/prometheus_client/bridge/graphite.py @@ -8,7 +8,7 @@ from timeit import default_timer from typing import Callable, Tuple -from ..registry import CollectorRegistry, REGISTRY +from ..registry import Collector, REGISTRY # Roughly, have to keep to what works as a file name. # We also remove periods, so labels can be distinguished.
@@ -48,7 +48,7 @@ def run(self): class GraphiteBridge: def __init__(self, address: Tuple[str, int], - registry: CollectorRegistry = REGISTRY, + registry: Collector = REGISTRY, timeout_seconds: float = 30, _timer: Callable[[], float] = time.time, tags: bool = False, diff --git a/prometheus_client/django/__init__.py b/prometheus_client/django/__init__.py new file mode 100644 index 00000000..280dbfb0 --- /dev/null +++ b/prometheus_client/django/__init__.py @@ -0,0 +1,5 @@ +from .exposition import PrometheusDjangoView + +__all__ = [ + "PrometheusDjangoView", +] diff --git a/prometheus_client/django/exposition.py b/prometheus_client/django/exposition.py new file mode 100644 index 00000000..71fc8d8a --- /dev/null +++ b/prometheus_client/django/exposition.py @@ -0,0 +1,43 @@ +import os + +from django.http import HttpResponse +from django.views import View + +import prometheus_client +from prometheus_client import multiprocess +from prometheus_client.exposition import _bake_output + + +class PrometheusDjangoView(View): + multiprocess_mode: bool = "PROMETHEUS_MULTIPROC_DIR" in os.environ or "prometheus_multiproc_dir" in os.environ + registry: prometheus_client.CollectorRegistry = None + + def get(self, request, *args, **kwargs): + if self.registry is None: + if self.multiprocess_mode: + self.registry = prometheus_client.CollectorRegistry() + multiprocess.MultiProcessCollector(self.registry) + else: + self.registry = prometheus_client.REGISTRY + accept_header = request.headers.get("Accept") + accept_encoding_header = request.headers.get("Accept-Encoding") + # Bake output + status, headers, output = _bake_output( + registry=self.registry, + accept_header=accept_header, + accept_encoding_header=accept_encoding_header, + params=request.GET, + disable_compression=False, + ) + status = int(status.split(" ")[0]) + return HttpResponse( + output, + status=status, + headers=headers, + ) + + def options(self, request, *args, **kwargs): + return HttpResponse( + status=200, + headers={"Allow": "OPTIONS,GET"}, + ) diff --git a/prometheus_client/exposition.py b/prometheus_client/exposition.py index 93285804..2d402a0f 100644 --- a/prometheus_client/exposition.py +++ b/prometheus_client/exposition.py @@ -9,7 +9,9 @@ import ssl import sys import threading -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import ( + Any, Callable, Dict, List, Literal, Optional, Sequence, Tuple, Union, +) from urllib.error import HTTPError from urllib.parse import parse_qs, quote_plus, urlparse from urllib.request import ( @@ -18,11 +20,16 @@ ) from wsgiref.simple_server import make_server, WSGIRequestHandler, WSGIServer -from packaging.version import Version - from .openmetrics import exposition as openmetrics -from .registry import CollectorRegistry, REGISTRY -from .utils import floatToGoString +from .registry import Collector, REGISTRY +from .utils import floatToGoString, parse_version + +try: + import snappy # type: ignore + SNAPPY_AVAILABLE = True +except ImportError: + snappy = None # type: ignore + SNAPPY_AVAILABLE = False __all__ = ( 'CONTENT_TYPE_LATEST', @@ -48,6 +55,7 @@ """Content type of the latest format""" CONTENT_TYPE_LATEST = CONTENT_TYPE_PLAIN_1_0_0 +CompressionType = Optional[Literal['gzip', 'snappy']] class _PrometheusRedirectHandler(HTTPRedirectHandler): @@ -120,7 +128,7 @@ def _bake_output(registry, accept_header, accept_encoding_header, params, disabl return '200 OK', headers, output -def make_wsgi_app(registry: CollectorRegistry = REGISTRY, disable_compression: bool = 
False) -> Callable: +def make_wsgi_app(registry: Collector = REGISTRY, disable_compression: bool = False) -> Callable: """Create a WSGI app which serves the metrics from a registry.""" def prometheus_app(environ, start_response): @@ -225,7 +233,7 @@ def _get_ssl_ctx( def start_wsgi_server( port: int, addr: str = '0.0.0.0', - registry: CollectorRegistry = REGISTRY, + registry: Collector = REGISTRY, certfile: Optional[str] = None, keyfile: Optional[str] = None, client_cafile: Optional[str] = None, @@ -254,12 +262,12 @@ class TmpServer(ThreadingWSGIServer): start_http_server = start_wsgi_server -def generate_latest(registry: CollectorRegistry = REGISTRY, escaping: str = openmetrics.UNDERSCORES) -> bytes: +def generate_latest(registry: Collector = REGISTRY, escaping: str = openmetrics.UNDERSCORES) -> bytes: """ Generates the exposition format using the basic Prometheus text format. Params: - registry: CollectorRegistry to export data from. + registry: Collector to export data from. escaping: Escaping scheme used for metric and label names. Returns: UTF-8 encoded string containing the metrics in text format. @@ -332,7 +340,7 @@ def sample_line(samples): return ''.join(output).encode('utf-8') -def choose_encoder(accept_header: str) -> Tuple[Callable[[CollectorRegistry], bytes], str]: +def choose_encoder(accept_header: str) -> Tuple[Callable[[Collector], bytes], str]: # Python client library accepts a narrower range of content-types than # Prometheus does. accept_header = accept_header or '' @@ -346,7 +354,7 @@ def choose_encoder(accept_header: str) -> Tuple[Callable[[CollectorRegistry], by # mimetype. if not version: return (partial(openmetrics.generate_latest, escaping=openmetrics.UNDERSCORES, version="1.0.0"), openmetrics.CONTENT_TYPE_LATEST) - if version and Version(version) >= Version('1.0.0'): + if version and parse_version(version) >= (1, 0, 0): return (partial(openmetrics.generate_latest, escaping=escaping, version=version), f'application/openmetrics-text; version={version}; charset=utf-8; escaping=' + str(escaping)) elif accepted.split(';')[0].strip() == 'text/plain': @@ -355,7 +363,7 @@ def choose_encoder(accept_header: str) -> Tuple[Callable[[CollectorRegistry], by escaping = _get_escaping(toks) # Only return an escaping header if we have a good version and # mimetype. - if version and Version(version) >= Version('1.0.0'): + if version and parse_version(version) >= (1, 0, 0): return (partial(generate_latest, escaping=escaping), CONTENT_TYPE_LATEST + '; escaping=' + str(escaping)) return generate_latest, CONTENT_TYPE_PLAIN_0_0_4 @@ -410,7 +418,7 @@ def gzip_accepted(accept_encoding_header: str) -> bool: class MetricsHandler(BaseHTTPRequestHandler): """HTTP handler that gives metrics from ``REGISTRY``.""" - registry: CollectorRegistry = REGISTRY + registry: Collector = REGISTRY def do_GET(self) -> None: # Prepare parameters @@ -431,7 +439,7 @@ def log_message(self, format: str, *args: Any) -> None: """Log nothing.""" @classmethod - def factory(cls, registry: CollectorRegistry) -> type: + def factory(cls, registry: Collector) -> type: """Returns a dynamic MetricsHandler class tied to the passed registry. 
""" @@ -446,7 +454,7 @@ def factory(cls, registry: CollectorRegistry) -> type: return MyMetricsHandler -def write_to_textfile(path: str, registry: CollectorRegistry, escaping: str = openmetrics.ALLOWUTF8, tmpdir: Optional[str] = None) -> None: +def write_to_textfile(path: str, registry: Collector, escaping: str = openmetrics.ALLOWUTF8, tmpdir: Optional[str] = None) -> None: """Write metrics to the given path. This is intended for use with the Node exporter textfile collector. @@ -594,10 +602,11 @@ def tls_auth_handler( def push_to_gateway( gateway: str, job: str, - registry: CollectorRegistry, + registry: Collector, grouping_key: Optional[Dict[str, Any]] = None, timeout: Optional[float] = 30, handler: Callable = default_handler, + compression: CompressionType = None, ) -> None: """Push metrics to the given pushgateway. @@ -605,7 +614,7 @@ def push_to_gateway( 'http://pushgateway.local', or 'pushgateway.local'. Scheme defaults to 'http' if none is provided `job` is the job label to be attached to all pushed metrics - `registry` is an instance of CollectorRegistry + `registry` is a Collector, normally an instance of CollectorRegistry `grouping_key` please see the pushgateway documentation for details. Defaults to None `timeout` is how long push will attempt to connect before giving up. @@ -634,19 +643,22 @@ def push_to_gateway( failure. 'content' is the data which should be used to form the HTTP Message Body. + `compression` selects the payload compression. Supported values are 'gzip' + and 'snappy'. Defaults to None (no compression). This overwrites all metrics with the same job and grouping_key. This uses the PUT HTTP method.""" - _use_gateway('PUT', gateway, job, registry, grouping_key, timeout, handler) + _use_gateway('PUT', gateway, job, registry, grouping_key, timeout, handler, compression) def pushadd_to_gateway( gateway: str, job: str, - registry: Optional[CollectorRegistry], + registry: Optional[Collector], grouping_key: Optional[Dict[str, Any]] = None, timeout: Optional[float] = 30, handler: Callable = default_handler, + compression: CompressionType = None, ) -> None: """PushAdd metrics to the given pushgateway. @@ -654,7 +666,7 @@ def pushadd_to_gateway( 'http://pushgateway.local', or 'pushgateway.local'. Scheme defaults to 'http' if none is provided `job` is the job label to be attached to all pushed metrics - `registry` is an instance of CollectorRegistry + `registry` is a Collector, normally an instance of CollectorRegistry `grouping_key` please see the pushgateway documentation for details. Defaults to None `timeout` is how long push will attempt to connect before giving up. @@ -665,10 +677,12 @@ def pushadd_to_gateway( will be carried out by a default handler. See the 'prometheus_client.push_to_gateway' documentation for implementation requirements. + `compression` selects the payload compression. Supported values are 'gzip' + and 'snappy'. Defaults to None (no compression). This replaces metrics with the same name, job and grouping_key. 
This uses the POST HTTP method.""" - _use_gateway('POST', gateway, job, registry, grouping_key, timeout, handler) + _use_gateway('POST', gateway, job, registry, grouping_key, timeout, handler, compression) def delete_from_gateway( @@ -704,10 +718,11 @@ def _use_gateway( method: str, gateway: str, job: str, - registry: Optional[CollectorRegistry], + registry: Optional[Collector], grouping_key: Optional[Dict[str, Any]], timeout: Optional[float], handler: Callable, + compression: CompressionType = None, ) -> None: gateway_url = urlparse(gateway) # See https://bugs.python.org/issue27657 for details on urlparse in py>=3.7.6. @@ -717,30 +732,60 @@ def _use_gateway( gateway = gateway.rstrip('/') url = '{}/metrics/{}/{}'.format(gateway, *_escape_grouping_key("job", job)) - data = b'' - if method != 'DELETE': - if registry is None: - registry = REGISTRY - data = generate_latest(registry) - if grouping_key is None: grouping_key = {} url += ''.join( '/{}/{}'.format(*_escape_grouping_key(str(k), str(v))) for k, v in sorted(grouping_key.items())) + data = b'' + headers: List[Tuple[str, str]] = [] + if method != 'DELETE': + if registry is None: + registry = REGISTRY + data = generate_latest(registry) + data, headers = _compress_payload(data, compression) + else: + # DELETE requests still need Content-Type header per test expectations + headers = [('Content-Type', CONTENT_TYPE_PLAIN_0_0_4)] + if compression is not None: + raise ValueError('Compression is not supported for DELETE requests.') + handler( url=url, method=method, timeout=timeout, - headers=[('Content-Type', CONTENT_TYPE_PLAIN_0_0_4)], data=data, + headers=headers, data=data, )() +def _compress_payload(data: bytes, compression: CompressionType) -> Tuple[bytes, List[Tuple[str, str]]]: + headers = [('Content-Type', CONTENT_TYPE_PLAIN_0_0_4)] + if compression is None: + return data, headers + + encoding = compression.lower() + if encoding == 'gzip': + headers.append(('Content-Encoding', 'gzip')) + return gzip.compress(data), headers + if encoding == 'snappy': + if not SNAPPY_AVAILABLE: + raise RuntimeError('Snappy compression requires the python-snappy package to be installed.') + headers.append(('Content-Encoding', 'snappy')) + compressor = snappy.StreamCompressor() + compressed = compressor.compress(data) + flush = getattr(compressor, 'flush', None) + if callable(flush): + compressed += flush() + return compressed, headers + raise ValueError(f"Unsupported compression type: {compression}") + + def _escape_grouping_key(k, v): if v == "": # Per https://github.com/prometheus/pushgateway/pull/346. return k + "@base64", "=" - elif '/' in v: + elif '/' in v or ' ' in v: # Added in Pushgateway 0.9.0. 
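+        # e.g. the grouping key {'label': 'value with spaces'} is encoded as
+        # .../label@base64/dmFsdWUgd2l0aCBzcGFjZXM= in the push URL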
+ # Use base64 encoding for values containing slashes or spaces return k + "@base64", base64.urlsafe_b64encode(v.encode("utf-8")).decode("utf-8") else: return k, quote_plus(v) diff --git a/prometheus_client/metrics.py b/prometheus_client/metrics.py index b9f25ffc..4c53b26b 100644 --- a/prometheus_client/metrics.py +++ b/prometheus_client/metrics.py @@ -109,6 +109,10 @@ def __init__(self: T, registry: Optional[CollectorRegistry] = REGISTRY, _labelvalues: Optional[Sequence[str]] = None, ) -> None: + + self._original_name = name + self._namespace = namespace + self._subsystem = subsystem self._name = _build_full_name(self._type, name, namespace, subsystem, unit) self._labelnames = _validate_labelnames(self, labelnames) self._labelvalues = tuple(_labelvalues or ()) @@ -176,13 +180,25 @@ def labels(self: T, *labelvalues: Any, **labelkwargs: Any) -> T: labelvalues = tuple(str(l) for l in labelvalues) with self._lock: if labelvalues not in self._metrics: + + original_name = getattr(self, '_original_name', self._name) + namespace = getattr(self, '_namespace', '') + subsystem = getattr(self, '_subsystem', '') + unit = getattr(self, '_unit', '') + + child_kwargs = dict(self._kwargs) if self._kwargs else {} + for k in ('namespace', 'subsystem', 'unit'): + child_kwargs.pop(k, None) + self._metrics[labelvalues] = self.__class__( - self._name, + original_name, documentation=self._documentation, labelnames=self._labelnames, - unit=self._unit, + namespace=namespace, + subsystem=subsystem, + unit=unit, _labelvalues=labelvalues, - **self._kwargs + **child_kwargs ) return self._metrics[labelvalues] @@ -203,6 +219,39 @@ def remove(self, *labelvalues: Any) -> None: if labelvalues in self._metrics: del self._metrics[labelvalues] + def remove_by_labels(self, labels: dict[str, str]) -> None: + """Remove all series whose labelset partially matches the given labels.""" + if 'prometheus_multiproc_dir' in os.environ or 'PROMETHEUS_MULTIPROC_DIR' in os.environ: + warnings.warn( + "Removal of labels has not been implemented in multi-process mode yet.", + UserWarning + ) + + if not self._labelnames: + raise ValueError('No label names were set when constructing %s' % self) + + if not isinstance(labels, dict): + raise TypeError("labels must be a dict of {label_name: label_value}") + + if not labels: + return # no operation + + invalid = [k for k in labels.keys() if k not in self._labelnames] + if invalid: + raise ValueError( + 'Unknown label names: %s; expected %s' % (invalid, self._labelnames) + ) + + pos_filter = {self._labelnames.index(k): str(v) for k, v in labels.items()} + + with self._lock: + # list(...) 
to avoid "dictionary changed size during iteration" + for lv in list(self._metrics.keys()): + if all(lv[pos] == want for pos, want in pos_filter.items()): + # pop with default avoids KeyError if concurrently removed + self._metrics.pop(lv, None) + + def clear(self) -> None: """Remove all labelsets from the metric""" if 'prometheus_multiproc_dir' in os.environ or 'PROMETHEUS_MULTIPROC_DIR' in os.environ: diff --git a/prometheus_client/multiprocess.py b/prometheus_client/multiprocess.py index 2682190a..db55874e 100644 --- a/prometheus_client/multiprocess.py +++ b/prometheus_client/multiprocess.py @@ -88,32 +88,42 @@ def _parse_key(key): @staticmethod def _accumulate_metrics(metrics, accumulate): for metric in metrics.values(): - samples = defaultdict(float) - sample_timestamps = defaultdict(float) + samples = defaultdict(lambda: defaultdict(float)) + sample_timestamps = defaultdict(lambda: defaultdict(float)) buckets = defaultdict(lambda: defaultdict(float)) - samples_setdefault = samples.setdefault for s in metric.samples: name, labels, value, timestamp, exemplar, native_histogram_value = s + + if ( + metric.type == 'gauge' + and metric._multiprocess_mode in ( + 'min', 'livemin', + 'max', 'livemax', + 'sum', 'livesum', + 'mostrecent', 'livemostrecent', + ) + ): + labels = tuple(l for l in labels if l[0] != 'pid') + if metric.type == 'gauge': - without_pid_key = (name, tuple(l for l in labels if l[0] != 'pid')) if metric._multiprocess_mode in ('min', 'livemin'): - current = samples_setdefault(without_pid_key, value) + current = samples[labels].setdefault((name, labels), value) if value < current: - samples[without_pid_key] = value + samples[labels][(name, labels)] = value elif metric._multiprocess_mode in ('max', 'livemax'): - current = samples_setdefault(without_pid_key, value) + current = samples[labels].setdefault((name, labels), value) if value > current: - samples[without_pid_key] = value + samples[labels][(name, labels)] = value elif metric._multiprocess_mode in ('sum', 'livesum'): - samples[without_pid_key] += value + samples[labels][(name, labels)] += value elif metric._multiprocess_mode in ('mostrecent', 'livemostrecent'): - current_timestamp = sample_timestamps[without_pid_key] + current_timestamp = sample_timestamps[labels][name] timestamp = float(timestamp or 0) if current_timestamp < timestamp: - samples[without_pid_key] = value - sample_timestamps[without_pid_key] = timestamp + samples[labels][(name, labels)] = value + sample_timestamps[labels][name] = timestamp else: # all/liveall - samples[(name, labels)] = value + samples[labels][(name, labels)] = value elif metric.type == 'histogram': # A for loop with early exit is faster than a genexpr @@ -127,10 +137,10 @@ def _accumulate_metrics(metrics, accumulate): break else: # did not find the `le` key # _sum/_count - samples[(name, labels)] += value + samples[labels][(name, labels)] += value else: # Counter and Summary. - samples[(name, labels)] += value + samples[labels][(name, labels)] += value # Accumulate bucket values. if metric.type == 'histogram': @@ -143,14 +153,17 @@ def _accumulate_metrics(metrics, accumulate): ) if accumulate: acc += value - samples[sample_key] = acc + samples[labels][sample_key] = acc else: - samples[sample_key] = value + samples[labels][sample_key] = value if accumulate: - samples[(metric.name + '_count', labels)] = acc + samples[labels][(metric.name + '_count', labels)] = acc # Convert to correct sample format. 
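+        # Iterating the outer dict (keyed by labelset) keeps each series'
+        # samples contiguous, e.g. a histogram labelset's _sum, buckets and
+        # _count are emitted together.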
- metric.samples = [Sample(name_, dict(labels), value) for (name_, labels), value in samples.items()] + metric.samples = [] + for _, samples_by_labels in samples.items(): + for (name_, labels), value in samples_by_labels.items(): + metric.samples.append(Sample(name_, dict(labels), value)) return metrics.values() def collect(self): diff --git a/prometheus_client/openmetrics/exposition.py b/prometheus_client/openmetrics/exposition.py index 1dc05c5b..5e69e463 100644 --- a/prometheus_client/openmetrics/exposition.py +++ b/prometheus_client/openmetrics/exposition.py @@ -4,9 +4,7 @@ from sys import maxunicode from typing import Callable -from packaging.version import Version - -from ..utils import floatToGoString +from ..utils import floatToGoString, parse_version from ..validation import ( _is_valid_legacy_labelname, _is_valid_legacy_metric_name, ) @@ -94,7 +92,7 @@ def generate_latest(registry, escaping=UNDERSCORES, version="1.0.0"): timestamp = f' {s.timestamp}' # Skip native histogram samples entirely if version < 2.0.0 - if s.native_histogram and Version(version) < Version('2.0.0'): + if s.native_histogram and parse_version(version) < (2, 0, 0): continue native_histogram = '' diff --git a/prometheus_client/registry.py b/prometheus_client/registry.py index 8de4ce91..c2b55d15 100644 --- a/prometheus_client/registry.py +++ b/prometheus_client/registry.py @@ -1,24 +1,21 @@ -from abc import ABC, abstractmethod import copy from threading import Lock -from typing import Dict, Iterable, List, Optional +from typing import Dict, Iterable, List, Optional, Protocol from .metrics_core import Metric -# Ideally this would be a Protocol, but Protocols are only available in Python >= 3.8. -class Collector(ABC): - @abstractmethod +class Collector(Protocol): def collect(self) -> Iterable[Metric]: - pass + """Collect metrics.""" -class _EmptyCollector(Collector): +class _EmptyCollector: def collect(self) -> Iterable[Metric]: return [] -class CollectorRegistry(Collector): +class CollectorRegistry: """Metric collector registry. Collectors must have a no-argument method 'collect' that returns a list of @@ -26,12 +23,15 @@ class CollectorRegistry(Collector): exposition formats. 
""" - def __init__(self, auto_describe: bool = False, target_info: Optional[Dict[str, str]] = None): + def __init__(self, auto_describe: bool = False, target_info: Optional[Dict[str, str]] = None, + support_collectors_without_names: bool = False): self._collector_to_names: Dict[Collector, List[str]] = {} self._names_to_collectors: Dict[str, Collector] = {} self._auto_describe = auto_describe self._lock = Lock() self._target_info: Optional[Dict[str, str]] = {} + self._support_collectors_without_names = support_collectors_without_names + self._collectors_without_names: List[Collector] = [] self.set_target_info(target_info) def register(self, collector: Collector) -> None: @@ -46,6 +46,8 @@ def register(self, collector: Collector) -> None: for name in names: self._names_to_collectors[name] = collector self._collector_to_names[collector] = names + if self._support_collectors_without_names and not names: + self._collectors_without_names.append(collector) def unregister(self, collector: Collector) -> None: """Remove a collector from the registry.""" @@ -148,7 +150,7 @@ def __init__(self, names: Iterable[str], registry: CollectorRegistry): self._registry = registry def collect(self) -> Iterable[Metric]: - collectors = set() + collectors = set(self._registry._collectors_without_names) target_info_metric = None with self._registry._lock: if 'target_info' in self._name_set and self._registry._target_info: diff --git a/prometheus_client/utils.py b/prometheus_client/utils.py index 0d2b0948..87b75ca8 100644 --- a/prometheus_client/utils.py +++ b/prometheus_client/utils.py @@ -1,4 +1,5 @@ import math +from typing import Union INF = float("inf") MINUS_INF = float("-inf") @@ -22,3 +23,14 @@ def floatToGoString(d): mantissa = f'{s[0]}.{s[1:dot]}{s[dot + 1:]}'.rstrip('0.') return f'{mantissa}e+0{dot - 1}' return s + + +def parse_version(version_str: str) -> tuple[Union[int, str], ...]: + version: list[Union[int, str]] = [] + for part in version_str.split('.'): + try: + version.append(int(part)) + except ValueError: + version.append(part) + + return tuple(version) diff --git a/pyproject.toml b/pyproject.toml index af4c7f2f..ed3ef389 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "prometheus_client" -version = "0.23.0" +version = "0.24.1" description = "Python client for the Prometheus monitoring system." 
readme = "README.md" license = "Apache-2.0 AND BSD-2-Clause" @@ -34,6 +34,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Monitoring", @@ -43,6 +44,12 @@ classifiers = [ twisted = [ "twisted", ] +aiohttp = [ + "aiohttp", +] +django = [ + "django", +] [project.urls] Homepage = "https://github.com/prometheus/client_python" diff --git a/tests/test_aiohttp.py b/tests/test_aiohttp.py new file mode 100644 index 00000000..e4fa368b --- /dev/null +++ b/tests/test_aiohttp.py @@ -0,0 +1,192 @@ +from __future__ import annotations + +import gzip +from typing import TYPE_CHECKING +from unittest import skipUnless + +from prometheus_client import CollectorRegistry, Counter +from prometheus_client.exposition import CONTENT_TYPE_PLAIN_0_0_4 + +try: + from aiohttp import ClientResponse, hdrs, web + from aiohttp.test_utils import AioHTTPTestCase + + from prometheus_client.aiohttp import make_aiohttp_handler + + AIOHTTP_INSTALLED = True +except ImportError: + if TYPE_CHECKING: + assert False + + from unittest import IsolatedAsyncioTestCase as AioHTTPTestCase + + AIOHTTP_INSTALLED = False + + +class AioHTTPTest(AioHTTPTestCase): + @skipUnless(AIOHTTP_INSTALLED, "AIOHTTP is not installed") + def setUp(self) -> None: + self.registry = CollectorRegistry() + + async def get_application(self) -> web.Application: + app = web.Application() + # The AioHTTPTestCase requires that applications be static, so we need + # both versions to be available so the test can choose between them + app.router.add_get("/metrics", make_aiohttp_handler(self.registry)) + app.router.add_get( + "/metrics_uncompressed", + make_aiohttp_handler(self.registry, disable_compression=True), + ) + return app + + def increment_metrics( + self, + metric_name: str, + help_text: str, + increments: int, + ) -> None: + c = Counter(metric_name, help_text, registry=self.registry) + for _ in range(increments): + c.inc() + + def assert_metrics( + self, + output: str, + metric_name: str, + help_text: str, + increments: int, + ) -> None: + self.assertIn("# HELP " + metric_name + "_total " + help_text + "\n", output) + self.assertIn("# TYPE " + metric_name + "_total counter\n", output) + self.assertIn(metric_name + "_total " + str(increments) + ".0\n", output) + + def assert_not_metrics( + self, + output: str, + metric_name: str, + help_text: str, + increments: int, + ) -> None: + self.assertNotIn("# HELP " + metric_name + "_total " + help_text + "\n", output) + self.assertNotIn("# TYPE " + metric_name + "_total counter\n", output) + self.assertNotIn(metric_name + "_total " + str(increments) + ".0\n", output) + + async def assert_outputs( + self, + response: ClientResponse, + metric_name: str, + help_text: str, + increments: int, + ) -> None: + self.assertIn( + CONTENT_TYPE_PLAIN_0_0_4, + response.headers.getall(hdrs.CONTENT_TYPE), + ) + output = await response.text() + self.assert_metrics(output, metric_name, help_text, increments) + + async def validate_metrics( + self, + metric_name: str, + help_text: str, + increments: int, + ) -> None: + """ + AIOHTTP handler serves the metrics from the provided registry. 
+ """ + self.increment_metrics(metric_name, help_text, increments) + async with self.client.get("/metrics") as response: + response.raise_for_status() + await self.assert_outputs(response, metric_name, help_text, increments) + + async def test_report_metrics_1(self): + await self.validate_metrics("counter", "A counter", 2) + + async def test_report_metrics_2(self): + await self.validate_metrics("counter", "Another counter", 3) + + async def test_report_metrics_3(self): + await self.validate_metrics("requests", "Number of requests", 5) + + async def test_report_metrics_4(self): + await self.validate_metrics("failed_requests", "Number of failed requests", 7) + + async def test_gzip(self): + # Increment a metric. + metric_name = "counter" + help_text = "A counter" + increments = 2 + self.increment_metrics(metric_name, help_text, increments) + + async with self.client.get( + "/metrics", + auto_decompress=False, + headers={hdrs.ACCEPT_ENCODING: "gzip"}, + ) as response: + response.raise_for_status() + self.assertIn(hdrs.CONTENT_ENCODING, response.headers) + self.assertIn("gzip", response.headers.getall(hdrs.CONTENT_ENCODING)) + body = await response.read() + output = gzip.decompress(body).decode("utf8") + self.assert_metrics(output, metric_name, help_text, increments) + + async def test_gzip_disabled(self): + # Increment a metric. + metric_name = "counter" + help_text = "A counter" + increments = 2 + self.increment_metrics(metric_name, help_text, increments) + + async with self.client.get( + "/metrics_uncompressed", + auto_decompress=False, + headers={hdrs.ACCEPT_ENCODING: "gzip"}, + ) as response: + response.raise_for_status() + self.assertNotIn(hdrs.CONTENT_ENCODING, response.headers) + output = await response.text() + self.assert_metrics(output, metric_name, help_text, increments) + + async def test_openmetrics_encoding(self): + """Response content type is application/openmetrics-text when appropriate Accept header is in request""" + async with self.client.get( + "/metrics", + auto_decompress=False, + headers={hdrs.ACCEPT: "application/openmetrics-text; version=1.0.0"}, + ) as response: + response.raise_for_status() + self.assertEqual( + response.headers.getone(hdrs.CONTENT_TYPE).split(";", maxsplit=1)[0], + "application/openmetrics-text", + ) + + async def test_plaintext_encoding(self): + """Response content type is text/plain when Accept header is missing in request""" + async with self.client.get("/metrics") as response: + response.raise_for_status() + self.assertEqual( + response.headers.getone(hdrs.CONTENT_TYPE).split(";", maxsplit=1)[0], + "text/plain", + ) + + async def test_qs_parsing(self): + """Only metrics that match the 'name[]' query string param appear""" + + metrics = [("asdf", "first test metric", 1), ("bsdf", "second test metric", 2)] + + for m in metrics: + self.increment_metrics(*m) + + for i_1 in range(len(metrics)): + async with self.client.get( + "/metrics", + params={"name[]": f"{metrics[i_1][0]}_total"}, + ) as response: + output = await response.text() + self.assert_metrics(output, *metrics[i_1]) + + for i_2 in range(len(metrics)): + if i_1 == i_2: + continue + + self.assert_not_metrics(output, *metrics[i_2]) diff --git a/tests/test_asgi.py b/tests/test_asgi.py index 386ff598..6e795e21 100644 --- a/tests/test_asgi.py +++ b/tests/test_asgi.py @@ -1,19 +1,11 @@ +import asyncio import gzip -from unittest import skipUnless, TestCase +from unittest import TestCase -from prometheus_client import CollectorRegistry, Counter -from prometheus_client.exposition import 
CONTENT_TYPE_PLAIN_0_0_4 - -try: - # Python >3.5 only - import asyncio - - from asgiref.testing import ApplicationCommunicator +from asgiref.testing import ApplicationCommunicator - from prometheus_client import make_asgi_app - HAVE_ASYNCIO_AND_ASGI = True -except ImportError: - HAVE_ASYNCIO_AND_ASGI = False +from prometheus_client import CollectorRegistry, Counter, make_asgi_app +from prometheus_client.exposition import CONTENT_TYPE_PLAIN_0_0_4 def setup_testing_defaults(scope): @@ -33,7 +25,6 @@ def setup_testing_defaults(scope): class ASGITest(TestCase): - @skipUnless(HAVE_ASYNCIO_AND_ASGI, "Don't have asyncio/asgi installed.") def setUp(self): self.registry = CollectorRegistry() self.captured_status = None @@ -45,7 +36,7 @@ def setUp(self): def tearDown(self): if self.communicator: - asyncio.get_event_loop().run_until_complete( + asyncio.new_event_loop().run_until_complete( self.communicator.wait() ) @@ -53,7 +44,7 @@ def seed_app(self, app): self.communicator = ApplicationCommunicator(app, self.scope) def send_input(self, payload): - asyncio.get_event_loop().run_until_complete( + asyncio.new_event_loop().run_until_complete( self.communicator.send_input(payload) ) @@ -61,7 +52,7 @@ def send_default_request(self): self.send_input({"type": "http.request", "body": b""}) def get_output(self): - output = asyncio.get_event_loop().run_until_complete( + output = asyncio.new_event_loop().run_until_complete( self.communicator.receive_output(0) ) return output @@ -229,6 +220,35 @@ def test_qs_parsing(self): self.assert_not_metrics(output, *metrics[i_2]) - asyncio.get_event_loop().run_until_complete( + asyncio.new_event_loop().run_until_complete( self.communicator.wait() ) + + def test_qs_parsing_multi(self): + """Only metrics that match the 'name[]' query string param appear""" + + app = make_asgi_app(self.registry) + metrics = [ + ("asdf", "first test metric", 1), + ("bsdf", "second test metric", 2), + ("csdf", "third test metric", 3) + ] + + for m in metrics: + self.increment_metrics(*m) + + self.seed_app(app) + self.scope['query_string'] = "&".join([f"name[]={m[0]}_total" for m in metrics[0:2]]).encode("utf-8") + self.send_default_request() + + outputs = self.get_all_output() + response_body = outputs[1] + output = response_body['body'].decode('utf8') + + self.assert_metrics(output, *metrics[0]) + self.assert_metrics(output, *metrics[1]) + self.assert_not_metrics(output, *metrics[2]) + + asyncio.new_event_loop().run_until_complete( + self.communicator.wait() + ) diff --git a/tests/test_core.py b/tests/test_core.py index 284bce09..66492c6f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -630,6 +630,40 @@ def test_labels_coerced_to_string(self): self.counter.remove(None) self.assertEqual(None, self.registry.get_sample_value('c_total', {'l': 'None'})) + def test_remove_by_labels(self): + from prometheus_client import Counter + + c = Counter('c2', 'help', ['tenant', 'endpoint'], registry=self.registry) + c.labels('acme', '/').inc() + c.labels('acme', '/checkout').inc() + c.labels('globex', '/').inc() + + ret = c.remove_by_labels({'tenant': 'acme'}) + self.assertIsNone(ret) + + self.assertIsNone(self.registry.get_sample_value('c2_total', {'tenant': 'acme', 'endpoint': '/'})) + self.assertIsNone(self.registry.get_sample_value('c2_total', {'tenant': 'acme', 'endpoint': '/checkout'})) + self.assertEqual(1, self.registry.get_sample_value('c2_total', {'tenant': 'globex', 'endpoint': '/'})) + + + def test_remove_by_labels_invalid_label_name(self): + from prometheus_client import Counter + c = 
Counter('c3', 'help', ['tenant', 'endpoint'], registry=self.registry) + c.labels('acme', '/').inc() + with self.assertRaises(ValueError): + c.remove_by_labels({'badkey': 'x'}) + + + def test_remove_by_labels_empty_is_noop(self): + from prometheus_client import Counter + c = Counter('c4', 'help', ['tenant', 'endpoint'], registry=self.registry) + c.labels('acme', '/').inc() + + ret = c.remove_by_labels({}) + self.assertIsNone(ret) + # Ensure the series is still present + self.assertEqual(1, self.registry.get_sample_value('c4_total', {'tenant': 'acme', 'endpoint': '/'})) + def test_non_string_labels_raises(self): class Test: __str__ = None @@ -990,6 +1024,24 @@ def test_restricted_registry_does_not_call_extra(self): self.assertEqual([m], list(registry.restricted_registry(['s_sum']).collect())) mock_collector.collect.assert_not_called() + def test_restricted_registry_ignore_no_names_collectors(self): + from unittest.mock import MagicMock + registry = CollectorRegistry() + mock_collector = MagicMock() + mock_collector.describe.return_value = [] + registry.register(mock_collector) + self.assertEqual(list(registry.restricted_registry(['metric']).collect()), []) + mock_collector.collect.assert_not_called() + + def test_restricted_registry_collects_no_names_collectors(self): + from unittest.mock import MagicMock + registry = CollectorRegistry(support_collectors_without_names=True) + mock_collector = MagicMock() + mock_collector.describe.return_value = [] + registry.register(mock_collector) + self.assertEqual(list(registry.restricted_registry(['metric']).collect()), []) + mock_collector.collect.assert_called() + def test_restricted_registry_does_not_yield_while_locked(self): registry = CollectorRegistry(target_info={'foo': 'bar'}) Summary('s', 'help', registry=registry).observe(7) diff --git a/tests/test_django.py b/tests/test_django.py new file mode 100644 index 00000000..659bb3f6 --- /dev/null +++ b/tests/test_django.py @@ -0,0 +1,48 @@ +from unittest import skipUnless + +from prometheus_client import CollectorRegistry, Counter, generate_latest +from prometheus_client.openmetrics.exposition import ALLOWUTF8 + +try: + import django + from django.test import RequestFactory, TestCase + + from prometheus_client.django import PrometheusDjangoView + + HAVE_DJANGO = True +except ImportError: + from unittest import TestCase + + HAVE_DJANGO = False + +else: + from django.conf import settings + + if not settings.configured: + settings.configure( + DATABASES={ + "default": { + "ENGINE": "django.db.backends.sqlite3", + 'NAME': ':memory:' + } + }, + INSTALLED_APPS=[], + ) + django.setup() + + +class MetricsResourceTest(TestCase): + @skipUnless(HAVE_DJANGO, "Don't have django installed.") + def setUp(self): + self.registry = CollectorRegistry() + self.factory = RequestFactory() + + def test_reports_metrics(self): + c = Counter('cc', 'A counter', registry=self.registry) + c.inc() + + request = self.factory.get("/metrics") + + response = PrometheusDjangoView.as_view(registry=self.registry)(request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, generate_latest(self.registry, ALLOWUTF8)) diff --git a/tests/test_exposition.py b/tests/test_exposition.py index 3dd5e378..a3c97820 100644 --- a/tests/test_exposition.py +++ b/tests/test_exposition.py @@ -1,3 +1,4 @@ +import gzip from http.server import BaseHTTPRequestHandler, HTTPServer import os import threading @@ -300,6 +301,13 @@ def test_push_with_groupingkey_empty_label(self): 
self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_PLAIN_0_0_4) self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n') + def test_push_with_groupingkey_with_spaces(self): + push_to_gateway(self.address, "my_job", self.registry, {'label': 'value with spaces'}) + self.assertEqual(self.requests[0][0].command, 'PUT') + self.assertEqual(self.requests[0][0].path, '/metrics/job/my_job/label@base64/dmFsdWUgd2l0aCBzcGFjZXM=') + self.assertEqual(self.requests[0][0].headers.get('content-type'), CONTENT_TYPE_PLAIN_0_0_4) + self.assertEqual(self.requests[0][1], b'# HELP g help\n# TYPE g gauge\ng 0.0\n') + def test_push_with_complex_groupingkey(self): push_to_gateway(self.address, "my_job", self.registry, {'a': 9, 'b': 'a/ z'}) self.assertEqual(self.requests[0][0].command, 'PUT') @@ -404,6 +412,30 @@ def test_push_with_trailing_slash(self): self.assertNotIn('//', self.requests[0][0].path) + def test_push_with_gzip_compression(self): + push_to_gateway(self.address, "my_job", self.registry, compression='gzip') + request, body = self.requests[0] + self.assertEqual(request.headers.get('content-encoding'), 'gzip') + decompressed = gzip.decompress(body) + self.assertEqual(decompressed, b'# HELP g help\n# TYPE g gauge\ng 0.0\n') + + def test_push_with_snappy_compression(self): + snappy = pytest.importorskip('snappy') + push_to_gateway(self.address, "my_job", self.registry, compression='snappy') + request, body = self.requests[0] + self.assertEqual(request.headers.get('content-encoding'), 'snappy') + decompressor = snappy.StreamDecompressor() + decompressed = decompressor.decompress(body) + flush = getattr(decompressor, 'flush', None) + if callable(flush): + decompressed += flush() + self.assertEqual(decompressed, b'# HELP g help\n# TYPE g gauge\ng 0.0\n') + + def test_push_with_invalid_compression(self): + with self.assertRaisesRegex(ValueError, 'Unsupported compression type'): + push_to_gateway(self.address, "my_job", self.registry, compression='brotli') + self.assertEqual(self.requests, []) + def test_instance_ip_grouping_key(self): self.assertTrue('' != instance_ip_grouping_key()['instance']) diff --git a/tests/test_multiprocess.py b/tests/test_multiprocess.py index 77fd3d81..ee0c7423 100644 --- a/tests/test_multiprocess.py +++ b/tests/test_multiprocess.py @@ -52,7 +52,7 @@ def setUp(self): self.tempdir = tempfile.mkdtemp() os.environ['PROMETHEUS_MULTIPROC_DIR'] = self.tempdir values.ValueClass = MultiProcessValue(lambda: 123) - self.registry = CollectorRegistry() + self.registry = CollectorRegistry(support_collectors_without_names=True) self.collector = MultiProcessCollector(self.registry) @property @@ -276,10 +276,8 @@ def add_label(key, value): Sample('g', add_label('pid', '1'), 1.0), ]) - metrics['h'].samples.sort( - key=lambda x: (x[0], float(x[1].get('le', 0))) - ) expected_histogram = [ + Sample('h_sum', labels, 6.0), Sample('h_bucket', add_label('le', '0.005'), 0.0), Sample('h_bucket', add_label('le', '0.01'), 0.0), Sample('h_bucket', add_label('le', '0.025'), 0.0), @@ -296,11 +294,99 @@ def add_label(key, value): Sample('h_bucket', add_label('le', '10.0'), 2.0), Sample('h_bucket', add_label('le', '+Inf'), 2.0), Sample('h_count', labels, 2.0), - Sample('h_sum', labels, 6.0), ] self.assertEqual(metrics['h'].samples, expected_histogram) + def test_collect_histogram_ordering(self): + pid = 0 + values.ValueClass = MultiProcessValue(lambda: pid) + labels = {i: i for i in 'abcd'} + + def add_label(key, value): + l = labels.copy() + l[key] = value + 
return l + + h = Histogram('h', 'help', labelnames=['view'], registry=None) + + h.labels(view='view1').observe(1) + + pid = 1 + + h.labels(view='view1').observe(5) + h.labels(view='view2').observe(1) + + metrics = {m.name: m for m in self.collector.collect()} + + expected_histogram = [ + Sample('h_sum', {'view': 'view1'}, 6.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.005'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.01'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.025'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.05'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.075'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.1'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.25'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.5'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '0.75'}, 0.0), + Sample('h_bucket', {'view': 'view1', 'le': '1.0'}, 1.0), + Sample('h_bucket', {'view': 'view1', 'le': '2.5'}, 1.0), + Sample('h_bucket', {'view': 'view1', 'le': '5.0'}, 2.0), + Sample('h_bucket', {'view': 'view1', 'le': '7.5'}, 2.0), + Sample('h_bucket', {'view': 'view1', 'le': '10.0'}, 2.0), + Sample('h_bucket', {'view': 'view1', 'le': '+Inf'}, 2.0), + Sample('h_count', {'view': 'view1'}, 2.0), + Sample('h_sum', {'view': 'view2'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.005'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.01'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.025'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.05'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.075'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.1'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.25'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.5'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '0.75'}, 0.0), + Sample('h_bucket', {'view': 'view2', 'le': '1.0'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '2.5'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '5.0'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '7.5'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '10.0'}, 1.0), + Sample('h_bucket', {'view': 'view2', 'le': '+Inf'}, 1.0), + Sample('h_count', {'view': 'view2'}, 1.0), + ] + + self.assertEqual(metrics['h'].samples, expected_histogram) + + def test_restrict(self): + pid = 0 + values.ValueClass = MultiProcessValue(lambda: pid) + labels = {i: i for i in 'abcd'} + + def add_label(key, value): + l = labels.copy() + l[key] = value + return l + + c = Counter('c', 'help', labelnames=labels.keys(), registry=None) + g = Gauge('g', 'help', labelnames=labels.keys(), registry=None) + + c.labels(**labels).inc(1) + g.labels(**labels).set(1) + + pid = 1 + + c.labels(**labels).inc(1) + g.labels(**labels).set(1) + + metrics = {m.name: m for m in self.registry.restricted_registry(['c_total']).collect()} + + self.assertEqual(metrics.keys(), {'c'}) + + self.assertEqual( + metrics['c'].samples, [Sample('c_total', labels, 2.0)] + ) + def test_collect_preserves_help(self): pid = 0 values.ValueClass = MultiProcessValue(lambda: pid) @@ -347,10 +433,8 @@ def add_label(key, value): m.name: m for m in self.collector.merge(files, accumulate=False) } - metrics['h'].samples.sort( - key=lambda x: (x[0], float(x[1].get('le', 0))) - ) expected_histogram = [ + Sample('h_sum', labels, 6.0), Sample('h_bucket', add_label('le', '0.005'), 0.0), Sample('h_bucket', add_label('le', '0.01'), 0.0), Sample('h_bucket', add_label('le', '0.025'), 0.0), @@ -366,7 +450,6 @@ def add_label(key, 
value): Sample('h_bucket', add_label('le', '7.5'), 0.0), Sample('h_bucket', add_label('le', '10.0'), 0.0), Sample('h_bucket', add_label('le', '+Inf'), 0.0), - Sample('h_sum', labels, 6.0), ] self.assertEqual(metrics['h'].samples, expected_histogram) @@ -396,6 +479,116 @@ def test_remove_clear_warning(self): assert "Removal of labels has not been implemented" in str(w[0].message) assert issubclass(w[-1].category, UserWarning) assert "Clearing labels has not been implemented" in str(w[-1].message) + + def test_child_name_is_built_once_with_namespace_subsystem_unit(self): + """ + Repro for #1035: + In multiprocess mode, child metrics must NOT rebuild the full name + (namespace/subsystem/unit) a second time. The exported family name should + be built once, and Counter samples should use "_total". + """ + from prometheus_client import Counter + + class CustomCounter(Counter): + def __init__( + self, + name, + documentation, + labelnames=(), + namespace="mydefaultnamespace", + subsystem="mydefaultsubsystem", + unit="", + registry=None, + _labelvalues=None + ): + # Intentionally provide non-empty defaults to trigger the bug path. + super().__init__( + name=name, + documentation=documentation, + labelnames=labelnames, + namespace=namespace, + subsystem=subsystem, + unit=unit, + registry=registry, + _labelvalues=_labelvalues) + + # Create a Counter with explicit namespace/subsystem/unit + c = CustomCounter( + name='m', + documentation='help', + labelnames=('status', 'method'), + namespace='ns', + subsystem='ss', + unit='seconds', # avoid '_total_total' confusion + registry=None, # not registered in local registry in multiprocess mode + ) + + # Create two labeled children + c.labels(status='200', method='GET').inc() + c.labels(status='404', method='POST').inc() + + # Collect from the multiprocess collector initialized in setUp() + metrics = {m.name: m for m in self.collector.collect()} + + # Family name should be built once (no '_total' in family name) + expected_family = 'ns_ss_m_seconds' + self.assertIn(expected_family, metrics, f"missing family {expected_family}") + + # Counter samples must use '_total' + mf = metrics[expected_family] + sample_names = {s.name for s in mf.samples} + self.assertTrue( + all(name == expected_family + '_total' for name in sample_names), + f"unexpected sample names: {sample_names}" + ) + + # Ensure no double-built prefix sneaks in (the original bug) + bad_prefix = 'mydefaultnamespace_mydefaultsubsystem_' + all_names = {mf.name, *sample_names} + self.assertTrue( + all(not n.startswith(bad_prefix) for n in all_names), + f"found double-built name(s): {[n for n in all_names if n.startswith(bad_prefix)]}" + ) + + def test_child_preserves_parent_context_for_subclasses(self): + """ + Ensure child metrics preserve parent's namespace/subsystem/unit information + so that subclasses can correctly use these parameters in their logic. 
+ """ + class ContextAwareCounter(Counter): + def __init__(self, + name, + documentation, + labelnames=(), + namespace="", + subsystem="", + unit="", + **kwargs): + self.context = { + 'namespace': namespace, + 'subsystem': subsystem, + 'unit': unit + } + super().__init__(name, documentation, + labelnames=labelnames, + namespace=namespace, + subsystem=subsystem, + unit=unit, + **kwargs) + + parent = ContextAwareCounter('m', 'help', + labelnames=['status'], + namespace='prod', + subsystem='api', + unit='seconds', + registry=None) + + child = parent.labels(status='200') + + # Verify that child retains parent's context + self.assertEqual(child.context['namespace'], 'prod') + self.assertEqual(child.context['subsystem'], 'api') + self.assertEqual(child.context['unit'], 'seconds') class TestMmapedDict(unittest.TestCase): diff --git a/tox.ini b/tox.ini index bef57f85..992bd0a7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,16 +1,17 @@ [tox] -envlist = coverage-clean,py{3.9,3.10,3.11,3.12,3.13,py3.9,3.9-nooptionals},coverage-report,flake8,isort,mypy +envlist = coverage-clean,py{3.9,3.10,3.11,3.12,3.13,3.14,py3.9,3.9-nooptionals},coverage-report,flake8,isort,mypy [testenv] deps = + asgiref coverage pytest pytest-benchmark attrs {py3.9,pypy3.9}: twisted - # NOTE: Pinned due to https://github.com/prometheus/client_python/issues/1020 - py3.9: asgiref==3.7 - pypy3.9: asgiref==3.7 + {py3.9,pypy3.9}: aiohttp + {py3.9,pypy3.9}: django + {py3.9}: python-snappy commands = coverage run --parallel -m pytest {posargs} [testenv:py3.9-nooptionals] @@ -47,6 +48,7 @@ commands = [testenv:mypy] deps = pytest + aiohttp asgiref mypy==0.991 skip_install = true